os: osx
osx_image: xcode8.2
install: &osx_install_sccache >
- travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-apple-darwin &&
+ travis_retry curl -o /usr/local/bin/sccache https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-apple-darwin &&
chmod +x /usr/local/bin/sccache &&
travis_retry curl -o /usr/local/bin/stamp https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-17-stamp-x86_64-apple-darwin &&
chmod +x /usr/local/bin/stamp
- set PATH=C:\Python27;%PATH%
# Download and install sccache
- - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-pc-windows-msvc
- - mv 2017-03-22-sccache-x86_64-pc-windows-msvc sccache.exe
+ - appveyor-retry appveyor DownloadFile https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-pc-windows-msvc
+ - mv 2017-03-24-sccache-x86_64-pc-windows-msvc sccache.exe
- set PATH=%PATH%;%CD%
# Download and install ninja
-Subproject commit c995e9eb5acf3976ae8674a0dc6d9e958053d9fd
+Subproject commit 4e95c6b41eca3388f54dd5f7787366ad2df637b5
name = "compiletest"
version = "0.0.0"
dependencies = [
- "env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "env_logger"
-version = "0.3.5"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "env_logger"
version = "0.4.2"
name = "linkchecker"
version = "0.1.0"
-[[package]]
-name = "log"
-version = "0.0.0"
-
[[package]]
name = "log"
version = "0.3.7"
"arena 0.0.0",
"fmt_macros 0.0.0",
"graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_back 0.0.0",
"rustc_bitflags 0.0.0",
"rustc_const_math 0.0.0",
name = "rustc_back"
version = "0.0.0"
dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0",
"syntax 0.0.0",
]
version = "0.0.0"
dependencies = [
"graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
dependencies = [
"arena 0.0.0",
"graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_const_math 0.0.0",
name = "rustc_data_structures"
version = "0.0.0"
dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0",
]
version = "0.0.0"
dependencies = [
"arena 0.0.0",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"proc_macro_plugin 0.0.0",
"rustc 0.0.0",
"rustc_back 0.0.0",
version = "0.0.0"
dependencies = [
"graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_data_structures 0.0.0",
"serialize 0.0.0",
name = "rustc_lint"
version = "0.0.0"
dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_const_eval 0.0.0",
version = "0.0.0"
dependencies = [
"flate 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"proc_macro 0.0.0",
"rustc 0.0.0",
"rustc_back 0.0.0",
version = "0.0.0"
dependencies = [
"graphviz 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_bitflags 0.0.0",
"rustc_const_eval 0.0.0",
name = "rustc_passes"
version = "0.0.0"
dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_const_eval 0.0.0",
"rustc_const_math 0.0.0",
version = "0.0.0"
dependencies = [
"arena 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_errors 0.0.0",
"syntax 0.0.0",
name = "rustc_save_analysis"
version = "0.0.0"
dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-data 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-span 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
version = "0.0.0"
dependencies = [
"flate 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_bitflags 0.0.0",
dependencies = [
"arena 0.0.0",
"fmt_macros 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_const_eval 0.0.0",
dependencies = [
"arena 0.0.0",
"build_helper 0.1.0",
+ "env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_back 0.0.0",
"rustc_const_eval 0.0.0",
name = "syntax"
version = "0.0.0"
dependencies = [
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_bitflags 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
version = "0.0.0"
dependencies = [
"fmt_macros 0.0.0",
- "log 0.0.0",
+ "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"proc_macro 0.0.0",
"rustc_errors 0.0.0",
"syntax 0.0.0",
"checksum clap 2.21.1 (registry+https://github.com/rust-lang/crates.io-index)" = "74a80f603221c9cd9aa27a28f52af452850051598537bb6b359c38a7d61e5cda"
"checksum cmake 0.1.22 (registry+https://github.com/rust-lang/crates.io-index)" = "d18d68987ed4c516dcc3e7913659bfa4076f5182eea4a7e0038bb060953e76ac"
"checksum dtoa 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80c8b71fd71146990a9742fc06dcbbde19161a267e0ad4e572c35162f4578c90"
-"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
"checksum env_logger 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e3856f1697098606fc6cb97a93de88ca3f3bc35bb878c725920e6e82ecf05e83"
"checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922"
"checksum gcc 0.3.44 (registry+https://github.com/rust-lang/crates.io-index)" = "a32cd40070d7611ab76343dcb3204b2bb28c8a9450989a83a3d590248142f439"
.arg(ADB_TEST_DIR));
let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
- build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir[..]]));
+ build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir]));
for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
let f = t!(f);
RUN curl -O http://ftp.nl.debian.org/debian/dists/jessie/main/installer-armhf/current/images/device-tree/vexpress-v2p-ca15-tc1.dtb
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV TARGETS=arm-linux-androideabi
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/arm-unknown-linux-gnueabi/bin
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/aarch64-unknown-linux-gnueabi/bin
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV \
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV RUST_CONFIGURE_ARGS \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/powerpc-unknown-linux-gnu/bin
RUN ./build-powerpc64le-toolchain.sh
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/powerpc64-unknown-linux-gnu/bin
USER root
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV PATH=$PATH:/x-tools/s390x-ibm-linux-gnu/bin:/x-tools/x86_64-unknown-netbsd/bin
ENTRYPOINT ["/rustroot/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV HOSTS=i686-unknown-linux-gnu
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
ENV RUST_CONFIGURE_ARGS \
lib32stdc++6
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
}
curl https://s3.amazonaws.com/mozilla-games/emscripten/releases/emsdk-portable.tar.gz | \
- tar xzf -
+ tar xzf -
+
+# Some versions of the EMSDK archive have their contents in .emsdk-portable
+# and others in emsdk_portable. Make sure the EMSDK ends up in a fixed path.
+if [ -d emsdk-portable ]; then
+ mv emsdk-portable emsdk_portable
+fi
+
+if [ ! -d emsdk_portable ]; then
+ echo "ERROR: Invalid emsdk archive. Dumping working directory." >&2
+ ls -l
+ exit 1
+fi
+
+# Some versions of the EMSDK set the permissions of the root directory to
+# 0700. Ensure the directory is readable by all users.
+chmod 755 emsdk_portable
+
source emsdk_portable/emsdk_env.sh
hide_output emsdk update
hide_output emsdk install --build=Release sdk-tag-1.37.1-32bit
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
pkg-config
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
xz-utils
RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-22-sccache-x86_64-unknown-linux-musl && \
+ https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-03-24-sccache-x86_64-unknown-linux-musl && \
chmod +x /usr/local/bin/sccache
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
------------------------
+To get a range that goes from 0 to 10 and includes the value 10, you
+can write `0...10`:
+```rust
+#![feature(inclusive_range_syntax)]
+fn main() {
+ for i in 0...10 {
+ println!("{}", i);
+ }
+}
+```
let toknum = &s[content_end + 3 .. toknum_end];
let not_found = format!("didn't find token {:?} in the map", toknum);
- let proto_tok = tokens.get(toknum).expect(¬_found[..]);
+ let proto_tok = tokens.get(toknum).expect(¬_found);
let nm = Symbol::intern(content);
let mut token_file = File::open(&Path::new(&args.next().unwrap())).unwrap();
let mut token_list = String::new();
token_file.read_to_string(&mut token_list).unwrap();
- let token_map = parse_token_list(&token_list[..]);
+ let token_map = parse_token_list(&token_list);
let stdin = std::io::stdin();
let lock = stdin.lock();
let lines = lock.lines();
let antlr_tokens = lines.map(|l| parse_antlr_token(l.unwrap().trim(),
&token_map,
- &surrogate_pairs_pos[..],
+ &surrogate_pairs_pos,
has_bom));
for antlr_tok in antlr_tokens {
thread::spawn(move || {
check_links(&n);
let a: &[_] = &[&1, &2, &3];
- assert_eq!(a, &n.iter().collect::<Vec<_>>()[..]);
+ assert_eq!(a, &*n.iter().collect::<Vec<_>>());
})
.join()
.ok()
///
/// It rounds ``f`` to a float with 64 bit significand and multiplies it by the best approximation
/// of `10^e` (in the same floating point format). This is often enough to get the correct result.
-/// However, when the result is close to halfway between two adjecent (ordinary) floats, the
+/// However, when the result is close to halfway between two adjacent (ordinary) floats, the
/// compound rounding error from multiplying two approximation means the result may be off by a
/// few bits. When this happens, the iterative Algorithm R fixes things up.
///
//
// Therefore, when the rounded-off bits are != 0.5 ULP, they decide the rounding
// on their own. When they are equal and the remainder is non-zero, the value still
- // needs to be rounded up. Only when the rounded off bits are 1/2 and the remainer
+ // needs to be rounded up. Only when the rounded off bits are 1/2 and the remainder
// is zero, we have a half-to-even situation.
let bits = x.bit_length();
let lsb = bits - T::sig_bits() as usize;
fn insertion_sort<T, F>(v: &mut [T], is_less: &mut F)
where F: FnMut(&T, &T) -> bool
{
- for i in 2..v.len()+1 {
- shift_tail(&mut v[..i], is_less);
+ for i in 1..v.len() {
+ shift_tail(&mut v[..i+1], is_less);
}
}
pub fn to_dot_string(&self) -> String {
match self {
&LabelStr(ref s) => format!("\"{}\"", s.escape_default()),
- &EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s[..])),
+ &EscStr(ref s) => format!("\"{}\"", LabelText::escape_str(&s)),
&HtmlStr(ref s) => format!("<{}>", s),
}
}
let mut prefix = self.pre_escaped_content().into_owned();
let suffix = suffix.pre_escaped_content();
prefix.push_str(r"\n\n");
- prefix.push_str(&suffix[..]);
+ prefix.push_str(&suffix);
EscStr(prefix.into_cow())
}
}
type Node = Node;
type Edge = &'a Edge;
fn graph_id(&'a self) -> Id<'a> {
- Id::new(&self.name[..]).unwrap()
+ Id::new(self.name).unwrap()
}
fn node_id(&'a self, n: &Node) -> Id<'a> {
id_name(n)
-Subproject commit 64d954c6a76e896fbf7ed5c17e77c40e388abe84
+Subproject commit 05a2d197356ef253dfd985166576619ac9b6947f
+++ /dev/null
-[package]
-authors = ["The Rust Project Developers"]
-name = "log"
-version = "0.0.0"
-
-[lib]
-name = "log"
-path = "lib.rs"
-crate-type = ["dylib", "rlib"]
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use std::ascii::AsciiExt;
-use std::cmp;
-
-#[derive(Debug, Clone)]
-pub struct LogDirective {
- pub name: Option<String>,
- pub level: u32,
-}
-
-pub const LOG_LEVEL_NAMES: [&'static str; 5] = ["ERROR", "WARN", "INFO", "DEBUG", "TRACE"];
-
-/// Parse an individual log level that is either a number or a symbolic log level
-fn parse_log_level(level: &str) -> Option<u32> {
- level.parse::<u32>()
- .ok()
- .or_else(|| {
- let pos = LOG_LEVEL_NAMES.iter().position(|&name| name.eq_ignore_ascii_case(level));
- pos.map(|p| p as u32 + 1)
- })
- .map(|p| cmp::min(p, ::MAX_LOG_LEVEL))
-}
-
-/// Parse a logging specification string (e.g: "crate1,crate2::mod3,crate3::x=1/foo")
-/// and return a vector with log directives.
-///
-/// Valid log levels are 0-255, with the most likely ones being 1-4 (defined in
-/// std::). Also supports string log levels of error, warn, info, and debug
-pub fn parse_logging_spec(spec: &str) -> (Vec<LogDirective>, Option<String>) {
- let mut dirs = Vec::new();
-
- let mut parts = spec.split('/');
- let mods = parts.next();
- let filter = parts.next();
- if parts.next().is_some() {
- println!("warning: invalid logging spec '{}', ignoring it (too many '/'s)",
- spec);
- return (dirs, None);
- }
- if let Some(m) = mods {
- for s in m.split(',') {
- if s.is_empty() {
- continue;
- }
- let mut parts = s.split('=');
- let (log_level, name) =
- match (parts.next(), parts.next().map(|s| s.trim()), parts.next()) {
- (Some(part0), None, None) => {
- // if the single argument is a log-level string or number,
- // treat that as a global fallback
- match parse_log_level(part0) {
- Some(num) => (num, None),
- None => (::MAX_LOG_LEVEL, Some(part0)),
- }
- }
- (Some(part0), Some(""), None) => (::MAX_LOG_LEVEL, Some(part0)),
- (Some(part0), Some(part1), None) => {
- match parse_log_level(part1) {
- Some(num) => (num, Some(part0)),
- _ => {
- println!("warning: invalid logging spec '{}', ignoring it", part1);
- continue;
- }
- }
- }
- _ => {
- println!("warning: invalid logging spec '{}', ignoring it", s);
- continue;
- }
- };
- dirs.push(LogDirective {
- name: name.map(str::to_owned),
- level: log_level,
- });
- }
- }
-
- (dirs, filter.map(str::to_owned))
-}
-
-#[cfg(test)]
-mod tests {
- use super::parse_logging_spec;
-
- #[test]
- fn parse_logging_spec_valid() {
- let (dirs, filter) = parse_logging_spec("crate1::mod1=1,crate1::mod2,crate2=4");
- assert_eq!(dirs.len(), 3);
- assert_eq!(dirs[0].name, Some("crate1::mod1".to_owned()));
- assert_eq!(dirs[0].level, 1);
-
- assert_eq!(dirs[1].name, Some("crate1::mod2".to_owned()));
- assert_eq!(dirs[1].level, ::MAX_LOG_LEVEL);
-
- assert_eq!(dirs[2].name, Some("crate2".to_owned()));
- assert_eq!(dirs[2].level, 4);
- assert!(filter.is_none());
- }
-
- #[test]
- fn parse_logging_spec_invalid_crate() {
- // test parse_logging_spec with multiple = in specification
- let (dirs, filter) = parse_logging_spec("crate1::mod1=1=2,crate2=4");
- assert_eq!(dirs.len(), 1);
- assert_eq!(dirs[0].name, Some("crate2".to_owned()));
- assert_eq!(dirs[0].level, 4);
- assert!(filter.is_none());
- }
-
- #[test]
- fn parse_logging_spec_invalid_log_level() {
- // test parse_logging_spec with 'noNumber' as log level
- let (dirs, filter) = parse_logging_spec("crate1::mod1=noNumber,crate2=4");
- assert_eq!(dirs.len(), 1);
- assert_eq!(dirs[0].name, Some("crate2".to_owned()));
- assert_eq!(dirs[0].level, 4);
- assert!(filter.is_none());
- }
-
- #[test]
- fn parse_logging_spec_string_log_level() {
- // test parse_logging_spec with 'warn' as log level
- let (dirs, filter) = parse_logging_spec("crate1::mod1=wrong,crate2=warn");
- assert_eq!(dirs.len(), 1);
- assert_eq!(dirs[0].name, Some("crate2".to_owned()));
- assert_eq!(dirs[0].level, ::WARN);
- assert!(filter.is_none());
- }
-
- #[test]
- fn parse_logging_spec_empty_log_level() {
- // test parse_logging_spec with '' as log level
- let (dirs, filter) = parse_logging_spec("crate1::mod1=wrong,crate2=");
- assert_eq!(dirs.len(), 1);
- assert_eq!(dirs[0].name, Some("crate2".to_owned()));
- assert_eq!(dirs[0].level, ::MAX_LOG_LEVEL);
- assert!(filter.is_none());
- }
-
- #[test]
- fn parse_logging_spec_global() {
- // test parse_logging_spec with no crate
- let (dirs, filter) = parse_logging_spec("warn,crate2=4");
- assert_eq!(dirs.len(), 2);
- assert_eq!(dirs[0].name, None);
- assert_eq!(dirs[0].level, 2);
- assert_eq!(dirs[1].name, Some("crate2".to_owned()));
- assert_eq!(dirs[1].level, 4);
- assert!(filter.is_none());
- }
-
- #[test]
- fn parse_logging_spec_valid_filter() {
- let (dirs, filter) = parse_logging_spec("crate1::mod1=1,crate1::mod2,crate2=4/abc");
- assert_eq!(dirs.len(), 3);
- assert_eq!(dirs[0].name, Some("crate1::mod1".to_owned()));
- assert_eq!(dirs[0].level, 1);
-
- assert_eq!(dirs[1].name, Some("crate1::mod2".to_owned()));
- assert_eq!(dirs[1].level, ::MAX_LOG_LEVEL);
-
- assert_eq!(dirs[2].name, Some("crate2".to_owned()));
- assert_eq!(dirs[2].level, 4);
- assert!(filter.is_some() && filter.unwrap().to_owned() == "abc");
- }
-
- #[test]
- fn parse_logging_spec_invalid_crate_filter() {
- let (dirs, filter) = parse_logging_spec("crate1::mod1=1=2,crate2=4/a.c");
- assert_eq!(dirs.len(), 1);
- assert_eq!(dirs[0].name, Some("crate2".to_owned()));
- assert_eq!(dirs[0].level, 4);
- assert!(filter.is_some() && filter.unwrap().to_owned() == "a.c");
- }
-
- #[test]
- fn parse_logging_spec_empty_with_filter() {
- let (dirs, filter) = parse_logging_spec("crate1/a*c");
- assert_eq!(dirs.len(), 1);
- assert_eq!(dirs[0].name, Some("crate1".to_owned()));
- assert_eq!(dirs[0].level, ::MAX_LOG_LEVEL);
- assert!(filter.is_some() && filter.unwrap().to_owned() == "a*c");
- }
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Utilities for program-wide and customizable logging
-//!
-//! # Examples
-//!
-//! ```
-//! # #![feature(rustc_private)]
-//! #[macro_use] extern crate log;
-//!
-//! fn main() {
-//! debug!("this is a debug {:?}", "message");
-//! error!("this is printed by default");
-//!
-//! if log_enabled!(log::INFO) {
-//! let x = 3 * 4; // expensive computation
-//! info!("the answer was: {:?}", x);
-//! }
-//! }
-//! ```
-//!
-//! Assumes the binary is `main`:
-//!
-//! ```{.bash}
-//! $ RUST_LOG=error ./main
-//! ERROR:main: this is printed by default
-//! ```
-//!
-//! ```{.bash}
-//! $ RUST_LOG=info ./main
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
-//! ```
-//!
-//! ```{.bash}
-//! $ RUST_LOG=debug ./main
-//! DEBUG:main: this is a debug message
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
-//! ```
-//!
-//! You can also set the log level on a per module basis:
-//!
-//! ```{.bash}
-//! $ RUST_LOG=main=info ./main
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
-//! ```
-//!
-//! And enable all logging:
-//!
-//! ```{.bash}
-//! $ RUST_LOG=main ./main
-//! DEBUG:main: this is a debug message
-//! ERROR:main: this is printed by default
-//! INFO:main: the answer was: 12
-//! ```
-//!
-//! # Logging Macros
-//!
-//! There are five macros that the logging subsystem uses:
-//!
-//! * `log!(level, ...)` - the generic logging macro, takes a level as a u32 and any
-//! related `format!` arguments
-//! * `debug!(...)` - a macro hard-wired to the log level of `DEBUG`
-//! * `info!(...)` - a macro hard-wired to the log level of `INFO`
-//! * `warn!(...)` - a macro hard-wired to the log level of `WARN`
-//! * `error!(...)` - a macro hard-wired to the log level of `ERROR`
-//!
-//! All of these macros use the same style of syntax as the `format!` syntax
-//! extension. Details about the syntax can be found in the documentation of
-//! `std::fmt` along with the Rust tutorial/manual.
-//!
-//! If you want to check at runtime if a given logging level is enabled (e.g. if the
-//! information you would want to log is expensive to produce), you can use the
-//! following macro:
-//!
-//! * `log_enabled!(level)` - returns true if logging of the given level is enabled
-//!
-//! # Enabling logging
-//!
-//! Log levels are controlled on a per-module basis, and by default all logging is
-//! disabled except for `error!` (a log level of 1). Logging is controlled via the
-//! `RUST_LOG` environment variable. The value of this environment variable is a
-//! comma-separated list of logging directives. A logging directive is of the form:
-//!
-//! ```text
-//! path::to::module=log_level
-//! ```
-//!
-//! The path to the module is rooted in the name of the crate it was compiled for,
-//! so if your program is contained in a file `hello.rs`, for example, to turn on
-//! logging for this file you would use a value of `RUST_LOG=hello`.
-//! Furthermore, this path is a prefix-search, so all modules nested in the
-//! specified module will also have logging enabled.
-//!
-//! The actual `log_level` is optional to specify. If omitted, all logging will be
-//! enabled. If specified, the it must be either a numeric in the range of 1-255, or
-//! it must be one of the strings `debug`, `error`, `info`, or `warn`. If a numeric
-//! is specified, then all logging less than or equal to that numeral is enabled.
-//! For example, if logging level 3 is active, error, warn, and info logs will be
-//! printed, but debug will be omitted.
-//!
-//! As the log level for a module is optional, the module to enable logging for is
-//! also optional. If only a `log_level` is provided, then the global log level for
-//! all modules is set to this value.
-//!
-//! Some examples of valid values of `RUST_LOG` are:
-//!
-//! * `hello` turns on all logging for the 'hello' module
-//! * `info` turns on all info logging
-//! * `hello=debug` turns on debug logging for 'hello'
-//! * `hello=3` turns on info logging for 'hello'
-//! * `hello,std::option` turns on hello, and std's option logging
-//! * `error,hello=warn` turn on global error logging and also warn for hello
-//!
-//! # Filtering results
-//!
-//! A RUST_LOG directive may include a string filter. The syntax is to append
-//! `/` followed by a string. Each message is checked against the string and is
-//! only logged if it contains the string. Note that the matching is done after
-//! formatting the log string but before adding any logging meta-data. There is
-//! a single filter for all modules.
-//!
-//! Some examples:
-//!
-//! * `hello/foo` turns on all logging for the 'hello' module where the log message
-//! includes 'foo'.
-//! * `info/f.o` turns on all info logging where the log message includes 'foo',
-//! 'f1o', 'fao', etc.
-//! * `hello=debug/foo*foo` turns on debug logging for 'hello' where the log
-//! message includes 'foofoo' or 'fofoo' or 'fooooooofoo', etc.
-//! * `error,hello=warn/[0-9] scopes` turn on global error logging and also warn for
-//! hello. In both cases the log message must include a single digit number
-//! followed by 'scopes'
-//!
-//! # Performance and Side Effects
-//!
-//! Each of these macros will expand to code similar to:
-//!
-//! ```rust,ignore
-//! if log_level <= my_module_log_level() {
-//! ::log::log(log_level, format!(...));
-//! }
-//! ```
-//!
-//! What this means is that each of these macros are very cheap at runtime if
-//! they're turned off (just a load and an integer comparison). This also means that
-//! if logging is disabled, none of the components of the log will be executed.
-
-#![crate_name = "log"]
-#![unstable(feature = "rustc_private",
- reason = "use the crates.io `log` library instead",
- issue = "27812")]
-#![crate_type = "rlib"]
-#![crate_type = "dylib"]
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
- html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
- html_root_url = "https://doc.rust-lang.org/nightly/",
- html_playground_url = "https://play.rust-lang.org/",
- test(attr(deny(warnings))))]
-#![deny(missing_docs)]
-#![deny(warnings)]
-
-#![feature(staged_api)]
-
-use std::cell::RefCell;
-use std::fmt;
-use std::io::{self, Stderr};
-use std::io::prelude::*;
-use std::mem;
-use std::env;
-use std::slice;
-use std::sync::{Mutex, ONCE_INIT, Once};
-
-use directive::LOG_LEVEL_NAMES;
-
-#[macro_use]
-pub mod macros;
-
-mod directive;
-
-/// Maximum logging level of a module that can be specified. Common logging
-/// levels are found in the DEBUG/INFO/WARN/ERROR constants.
-pub const MAX_LOG_LEVEL: u32 = 255;
-
-/// The default logging level of a crate if no other is specified.
-const DEFAULT_LOG_LEVEL: u32 = 1;
-
-static mut LOCK: *mut Mutex<(Vec<directive::LogDirective>, Option<String>)> = 0 as *mut _;
-
-/// An unsafe constant that is the maximum logging level of any module
-/// specified. This is the first line of defense to determining whether a
-/// logging statement should be run.
-static mut LOG_LEVEL: u32 = MAX_LOG_LEVEL;
-
-/// Debug log level
-pub const DEBUG: u32 = 4;
-/// Info log level
-pub const INFO: u32 = 3;
-/// Warn log level
-pub const WARN: u32 = 2;
-/// Error log level
-pub const ERROR: u32 = 1;
-
-thread_local! {
- static LOCAL_LOGGER: RefCell<Option<Box<Logger + Send>>> = {
- RefCell::new(None)
- }
-}
-
-/// A trait used to represent an interface to a thread-local logger. Each thread
-/// can have its own custom logger which can respond to logging messages
-/// however it likes.
-pub trait Logger {
- /// Logs a single message described by the `record`.
- fn log(&mut self, record: &LogRecord);
-}
-
-struct DefaultLogger {
- handle: Stderr,
-}
-
-/// Wraps the log level with fmt implementations.
-#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]
-pub struct LogLevel(pub u32);
-
-impl fmt::Display for LogLevel {
- fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
- let LogLevel(level) = *self;
- match LOG_LEVEL_NAMES.get(level as usize - 1) {
- Some(ref name) => fmt::Display::fmt(name, fmt),
- None => fmt::Display::fmt(&level, fmt),
- }
- }
-}
-
-impl Logger for DefaultLogger {
- fn log(&mut self, record: &LogRecord) {
- match writeln!(&mut self.handle,
- "{}:{}: {}",
- record.level,
- record.module_path,
- record.args) {
- Err(e) => panic!("failed to log: {:?}", e),
- Ok(()) => {}
- }
- }
-}
-
-impl Drop for DefaultLogger {
- fn drop(&mut self) {
- // FIXME(#12628): is panicking the right thing to do?
- match self.handle.flush() {
- Err(e) => panic!("failed to flush a logger: {:?}", e),
- Ok(()) => {}
- }
- }
-}
-
-/// This function is called directly by the compiler when using the logging
-/// macros. This function does not take into account whether the log level
-/// specified is active or not, it will always log something if this method is
-/// called.
-///
-/// It is not recommended to call this function directly, rather it should be
-/// invoked through the logging family of macros.
-#[doc(hidden)]
-pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) {
- // Test the literal string from args against the current filter, if there
- // is one.
- unsafe {
- let filter = (*LOCK).lock().unwrap();
- if let Some(ref filter) = filter.1 {
- if !args.to_string().contains(filter) {
- return;
- }
- }
- }
-
- // Completely remove the local logger from TLS in case anyone attempts to
- // frob the slot while we're doing the logging. This will destroy any logger
- // set during logging.
- let logger = LOCAL_LOGGER.with(|s| s.borrow_mut().take());
- let mut logger = logger.unwrap_or_else(|| Box::new(DefaultLogger { handle: io::stderr() }));
- logger.log(&LogRecord {
- level: LogLevel(level),
- args: args,
- file: loc.file,
- module_path: loc.module_path,
- line: loc.line,
- });
- set_logger(logger);
-}
-
-/// Getter for the global log level. This is a function so that it can be called
-/// safely
-#[doc(hidden)]
-#[inline(always)]
-pub fn log_level() -> u32 {
- unsafe { LOG_LEVEL }
-}
-
-/// Replaces the thread-local logger with the specified logger, returning the old
-/// logger.
-pub fn set_logger(logger: Box<Logger + Send>) -> Option<Box<Logger + Send>> {
- LOCAL_LOGGER.with(|slot| mem::replace(&mut *slot.borrow_mut(), Some(logger)))
-}
-
-/// A LogRecord is created by the logging macros, and passed as the only
-/// argument to Loggers.
-#[derive(Debug)]
-pub struct LogRecord<'a> {
- /// The module path of where the LogRecord originated.
- pub module_path: &'a str,
-
- /// The LogLevel of this record.
- pub level: LogLevel,
-
- /// The arguments from the log line.
- pub args: fmt::Arguments<'a>,
-
- /// The file of where the LogRecord originated.
- pub file: &'a str,
-
- /// The line number of where the LogRecord originated.
- pub line: u32,
-}
-
-#[doc(hidden)]
-#[derive(Copy, Clone)]
-pub struct LogLocation {
- pub module_path: &'static str,
- pub file: &'static str,
- pub line: u32,
-}
-
-/// Tests whether a given module's name is enabled for a particular level of
-/// logging. This is the second layer of defense about determining whether a
-/// module's log statement should be emitted or not.
-#[doc(hidden)]
-pub fn mod_enabled(level: u32, module: &str) -> bool {
- static INIT: Once = ONCE_INIT;
- INIT.call_once(init);
-
- // It's possible for many threads are in this function, only one of them
- // will perform the global initialization, but all of them will need to check
- // again to whether they should really be here or not. Hence, despite this
- // check being expanded manually in the logging macro, this function checks
- // the log level again.
- if level > unsafe { LOG_LEVEL } {
- return false;
- }
-
- // This assertion should never get tripped unless we're in an at_exit
- // handler after logging has been torn down and a logging attempt was made.
-
- unsafe {
- let directives = (*LOCK).lock().unwrap();
- enabled(level, module, directives.0.iter())
- }
-}
-
-fn enabled(level: u32, module: &str, iter: slice::Iter<directive::LogDirective>) -> bool {
- // Search for the longest match, the vector is assumed to be pre-sorted.
- for directive in iter.rev() {
- match directive.name {
- Some(ref name) if !module.starts_with(&name[..]) => {}
- Some(..) | None => return level <= directive.level,
- }
- }
- level <= DEFAULT_LOG_LEVEL
-}
-
-/// Initialize logging for the current process.
-///
-/// This is not threadsafe at all, so initialization is performed through a
-/// `Once` primitive (and this function is called from that primitive).
-fn init() {
- let (mut directives, filter) = match env::var("RUST_LOG") {
- Ok(spec) => directive::parse_logging_spec(&spec[..]),
- Err(..) => (Vec::new(), None),
- };
-
- // Sort the provided directives by length of their name, this allows a
- // little more efficient lookup at runtime.
- directives.sort_by(|a, b| {
- let alen = a.name.as_ref().map(|a| a.len()).unwrap_or(0);
- let blen = b.name.as_ref().map(|b| b.len()).unwrap_or(0);
- alen.cmp(&blen)
- });
-
- let max_level = {
- let max = directives.iter().max_by_key(|d| d.level);
- max.map(|d| d.level).unwrap_or(DEFAULT_LOG_LEVEL)
- };
-
- unsafe {
- LOG_LEVEL = max_level;
-
- assert!(LOCK.is_null());
- LOCK = Box::into_raw(Box::new(Mutex::new((directives, filter))));
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::enabled;
- use directive::LogDirective;
-
- #[test]
- fn match_full_path() {
- let dirs = [LogDirective {
- name: Some("crate2".to_string()),
- level: 3,
- },
- LogDirective {
- name: Some("crate1::mod1".to_string()),
- level: 2,
- }];
- assert!(enabled(2, "crate1::mod1", dirs.iter()));
- assert!(!enabled(3, "crate1::mod1", dirs.iter()));
- assert!(enabled(3, "crate2", dirs.iter()));
- assert!(!enabled(4, "crate2", dirs.iter()));
- }
-
- #[test]
- fn no_match() {
- let dirs = [LogDirective {
- name: Some("crate2".to_string()),
- level: 3,
- },
- LogDirective {
- name: Some("crate1::mod1".to_string()),
- level: 2,
- }];
- assert!(!enabled(2, "crate3", dirs.iter()));
- }
-
- #[test]
- fn match_beginning() {
- let dirs = [LogDirective {
- name: Some("crate2".to_string()),
- level: 3,
- },
- LogDirective {
- name: Some("crate1::mod1".to_string()),
- level: 2,
- }];
- assert!(enabled(3, "crate2::mod1", dirs.iter()));
- }
-
- #[test]
- fn match_beginning_longest_match() {
- let dirs = [LogDirective {
- name: Some("crate2".to_string()),
- level: 3,
- },
- LogDirective {
- name: Some("crate2::mod".to_string()),
- level: 4,
- },
- LogDirective {
- name: Some("crate1::mod1".to_string()),
- level: 2,
- }];
- assert!(enabled(4, "crate2::mod1", dirs.iter()));
- assert!(!enabled(4, "crate2", dirs.iter()));
- }
-
- #[test]
- fn match_default() {
- let dirs = [LogDirective {
- name: None,
- level: 3,
- },
- LogDirective {
- name: Some("crate1::mod1".to_string()),
- level: 2,
- }];
- assert!(enabled(2, "crate1::mod1", dirs.iter()));
- assert!(enabled(3, "crate2::mod2", dirs.iter()));
- }
-
- #[test]
- fn zero_level() {
- let dirs = [LogDirective {
- name: None,
- level: 3,
- },
- LogDirective {
- name: Some("crate1::mod1".to_string()),
- level: 0,
- }];
- assert!(!enabled(1, "crate1::mod1", dirs.iter()));
- assert!(enabled(3, "crate2::mod2", dirs.iter()));
- }
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Logging macros
-
-/// The standard logging macro
-///
-/// This macro will generically log over a provided level (of type u32) with a
-/// format!-based argument list. See documentation in `std::fmt` for details on
-/// how to use the syntax.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// fn main() {
-/// log!(log::WARN, "this is a warning {}", "message");
-/// log!(log::DEBUG, "this is a debug message");
-/// log!(6, "this is a custom logging level: {level}", level=6);
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=warn ./main
-/// WARN:main: this is a warning message
-/// ```
-///
-/// ```{.bash}
-/// $ RUST_LOG=debug ./main
-/// DEBUG:main: this is a debug message
-/// WARN:main: this is a warning message
-/// ```
-///
-/// ```{.bash}
-/// $ RUST_LOG=6 ./main
-/// DEBUG:main: this is a debug message
-/// WARN:main: this is a warning message
-/// 6:main: this is a custom logging level: 6
-/// ```
-#[macro_export]
-macro_rules! log {
- ($lvl:expr, $($arg:tt)+) => ({
- static LOC: ::log::LogLocation = ::log::LogLocation {
- line: line!(),
- file: file!(),
- module_path: module_path!(),
- };
- let lvl = $lvl;
- if log_enabled!(lvl) {
- ::log::log(lvl, &LOC, format_args!($($arg)+))
- }
- })
-}
-
-/// A convenience macro for logging at the error log level.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// fn main() {
-/// let error = 3;
-/// error!("the build has failed with error code: {}", error);
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=error ./main
-/// ERROR:main: the build has failed with error code: 3
-/// ```
-///
-#[macro_export]
-macro_rules! error {
- ($($arg:tt)*) => (log!(::log::ERROR, $($arg)*))
-}
-
-/// A convenience macro for logging at the warning log level.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// fn main() {
-/// let code = 3;
-/// warn!("you may like to know that a process exited with: {}", code);
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=warn ./main
-/// WARN:main: you may like to know that a process exited with: 3
-/// ```
-#[macro_export]
-macro_rules! warn {
- ($($arg:tt)*) => (log!(::log::WARN, $($arg)*))
-}
-
-/// A convenience macro for logging at the info log level.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// fn main() {
-/// let ret = 3;
-/// info!("this function is about to return: {}", ret);
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=info ./main
-/// INFO:main: this function is about to return: 3
-/// ```
-#[macro_export]
-macro_rules! info {
- ($($arg:tt)*) => (log!(::log::INFO, $($arg)*))
-}
-
-/// A convenience macro for logging at the debug log level. This macro will
-/// be omitted at compile time in an optimized build unless `-C debug-assertions`
-/// is passed to the compiler.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// fn main() {
-/// debug!("x = {x}, y = {y}", x=10, y=20);
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=debug ./main
-/// DEBUG:main: x = 10, y = 20
-/// ```
-#[macro_export]
-macro_rules! debug {
- ($($arg:tt)*) => (if cfg!(debug_assertions) { log!(::log::DEBUG, $($arg)*) })
-}
-
-/// A macro to test whether a log level is enabled for the current module.
-///
-/// # Examples
-///
-/// ```
-/// # #![feature(rustc_private)]
-/// #[macro_use] extern crate log;
-///
-/// struct Point { x: i32, y: i32 }
-/// fn some_expensive_computation() -> Point { Point { x: 1, y: 2 } }
-///
-/// fn main() {
-/// if log_enabled!(log::DEBUG) {
-/// let x = some_expensive_computation();
-/// debug!("x.x = {}, x.y = {}", x.x, x.y);
-/// }
-/// }
-/// ```
-///
-/// Assumes the binary is `main`:
-///
-/// ```{.bash}
-/// $ RUST_LOG=error ./main
-/// ```
-///
-/// ```{.bash}
-/// $ RUST_LOG=debug ./main
-/// DEBUG:main: x.x = 1, x.y = 2
-/// ```
-#[macro_export]
-macro_rules! log_enabled {
- ($lvl:expr) => ({
- let lvl = $lvl;
- (lvl != ::log::DEBUG || cfg!(debug_assertions)) &&
- lvl <= ::log::log_level() &&
- ::log::mod_enabled(lvl, module_path!())
- })
-}
arena = { path = "../libarena" }
fmt_macros = { path = "../libfmt_macros" }
graphviz = { path = "../libgraphviz" }
-log = { path = "../liblog" }
+log = "0.3"
rustc_back = { path = "../librustc_back" }
rustc_bitflags = { path = "../librustc_bitflags" }
rustc_const_math = { path = "../librustc_const_math" }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use hir::def_id::CrateNum;
use std::fmt::Debug;
use std::sync::Arc;
CoherenceCheckImpl(D),
CoherenceOverlapCheck(D),
CoherenceOverlapCheckSpecial(D),
- CoherenceOverlapInherentCheck(D),
CoherenceOrphanCheck(D),
Variance,
WfCheck(D),
TypeckItemType(D),
UnusedTraitCheck,
CheckConst(D),
- Privacy,
+ PrivacyAccessLevels(CrateNum),
IntrinsicCheck(D),
MatchCheck(D),
CheckEntryFn => Some(CheckEntryFn),
Variance => Some(Variance),
UnusedTraitCheck => Some(UnusedTraitCheck),
- Privacy => Some(Privacy),
+ PrivacyAccessLevels(k) => Some(PrivacyAccessLevels(k)),
Reachability => Some(Reachability),
DeadCheck => Some(DeadCheck),
LateLintCheck => Some(LateLintCheck),
CoherenceCheckImpl(ref d) => op(d).map(CoherenceCheckImpl),
CoherenceOverlapCheck(ref d) => op(d).map(CoherenceOverlapCheck),
CoherenceOverlapCheckSpecial(ref d) => op(d).map(CoherenceOverlapCheckSpecial),
- CoherenceOverlapInherentCheck(ref d) => op(d).map(CoherenceOverlapInherentCheck),
CoherenceOrphanCheck(ref d) => op(d).map(CoherenceOrphanCheck),
WfCheck(ref d) => op(d).map(WfCheck),
TypeckItemType(ref d) => op(d).map(TypeckItemType),
pub fn keys(&self) -> Vec<M::Key> {
self.map.keys().cloned().collect()
}
-
- /// Append `elem` to the vector stored for `k`, creating a new vector if needed.
- /// This is considered a write to `k`.
- ///
- /// NOTE: Caution is required when using this method. You should
- /// be sure that nobody is **reading from the vector** while you
- /// are writing to it. Eventually, it'd be nice to remove this.
- pub fn push<E: Clone>(&mut self, k: M::Key, elem: E)
- where M: DepTrackingMapConfig<Value=Vec<E>>
- {
- self.write(&k);
- self.map.entry(k)
- .or_insert(Vec::new())
- .push(elem);
- }
}
impl<M: DepTrackingMapConfig> MemoizationMap for RefCell<DepTrackingMap<M>> {
}
/// Indicates that the current task `C` reads `v` by adding an
- /// edge from `v` to `C`. If there is no current task, panics. If
- /// you want to suppress this edge, use `ignore`.
+ /// edge from `v` to `C`. If there is no current task, has no
+ /// effect. Note that *reading* from tracked state is harmless if
+ /// you are not in a task; what is bad is *writing* to tracked
+ /// state (and leaking data that you read into a tracked task).
pub fn read(&mut self, v: DepNode<D>) {
- let source = self.make_node(v);
- self.add_edge_from_current_node(|current| (source, current))
+ if self.current_node().is_some() {
+ let source = self.make_node(v);
+ self.add_edge_from_current_node(|current| (source, current))
+ }
}
/// Indicates that the current task `C` writes `v` by adding an
let mut stack = self.stack.borrow_mut();
match *message {
- DepMessage::Read(ref n) => self.check_edge(Some(Some(n)), top(&stack)),
+ // It is ok to READ shared state outside of a
+ // task. That can't do any harm (at least, the only
+ // way it can do harm is by leaking that data into a
+ // query or task, which would be a problem
+ // anyway). What would be bad is WRITING to that
+ // state.
+ DepMessage::Read(_) => { }
DepMessage::Write(ref n) => self.check_edge(top(&stack), Some(Some(n))),
DepMessage::PushTask(ref n) => stack.push(Some(n.clone())),
DepMessage::PushIgnore => stack.push(None),
(None, None) => unreachable!(),
// nothing on top of the stack
- (None, Some(n)) | (Some(n), None) => bug!("read/write of {:?} but no current task", n),
+ (None, Some(n)) | (Some(n), None) => bug!("write of {:?} but no current task", n),
// this corresponds to an Ignore being top of the stack
(Some(None), _) | (_, Some(None)) => (),
hir::ExprIndex(P(self.lower_expr(el)), P(self.lower_expr(er)))
}
ExprKind::Range(ref e1, ref e2, lims) => {
- fn make_struct(this: &mut LoweringContext,
- ast_expr: &Expr,
- path: &[&str],
- fields: &[(&str, &P<Expr>)]) -> hir::Expr {
- let struct_path = &iter::once(&"ops").chain(path).map(|s| *s)
- .collect::<Vec<_>>();
- let unstable_span = this.allow_internal_unstable("...", ast_expr.span);
-
- if fields.len() == 0 {
- this.expr_std_path(unstable_span, struct_path,
- ast_expr.attrs.clone())
- } else {
- let fields = fields.into_iter().map(|&(s, e)| {
- let expr = P(this.lower_expr(&e));
- let unstable_span = this.allow_internal_unstable("...", e.span);
- this.field(Symbol::intern(s), expr, unstable_span)
- }).collect();
- let attrs = ast_expr.attrs.clone();
-
- this.expr_std_struct(unstable_span, struct_path, fields, None, attrs)
- }
- }
-
use syntax::ast::RangeLimits::*;
- return match (e1, e2, lims) {
- (&None, &None, HalfOpen) =>
- make_struct(self, e, &["RangeFull"], &[]),
-
- (&Some(ref e1), &None, HalfOpen) =>
- make_struct(self, e, &["RangeFrom"],
- &[("start", e1)]),
-
- (&None, &Some(ref e2), HalfOpen) =>
- make_struct(self, e, &["RangeTo"],
- &[("end", e2)]),
-
- (&Some(ref e1), &Some(ref e2), HalfOpen) =>
- make_struct(self, e, &["Range"],
- &[("start", e1), ("end", e2)]),
-
- (&None, &Some(ref e2), Closed) =>
- make_struct(self, e, &["RangeToInclusive"],
- &[("end", e2)]),
-
- (&Some(ref e1), &Some(ref e2), Closed) =>
- make_struct(self, e, &["RangeInclusive", "NonEmpty"],
- &[("start", e1), ("end", e2)]),
+ let (path, variant) = match (e1, e2, lims) {
+ (&None, &None, HalfOpen) => ("RangeFull", None),
+ (&Some(..), &None, HalfOpen) => ("RangeFrom", None),
+ (&None, &Some(..), HalfOpen) => ("RangeTo", None),
+ (&Some(..), &Some(..), HalfOpen) => ("Range", None),
+ (&None, &Some(..), Closed) => ("RangeToInclusive", None),
+ (&Some(..), &Some(..), Closed) => ("RangeInclusive", Some("NonEmpty")),
+ (_, &None, Closed) =>
+ panic!(self.diagnostic().span_fatal(
+ e.span, "inclusive range with no end")),
+ };
- _ => panic!(self.diagnostic()
- .span_fatal(e.span, "inclusive range with no end")),
+ let fields =
+ e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e)))
+ .map(|(s, e)| {
+ let expr = P(self.lower_expr(&e));
+ let unstable_span = self.allow_internal_unstable("...", e.span);
+ self.field(Symbol::intern(s), expr, unstable_span)
+ }).collect::<P<[hir::Field]>>();
+
+ let is_unit = fields.is_empty();
+ let unstable_span = self.allow_internal_unstable("...", e.span);
+ let struct_path =
+ iter::once("ops").chain(iter::once(path)).chain(variant)
+ .collect::<Vec<_>>();
+ let struct_path = self.std_path(unstable_span, &struct_path, is_unit);
+ let struct_path = hir::QPath::Resolved(None, P(struct_path));
+
+ return hir::Expr {
+ id: self.lower_node_id(e.id),
+ node: if is_unit {
+ hir::ExprPath(struct_path)
+ } else {
+ hir::ExprStruct(struct_path, fields, None)
+ },
+ span: unstable_span,
+ attrs: e.attrs.clone(),
};
}
ExprKind::Path(ref qself, ref path) => {
P(self.expr(sp, hir::ExprTup(exprs), ThinVec::new()))
}
- fn expr_std_struct(&mut self,
- span: Span,
- components: &[&str],
- fields: hir::HirVec<hir::Field>,
- e: Option<P<hir::Expr>>,
- attrs: ThinVec<Attribute>) -> hir::Expr {
- let path = self.std_path(span, components, false);
- let qpath = hir::QPath::Resolved(None, P(path));
- self.expr(span, hir::ExprStruct(qpath, fields, e), attrs)
- }
-
fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinVec<Attribute>) -> hir::Expr {
hir::Expr {
id: self.next_id(),
intravisit::walk_crate(&mut collector, &forest.krate);
let map = collector.map;
- if log_enabled!(::log::DEBUG) {
+ if log_enabled!(::log::LogLevel::Debug) {
// This only makes sense for ordered stores; note the
// enumerate to count the number of entries.
let (entries_less_1, _) = map.iter().filter(|&x| {
pub fn is_elided(&self) -> bool {
self.name == keywords::Invalid.name()
}
+
+ pub fn is_static(&self) -> bool {
+ self.name == keywords::StaticLifetime.name()
+ }
}
/// A lifetime definition, eg `'a: 'b+'c+'d`
impl Encodable for Fingerprint {
#[inline]
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- for &byte in &self.0[..] {
+ for &byte in &self.0 {
s.emit_u8(byte)?;
}
Ok(())
#[inline]
fn decode<D: Decoder>(d: &mut D) -> Result<Fingerprint, D::Error> {
let mut result = Fingerprint([0u8; FINGERPRINT_LENGTH]);
- for byte in &mut result.0[..] {
+ for byte in &mut result.0 {
*byte = d.read_u8()?;
}
Ok(result)
use std::default::Default as StdDefault;
use std::mem;
use std::fmt;
-use std::ops::Deref;
use syntax::attr;
use syntax::ast;
use syntax::symbol::Symbol;
-use syntax_pos::{MultiSpan, Span};
+use syntax_pos::{DUMMY_SP, MultiSpan, Span};
use errors::{self, Diagnostic, DiagnosticBuilder};
use hir;
+use hir::def_id::LOCAL_CRATE;
use hir::intravisit as hir_visit;
use syntax::visit as ast_visit;
Allow => bug!("earlier conditional return should handle Allow case")
};
let hyphen_case_lint_name = name.replace("_", "-");
- if lint_flag_val.as_str().deref() == name {
+ if lint_flag_val.as_str() == name {
err.note(&format!("requested on the command line with `{} {}`",
flag, hyphen_case_lint_name));
} else {
},
Node(lint_attr_name, src) => {
def = Some(src);
- if lint_attr_name.as_str().deref() != name {
+ if lint_attr_name.as_str() != name {
let level_str = level.as_str();
err.note(&format!("#[{}({})] implied by #[{}({})]",
level_str, name, level_str, lint_attr_name));
/// Perform lint checking on a crate.
///
/// Consumes the `lint_store` field of the `Session`.
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- access_levels: &AccessLevels) {
+pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let _task = tcx.dep_graph.in_task(DepNode::LateLintCheck);
+ let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
+
let krate = tcx.hir.krate();
// We want to own the lint store, so move it out of the session.
fn item_generics_cloned(&self, def: DefId) -> ty::Generics;
fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute>;
fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name>;
- fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId>;
// trait info
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>;
fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, LibSource)>;
fn used_crate_source(&self, cnum: CrateNum) -> CrateSource;
fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum>;
- fn encode_metadata<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- reexports: &def::ExportMap,
+ fn encode_metadata<'a, 'tcx>(&self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
link_meta: &LinkMeta,
reachable: &NodeSet) -> Vec<u8>;
fn metadata_encoding_version(&self) -> &[u8];
{ bug!("item_generics_cloned") }
fn item_attrs(&self, def_id: DefId) -> Vec<ast::Attribute> { bug!("item_attrs") }
fn fn_arg_names(&self, did: DefId) -> Vec<ast::Name> { bug!("fn_arg_names") }
- fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId> { vec![] }
// trait info
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId> { vec![] }
{ vec![] }
fn used_crate_source(&self, cnum: CrateNum) -> CrateSource { bug!("used_crate_source") }
fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option<CrateNum> { None }
- fn encode_metadata<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- reexports: &def::ExportMap,
- link_meta: &LinkMeta,
- reachable: &NodeSet) -> Vec<u8> { vec![] }
+ fn encode_metadata<'a, 'tcx>(&self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ link_meta: &LinkMeta,
+ reachable: &NodeSet) -> Vec<u8> { vec![] }
fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") }
}
use middle::privacy;
use ty::{self, TyCtxt};
use hir::def::Def;
-use hir::def_id::{DefId};
+use hir::def_id::{DefId, LOCAL_CRATE};
use lint;
use util::nodemap::FxHashSet;
use syntax::{ast, codemap};
use syntax::attr;
+use syntax::codemap::DUMMY_SP;
use syntax_pos;
// Any local node that may call something in its body block should be
}
}
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- access_levels: &privacy::AccessLevels) {
+pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let _task = tcx.dep_graph.in_task(DepNode::DeadCheck);
+ let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
let krate = tcx.hir.krate();
let live_symbols = find_live(tcx, access_levels, krate);
let mut visitor = DeadVisitor { tcx: tcx, live_symbols: live_symbols };
use syntax::abi::Abi;
use syntax::ast;
use syntax::attr;
+use syntax::codemap::DUMMY_SP;
use hir;
+use hir::def_id::LOCAL_CRATE;
use hir::intravisit::{Visitor, NestedVisitorMap};
use hir::itemlikevisit::ItemLikeVisitor;
use hir::intravisit;
}
}
-pub fn find_reachable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- access_levels: &privacy::AccessLevels)
- -> NodeSet {
+pub fn find_reachable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> NodeSet {
let _task = tcx.dep_graph.in_task(DepNode::Reachability);
+ let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
+
let any_library = tcx.sess.crate_types.borrow().iter().any(|ty| {
*ty == config::CrateTypeRlib || *ty == config::CrateTypeDylib ||
*ty == config::CrateTypeProcMacro
use syntax::attr;
use syntax::ptr::P;
use syntax::symbol::keywords;
-use syntax_pos::Span;
+use syntax_pos::{mk_sp, Span};
use errors::DiagnosticBuilder;
use util::nodemap::{NodeMap, NodeSet, FxHashSet, FxHashMap, DefIdMap};
use rustc_back::slice;
self.resolve_elided_lifetimes(slice::ref_slice(lifetime_ref));
return;
}
- if lifetime_ref.name == keywords::StaticLifetime.name() {
+ if lifetime_ref.is_static() {
self.insert_lifetime(lifetime_ref, Region::Static);
return;
}
let lifetime_i = &lifetimes[i];
for lifetime in lifetimes {
- if lifetime.lifetime.name == keywords::StaticLifetime.name() {
+ if lifetime.lifetime.is_static() {
let lifetime = lifetime.lifetime;
let mut err = struct_span_err!(self.sess, lifetime.span, E0262,
"invalid lifetime parameter name: `{}`", lifetime.name);
self.check_lifetime_def_for_shadowing(old_scope, &lifetime_i.lifetime);
for bound in &lifetime_i.bounds {
- self.resolve_lifetime_ref(bound);
+ if !bound.is_static() {
+ self.resolve_lifetime_ref(bound);
+ } else {
+ self.insert_lifetime(bound, Region::Static);
+ let full_span = mk_sp(lifetime_i.lifetime.span.lo, bound.span.hi);
+ self.sess.struct_span_warn(full_span,
+ &format!("unnecessary lifetime parameter `{}`", lifetime_i.lifetime.name))
+ .help(&format!("you can use the `'static` lifetime directly, in place \
+ of `{}`", lifetime_i.lifetime.name))
+ .emit();
+ }
}
}
}
if !self.stability.borrow().active_features.contains(feature) {
let msg = match *reason {
Some(ref r) => format!("use of unstable library feature '{}': {}",
- &feature.as_str(), &r),
+ feature.as_str(), &r),
None => format!("use of unstable library feature '{}'", &feature)
};
emit_feature_err(&self.sess.parse_sess, &feature.as_str(), span,
/// Given the list of enabled features that were not language features (i.e. that
/// were expected to be library features), and the list of features used from
/// libraries, identify activated features that don't exist and error about them.
-pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- access_levels: &AccessLevels) {
+pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let sess = &tcx.sess;
+ let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
+
if tcx.stability.borrow().staged_api[&LOCAL_CRATE] && tcx.sess.features.borrow().staged_api {
let _task = tcx.dep_graph.in_task(DepNode::StabilityIndex);
let krate = tcx.hir.krate();
RawPtr(hir::Mutability),
}
+/// Information for `CoerceUnsized` impls, storing information we
+/// have computed about the coercion.
+///
+/// This struct can be obtained via the `coerce_impl_info` query.
+/// Demanding this struct also has the side-effect of reporting errors
+/// for inappropriate impls.
+#[derive(Clone, Copy, RustcEncodable, RustcDecodable, Debug)]
+pub struct CoerceUnsizedInfo {
+ /// If this is a "custom coerce" impl, then what kind of custom
+ /// coercion is it? This applies to impls of `CoerceUnsized` for
+ /// structs, primarily, where we store a bit of info about which
+ /// fields need to be coerced.
+ pub custom_kind: Option<CustomCoerceUnsized>
+}
+
#[derive(Clone, Copy, RustcEncodable, RustcDecodable, Debug)]
pub enum CustomCoerceUnsized {
/// Records the index of the field being coerced.
use lint;
use middle;
use hir::TraitMap;
-use hir::def::Def;
+use hir::def::{Def, ExportMap};
use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use hir::map as hir_map;
use hir::map::DisambiguatedDefPathData;
/// is relevant; generated by resolve.
pub trait_map: TraitMap,
+ /// Export map produced by name resolution.
+ pub export_map: ExportMap,
+
pub named_region_map: resolve_lifetime::NamedRegionMap,
pub region_maps: RegionMaps,
region_maps: region_maps,
variance_computed: Cell::new(false),
trait_map: resolutions.trait_map,
+ export_map: resolutions.export_map,
fulfilled_predicates: RefCell::new(fulfilled_predicates),
hir: hir,
maps: maps::Maps::new(dep_graph, providers),
} else {
// for local crates, check whether type info is
// available; typeck might not have completed yet
- self.maps.impl_trait_ref.borrow().contains_key(&impl_def_id)
+ self.maps.impl_trait_ref.borrow().contains_key(&impl_def_id) &&
+ self.maps.ty.borrow().contains_key(&impl_def_id)
};
if !use_types {
use dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig};
use hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use middle::const_val::ConstVal;
+use middle::privacy::AccessLevels;
use mir;
-use ty::{self, Ty, TyCtxt};
+use ty::{self, CrateInherentImpls, Ty, TyCtxt};
use rustc_data_structures::indexed_vec::IndexVec;
use std::cell::{RefCell, RefMut};
}
}
-impl<'tcx> QueryDescription for queries::coherent_inherent_impls<'tcx> {
+impl<'tcx> QueryDescription for queries::crate_inherent_impls<'tcx> {
+ fn describe(_: TyCtxt, k: CrateNum) -> String {
+ format!("all inherent impls defined in crate `{:?}`", k)
+ }
+}
+
+impl<'tcx> QueryDescription for queries::crate_inherent_impls_overlap_check<'tcx> {
fn describe(_: TyCtxt, _: CrateNum) -> String {
- format!("coherence checking all inherent impls")
+ format!("check for overlap between inherent impls defined in this crate")
}
}
}
}
+impl<'tcx> QueryDescription for queries::privacy_access_levels<'tcx> {
+ fn describe(_: TyCtxt, _: CrateNum) -> String {
+ format!("privacy access levels")
+ }
+}
+
macro_rules! define_maps {
(<$tcx:tt>
$($(#[$attr:meta])*
/// Maps a DefId of a type to a list of its inherent impls.
/// Contains implementations of methods that are inherent to a type.
/// Methods in these implementations don't need to be exported.
- pub inherent_impls: InherentImpls(DefId) -> Vec<DefId>,
+ pub inherent_impls: InherentImpls(DefId) -> Rc<Vec<DefId>>,
/// Maps from the def-id of a function/method or const/static
/// to its MIR. Mutation is done at an item granularity to
pub closure_type: ItemSignature(DefId) -> ty::PolyFnSig<'tcx>,
/// Caches CoerceUnsized kinds for impls on custom types.
- pub custom_coerce_unsized_kind: ItemSignature(DefId)
- -> ty::adjustment::CustomCoerceUnsized,
+ pub coerce_unsized_info: ItemSignature(DefId)
+ -> ty::adjustment::CoerceUnsizedInfo,
pub typeck_tables: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>,
pub coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (),
- pub coherent_inherent_impls: coherent_inherent_impls_dep_node(CrateNum) -> (),
+ /// Gets a complete map from all types to their inherent impls.
+ /// Not meant to be used directly outside of coherence.
+ /// (Defined only for LOCAL_CRATE)
+ pub crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> CrateInherentImpls,
+
+ /// Checks all types in the krate for overlap in their inherent impls. Reports errors.
+ /// Not meant to be used directly outside of coherence.
+ /// (Defined only for LOCAL_CRATE)
+ pub crate_inherent_impls_overlap_check: crate_inherent_impls_dep_node(CrateNum) -> (),
/// Results of evaluating monomorphic constants embedded in
/// other items, such as enum variant explicit discriminants.
pub monomorphic_const_eval: MonomorphicConstEval(DefId) -> Result<ConstVal<'tcx>, ()>,
+ /// Performs the privacy check and computes "access levels".
+ pub privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc<AccessLevels>,
+
pub mir_shims: mir_shim(ty::InstanceDef<'tcx>) -> &'tcx RefCell<mir::Mir<'tcx>>
}
DepNode::CoherenceCheckTrait(def_id)
}
-fn coherent_inherent_impls_dep_node(_: CrateNum) -> DepNode<DefId> {
+fn crate_inherent_impls_dep_node(_: CrateNum) -> DepNode<DefId> {
DepNode::Coherence
}
use dep_graph::{self, DepNode};
use hir::{map as hir_map, FreevarMap, TraitMap};
-use middle;
use hir::def::{Def, CtorKind, ExportMap};
use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
use middle::const_val::ConstVal;
use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
+use middle::privacy::AccessLevels;
use middle::region::{CodeExtent, ROOT_CODE_EXTENT};
use middle::resolve_lifetime::ObjectLifetimeDefault;
use mir::Mir;
use ty::util::IntTypeExt;
use ty::walk::TypeWalker;
use util::common::MemoizationMap;
-use util::nodemap::{NodeSet, FxHashMap};
+use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
use serialize::{self, Encodable, Encoder};
use std::borrow::Cow;
/// The complete set of all analyses described in this module. This is
/// produced by the driver and fed to trans and later passes.
+///
+/// NB: These contents are being migrated into queries using the
+/// *on-demand* infrastructure.
#[derive(Clone)]
pub struct CrateAnalysis {
- pub export_map: ExportMap,
- pub access_levels: middle::privacy::AccessLevels,
+ pub access_levels: Rc<AccessLevels>,
pub reachable: NodeSet,
pub name: String,
pub glob_map: Option<hir::GlobMap>,
pub freevars: FreevarMap,
pub trait_map: TraitMap,
pub maybe_unused_trait_imports: NodeSet,
+ pub export_map: ExportMap,
}
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
})
}
- pub fn custom_coerce_unsized_kind(self, did: DefId) -> adjustment::CustomCoerceUnsized {
- queries::custom_coerce_unsized_kind::get(self, DUMMY_SP, did)
+ pub fn coerce_unsized_info(self, did: DefId) -> adjustment::CoerceUnsizedInfo {
+ queries::coerce_unsized_info::get(self, DUMMY_SP, did)
}
pub fn associated_item(self, def_id: DefId) -> AssociatedItem {
def.flags.get().intersects(TraitFlags::HAS_DEFAULT_IMPL)
}
- /// Populates the type context with all the inherent implementations for
- /// the given type if necessary.
- pub fn populate_inherent_implementations_for_type_if_necessary(self,
- span: Span,
- type_id: DefId) {
- if type_id.is_local() {
- // Make sure coherence of inherent impls ran already.
- ty::queries::coherent_inherent_impls::force(self, span, LOCAL_CRATE);
- return
- }
-
- // The type is not local, hence we are reading this out of
- // metadata and don't need to track edges.
- let _ignore = self.dep_graph.in_ignore();
-
- if self.populated_external_types.borrow().contains(&type_id) {
- return
- }
-
- debug!("populate_inherent_implementations_for_type_if_necessary: searching for {:?}",
- type_id);
-
- let inherent_impls = self.sess.cstore.inherent_implementations_for_type(type_id);
-
- self.maps.inherent_impls.borrow_mut().insert(type_id, inherent_impls);
- self.populated_external_types.borrow_mut().insert(type_id);
- }
-
/// Populates the type context with all the implementations for the given
/// trait if necessary.
pub fn populate_implementations_for_trait_if_necessary(self, trait_id: DefId) {
..*providers
};
}
+
+
+/// A map for the local crate mapping each type to a vector of its
+/// inherent impls. This is not meant to be used outside of coherence;
+/// rather, you should request the vector for a specific type via
+/// `ty::queries::inherent_impls::get(def_id)` so as to minimize your
+/// dependencies (constructing this map requires touching the entire
+/// crate).
+#[derive(Clone, Debug)]
+pub struct CrateInherentImpls {
+ pub inherent_impls: DefIdMap<Rc<Vec<DefId>>>,
+}
+
[dependencies]
syntax = { path = "../libsyntax" }
serialize = { path = "../libserialize" }
-log = { path = "../liblog" }
+log = "0.3"
[features]
jemalloc = []
test = false
[dependencies]
-log = { path = "../liblog" }
+log = "0.3"
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
graphviz = { path = "../libgraphviz" }
// First, filter out duplicates
moved.sort();
moved.dedup();
- debug!("fragments 1 moved: {:?}", path_lps(&moved[..]));
+ debug!("fragments 1 moved: {:?}", path_lps(&moved));
assigned.sort();
assigned.dedup();
- debug!("fragments 1 assigned: {:?}", path_lps(&assigned[..]));
+ debug!("fragments 1 assigned: {:?}", path_lps(&assigned));
// Second, build parents from the moved and assigned.
for m in &moved {
parents.sort();
parents.dedup();
- debug!("fragments 2 parents: {:?}", path_lps(&parents[..]));
+ debug!("fragments 2 parents: {:?}", path_lps(&parents));
// Third, filter the moved and assigned fragments down to just the non-parents
- moved.retain(|f| non_member(*f, &parents[..]));
- debug!("fragments 3 moved: {:?}", path_lps(&moved[..]));
+ moved.retain(|f| non_member(*f, &parents));
+ debug!("fragments 3 moved: {:?}", path_lps(&moved));
- assigned.retain(|f| non_member(*f, &parents[..]));
- debug!("fragments 3 assigned: {:?}", path_lps(&assigned[..]));
+ assigned.retain(|f| non_member(*f, &parents));
+ debug!("fragments 3 assigned: {:?}", path_lps(&assigned));
// Fourth, build the leftover from the moved, assigned, and parents.
for m in &moved {
unmoved.sort();
unmoved.dedup();
- debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved[..]));
+ debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved));
// Fifth, filter the leftover fragments down to its core.
unmoved.retain(|f| match *f {
AllButOneFrom(_) => true,
- Just(mpi) => non_member(mpi, &parents[..]) &&
- non_member(mpi, &moved[..]) &&
- non_member(mpi, &assigned[..])
+ Just(mpi) => non_member(mpi, &parents) &&
+ non_member(mpi, &moved) &&
+ non_member(mpi, &assigned)
});
- debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved[..]));
+ debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved));
// Swap contents back in.
fragments.unmoved_fragments = unmoved;
&flowed_moves.move_data,
owner_id);
- check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans[..], body);
+ check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans, body);
}
fn build_borrowck_dataflow_data<'a, 'tcx>(this: &mut BorrowckCtxt<'a, 'tcx>,
set.push_str(", ");
}
let loan_str = self.borrowck_ctxt.loan_path_to_string(&lp);
- set.push_str(&loan_str[..]);
+ set.push_str(&loan_str);
saw_some = true;
true
});
[dependencies]
arena = { path = "../libarena" }
-log = { path = "../liblog" }
+log = "0.3"
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
rustc_const_math = { path = "../librustc_const_math" }
}).collect();
let wild_patterns: Vec<_> = wild_patterns_owned.iter().collect();
let matrix = Matrix(m.iter().flat_map(|r| {
- specialize(cx, &r[..], &ctor, &wild_patterns)
+ specialize(cx, &r, &ctor, &wild_patterns)
}).collect());
match specialize(cx, v, &ctor, &wild_patterns) {
- Some(v) => match is_useful(cx, &matrix, &v[..], witness) {
+ Some(v) => match is_useful(cx, &matrix, &v, witness) {
UsefulWithWitness(witnesses) => UsefulWithWitness(
witnesses.into_iter()
.map(|witness| witness.apply_constructor(cx, &ctor, lty))
for &(pat, hir_pat) in pats {
let v = vec![pat];
- match is_useful(cx, &seen, &v[..], LeaveOutWitness) {
+ match is_useful(cx, &seen, &v, LeaveOutWitness) {
NotUseful => {
match source {
hir::MatchSource::IfLetDesugar { .. } => {
crate-type = ["dylib"]
[dependencies]
-log = { path = "../liblog" }
+log = "0.3"
serialize = { path = "../libserialize" }
type Target = [A::Element];
fn deref(&self) -> &Self::Target {
match *self {
- AccumulateVec::Array(ref v) => &v[..],
- AccumulateVec::Heap(ref v) => &v[..],
+ AccumulateVec::Array(ref v) => v,
+ AccumulateVec::Heap(ref v) => v,
}
}
}
impl<A: Array> DerefMut for AccumulateVec<A> {
fn deref_mut(&mut self) -> &mut [A::Element] {
match *self {
- AccumulateVec::Array(ref mut v) => &mut v[..],
- AccumulateVec::Heap(ref mut v) => &mut v[..],
+ AccumulateVec::Array(ref mut v) => v,
+ AccumulateVec::Heap(ref mut v) => v,
}
}
}
#[test]
fn test_encode() {
fn test(n: u64, base: u64) {
- assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base)[..], base as u32));
+ assert_eq!(Ok(n), u64::from_str_radix(&encode(n, base), base as u32));
}
for base in 2..37 {
impl ::std::fmt::Debug for Blake2bCtx {
fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> {
try!(write!(fmt, "hash: "));
- for v in &self.h[..] {
+ for v in &self.h {
try!(write!(fmt, "{:x}", v));
}
Ok(())
impl<T: Idx> Deref for IdxSetBuf<T> {
type Target = IdxSet<T>;
fn deref(&self) -> &IdxSet<T> {
- unsafe { IdxSet::from_slice(&self.bits[..]) }
+ unsafe { IdxSet::from_slice(&self.bits) }
}
}
impl<T: Idx> DerefMut for IdxSetBuf<T> {
fn deref_mut(&mut self) -> &mut IdxSet<T> {
- unsafe { IdxSet::from_slice_mut(&mut self.bits[..]) }
+ unsafe { IdxSet::from_slice_mut(&mut self.bits) }
}
}
}
pub fn words(&self) -> &[Word] {
- &self.bits[..]
+ &self.bits
}
pub fn words_mut(&mut self) -> &mut [Word] {
- &mut self.bits[..]
+ &mut self.bits
}
pub fn clone_from(&mut self, other: &IdxSet<T>) {
[dependencies]
arena = { path = "../libarena" }
graphviz = { path = "../libgraphviz" }
-log = { path = "../liblog" }
+log = { version = "0.3", features = ["release_max_level_info"] }
+env_logger = { version = "0.4", default-features = false }
proc_macro_plugin = { path = "../libproc_macro_plugin" }
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
use std::io::{self, Write};
use std::iter;
use std::path::{Path, PathBuf};
+use std::rc::Rc;
use syntax::{ast, diagnostics, visit};
use syntax::attr;
use syntax::ext::base::ExtCtxt;
result?;
- if log_enabled!(::log::INFO) {
+ if log_enabled!(::log::LogLevel::Info) {
println!("Pre-trans");
tcx.print_debug_stats();
}
let trans = phase_4_translate_to_llvm(tcx, analysis, &incremental_hashes_map);
- if log_enabled!(::log::INFO) {
+ if log_enabled!(::log::LogLevel::Info) {
println!("Post-trans");
tcx.print_debug_stats();
}
}
fn keep_ast(sess: &Session) -> bool {
- sess.opts.debugging_opts.keep_ast ||
- sess.opts.debugging_opts.save_analysis ||
- sess.opts.debugging_opts.save_analysis_csv ||
- sess.opts.debugging_opts.save_analysis_api
+ sess.opts.debugging_opts.keep_ast || ::save_analysis(sess)
}
/// The name used for source code that doesn't originate in a file
expanded_crate: krate,
defs: resolver.definitions,
analysis: ty::CrateAnalysis {
- export_map: resolver.export_map,
- access_levels: AccessLevels::default(),
+ access_levels: Rc::new(AccessLevels::default()),
reachable: NodeSet(),
name: crate_name.to_string(),
glob_map: if resolver.make_glob_map { Some(resolver.glob_map) } else { None },
},
resolutions: Resolutions {
freevars: resolver.freevars,
+ export_map: resolver.export_map,
trait_map: resolver.trait_map,
maybe_unused_trait_imports: resolver.maybe_unused_trait_imports,
},
- hir_forest: hir_forest
+ hir_forest: hir_forest,
})
}
let mut local_providers = ty::maps::Providers::default();
mir::provide(&mut local_providers);
+ rustc_privacy::provide(&mut local_providers);
typeck::provide(&mut local_providers);
ty::provide(&mut local_providers);
|| consts::check_crate(tcx));
analysis.access_levels =
- time(time_passes, "privacy checking", || {
- rustc_privacy::check_crate(tcx, &analysis.export_map)
- });
+ time(time_passes, "privacy checking", || rustc_privacy::check_crate(tcx));
time(time_passes,
"intrinsic checking",
analysis.reachable =
time(time_passes,
"reachability checking",
- || reachable::find_reachable(tcx, &analysis.access_levels));
+ || reachable::find_reachable(tcx));
- time(time_passes, "death checking", || {
- middle::dead::check_crate(tcx, &analysis.access_levels);
- });
+ time(time_passes, "death checking", || middle::dead::check_crate(tcx));
time(time_passes, "unused lib feature checking", || {
- stability::check_unused_or_stable_features(tcx, &analysis.access_levels)
+ stability::check_unused_or_stable_features(tcx)
});
- time(time_passes,
- "lint checking",
- || lint::check_crate(tcx, &analysis.access_levels));
+ time(time_passes, "lint checking", || lint::check_crate(tcx));
// The above three passes generate errors w/o aborting
if sess.err_count() > 0 {
extern crate arena;
extern crate getopts;
extern crate graphviz;
+extern crate env_logger;
extern crate libc;
extern crate rustc;
extern crate rustc_back;
use rustc_resolve as resolve;
use rustc_save_analysis as save;
+use rustc_save_analysis::DumpHandler;
use rustc_trans::back::link;
use rustc_trans::back::write::{create_target_machine, RELOC_MODEL_ARGS, CODE_GEN_MODEL_ARGS};
use rustc::dep_graph::DepGraph;
// Extract input (string or file and optional path) from matches.
fn make_input(free_matches: &[String]) -> Option<(Input, Option<PathBuf>)> {
if free_matches.len() == 1 {
- let ifile = &free_matches[0][..];
+ let ifile = &free_matches[0];
if ifile == "-" {
let mut src = String::new();
io::stdin().read_to_string(&mut src).unwrap();
state.expanded_crate.unwrap(),
state.analysis.unwrap(),
state.crate_name.unwrap(),
- state.out_dir,
- save_analysis_format(state.session))
+ DumpHandler::new(save_analysis_format(state.session),
+ state.out_dir,
+ state.crate_name.unwrap()))
});
};
control.after_analysis.run_callback_on_error = true;
for lint in lints {
let name = lint.name_lower().replace("_", "-");
println!(" {} {:7.7} {}",
- padded(&name[..]),
+ padded(&name),
lint.default_level.as_str(),
lint.desc);
}
.map(|x| x.to_string().replace("_", "-"))
.collect::<Vec<String>>()
.join(", ");
- println!(" {} {}", padded(&name[..]), desc);
+ println!(" {} {}", padded(&name), desc);
}
println!("\n");
};
.into_iter()
.map(|x| x.opt_group)
.collect();
- let matches = match getopts::getopts(&args[..], &all_groups) {
+ let matches = match getopts::getopts(&args, &all_groups) {
Ok(m) => m,
Err(f) => early_error(ErrorOutputType::default(), &f.to_string()),
};
format!("we would appreciate a bug report: {}", BUG_REPORT_URL)];
for note in &xs {
handler.emit(&MultiSpan::new(),
- ¬e[..],
+ ¬e,
errors::Level::Note);
}
if match env::var_os("RUST_BACKTRACE") {
}
pub fn main() {
+ env_logger::init().unwrap();
let result = run(|| run_compiler(&env::args().collect::<Vec<_>>(),
&mut RustcDefaultCalls,
None,
-> NodesMatchingUII<'a, 'hir> {
match *self {
ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).into_iter()),
- ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts[..])),
+ ItemViaPath(ref parts) => NodesMatchingSuffix(map.nodes_matching_suffix(&parts)),
}
}
user_option,
self.reconstructed_input(),
is_wrong_because);
- sess.fatal(&message[..])
+ sess.fatal(&message)
};
let mut saw_node = ast::DUMMY_NODE_ID;
fn expand_err_details(r: io::Result<()>) -> io::Result<()> {
r.map_err(|ioerr| {
io::Error::new(io::ErrorKind::Other,
- &format!("graphviz::render failed: {}", ioerr)[..])
+ format!("graphviz::render failed: {}", ioerr))
})
}
}
pub fn t_param(&self, index: u32) -> Ty<'tcx> {
let name = format!("T{}", index);
- self.infcx.tcx.mk_param(index, Symbol::intern(&name[..]))
+ self.infcx.tcx.mk_param(index, Symbol::intern(&name))
}
pub fn re_early_bound(&self, index: u32, name: &'static str) -> &'tcx ty::Region {
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }
serialize = { path = "../libserialize" }
-log = { path = "../liblog" }
+log = "0.3"
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
let rustc_version_str_len = rustc_version_str_len[0] as usize;
let mut buffer = Vec::with_capacity(rustc_version_str_len);
buffer.resize(rustc_version_str_len, 0);
- file.read_exact(&mut buffer[..])?;
+ file.read_exact(&mut buffer)?;
- if &buffer[..] != rustc_version().as_bytes() {
+ if buffer != rustc_version().as_bytes() {
report_format_mismatch(sess, path, "Different compiler version");
return Ok(None);
}
test = false
[dependencies]
-log = { path = "../liblog" }
+log = "0.3"
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
rustc_const_eval = { path = "../librustc_const_eval" }
} else {
format!("{} `{}` should have a camel case name such as `{}`", sort, name, c)
};
- cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]);
+ cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m);
}
}
}
attr.check_name("doc") &&
match attr.meta_item_list() {
None => false,
- Some(l) => attr::list_contains_name(&l[..], "hidden"),
+ Some(l) => attr::list_contains_name(&l, "hidden"),
}
});
self.doc_hidden_stack.push(doc_hidden);
ty::TyBool => return,
ty::TyAdt(def, _) => {
let attrs = cx.tcx.get_attrs(def.did);
- check_must_use(cx, &attrs[..], s.span)
+ check_must_use(cx, &attrs, s.span)
}
_ => false,
};
cfg.flag(flag);
}
- for component in &components[..] {
+ for component in &components {
let mut flag = String::from("-DLLVM_COMPONENT_");
flag.push_str(&component.to_uppercase());
cfg.flag(&flag);
if !is_crossed {
cmd.arg("--system-libs");
}
- cmd.args(&components[..]);
+ cmd.args(&components);
for lib in output(&mut cmd).split_whitespace() {
let name = if lib.starts_with("-l") {
[dependencies]
flate = { path = "../libflate" }
-log = { path = "../liblog" }
+log = "0.3"
proc_macro = { path = "../libproc_macro" }
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
name,
config::host_triple(),
self.sess.opts.target_triple);
- span_fatal!(self.sess, span, E0456, "{}", &message[..]);
+ span_fatal!(self.sess, span, E0456, "{}", &message);
}
let root = ekrate.metadata.get_root();
self.inject_allocator_crate();
self.inject_panic_runtime(krate);
- if log_enabled!(log::INFO) {
+ if log_enabled!(log::LogLevel::Info) {
dump_crates(&self.cstore);
}
}
associated_item => { cdata.get_associated_item(def_id.index) }
impl_trait_ref => { cdata.get_impl_trait(def_id.index, tcx) }
- custom_coerce_unsized_kind => {
- cdata.get_custom_coerce_unsized_kind(def_id.index).unwrap_or_else(|| {
- bug!("custom_coerce_unsized_kind: `{:?}` is missing its kind", def_id);
+ coerce_unsized_info => {
+ cdata.get_coerce_unsized_info(def_id.index).unwrap_or_else(|| {
+ bug!("coerce_unsized_info: `{:?}` is missing its info", def_id);
})
}
mir => {
typeck_tables => { cdata.item_body_tables(def_id.index, tcx) }
closure_kind => { cdata.closure_kind(def_id.index) }
closure_type => { cdata.closure_ty(def_id.index, tcx) }
+ inherent_impls => { Rc::new(cdata.get_inherent_implementations_for_type(def_id.index)) }
}
impl CrateStore for cstore::CStore {
self.get_crate_data(did.krate).get_fn_arg_names(did.index)
}
- fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec<DefId>
- {
- self.dep_graph.read(DepNode::MetaData(def_id));
- self.get_crate_data(def_id.krate).get_inherent_implementations_for_type(def_id.index)
- }
-
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId>
{
if let Some(def_id) = filter {
self.do_extern_mod_stmt_cnum(emod_id)
}
- fn encode_metadata<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
- reexports: &def::ExportMap,
+ fn encode_metadata<'a, 'tcx>(&self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
link_meta: &LinkMeta,
reachable: &NodeSet) -> Vec<u8>
{
- encoder::encode_metadata(tcx, self, reexports, link_meta, reachable)
+ encoder::encode_metadata(tcx, self, link_meta, reachable)
}
fn metadata_encoding_version(&self) -> &[u8]
self.get_impl_data(id).polarity
}
- pub fn get_custom_coerce_unsized_kind(&self,
- id: DefIndex)
- -> Option<ty::adjustment::CustomCoerceUnsized> {
- self.get_impl_data(id).coerce_unsized_kind
+ pub fn get_coerce_unsized_info(&self,
+ id: DefIndex)
+ -> Option<ty::adjustment::CoerceUnsizedInfo> {
+ self.get_impl_data(id).coerce_unsized_info
}
pub fn get_impl_trait(&self,
use schema::*;
use rustc::middle::cstore::{LinkMeta, LinkagePreference, NativeLibrary};
-use rustc::hir::def;
use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefIndex, DefId};
use rustc::hir::map::definitions::DefPathTable;
use rustc::middle::dependency_format::Linkage;
pub struct EncodeContext<'a, 'tcx: 'a> {
opaque: opaque::Encoder<'a>,
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
- reexports: &'a def::ExportMap,
link_meta: &'a LinkMeta,
cstore: &'a cstore::CStore,
exported_symbols: &'a NodeSet,
let def_id = tcx.hir.local_def_id(id);
let data = ModData {
- reexports: match self.reexports.get(&id) {
+ reexports: match tcx.export_map.get(&id) {
Some(exports) if *vis == hir::Public => self.lazy_seq_ref(exports),
_ => LazySeq::empty(),
},
let data = ImplData {
polarity: hir::ImplPolarity::Positive,
parent_impl: None,
- coerce_unsized_kind: None,
+ coerce_unsized_info: None,
trait_ref: tcx.impl_trait_ref(def_id).map(|trait_ref| self.lazy(&trait_ref)),
};
None
};
+ // if this is an impl of `CoerceUnsized`, create its
+ // "unsized info", else just store None
+ let coerce_unsized_info =
+ trait_ref.and_then(|t| {
+ if Some(t.def_id) == tcx.lang_items.coerce_unsized_trait() {
+ Some(ty::queries::coerce_unsized_info::get(tcx, item.span, def_id))
+ } else {
+ None
+ }
+ });
+
let data = ImplData {
polarity: polarity,
parent_impl: parent,
- coerce_unsized_kind: tcx.maps.custom_coerce_unsized_kind
- .borrow()
- .get(&def_id)
- .cloned(),
+ coerce_unsized_info: coerce_unsized_info,
trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)),
};
self.encode_fields(def_id);
}
hir::ItemImpl(..) => {
- for &trait_item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] {
+ for &trait_item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
self.record(trait_item_def_id,
EncodeContext::encode_info_for_impl_item,
trait_item_def_id);
}
}
hir::ItemTrait(..) => {
- for &item_def_id in &self.tcx.associated_item_def_ids(def_id)[..] {
+ for &item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
self.record(item_def_id,
EncodeContext::encode_info_for_trait_item,
item_def_id);
pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
cstore: &cstore::CStore,
- reexports: &def::ExportMap,
link_meta: &LinkMeta,
exported_symbols: &NodeSet)
-> Vec<u8> {
let mut ecx = EncodeContext {
opaque: opaque::Encoder::new(&mut cursor),
tcx: tcx,
- reexports: reexports,
link_meta: link_meta,
cstore: cstore,
exported_symbols: exported_symbols,
Some(file) => file,
};
let (hash, found_kind) =
- if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rlib") {
+ if file.starts_with(&rlib_prefix) && file.ends_with(".rlib") {
(&file[(rlib_prefix.len())..(file.len() - ".rlib".len())], CrateFlavor::Rlib)
- } else if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rmeta") {
+ } else if file.starts_with(&rlib_prefix) && file.ends_with(".rmeta") {
(&file[(rlib_prefix.len())..(file.len() - ".rmeta".len())], CrateFlavor::Rmeta)
} else if file.starts_with(&dylib_prefix) &&
file.ends_with(&dypair.1) {
(&file[(dylib_prefix.len())..(file.len() - dypair.1.len())], CrateFlavor::Dylib)
} else {
- if file.starts_with(&staticlib_prefix[..]) && file.ends_with(&staticpair.1) {
+ if file.starts_with(&staticlib_prefix) && file.ends_with(&staticpair.1) {
staticlibs.push(CrateMismatch {
path: path.to_path_buf(),
got: "static".to_string(),
pub struct ImplData<'tcx> {
pub polarity: hir::ImplPolarity,
pub parent_impl: Option<DefId>,
- pub coerce_unsized_kind: Option<ty::adjustment::CustomCoerceUnsized>,
+
+ /// This is `Some` only for impls of `CoerceUnsized`.
+ pub coerce_unsized_info: Option<ty::adjustment::CoerceUnsizedInfo>,
pub trait_ref: Option<Lazy<ty::TraitRef<'tcx>>>,
}
[dependencies]
graphviz = { path = "../libgraphviz" }
-log = { path = "../liblog" }
+log = "0.3"
rustc = { path = "../librustc" }
rustc_const_eval = { path = "../librustc_const_eval" }
rustc_const_math = { path = "../librustc_const_math" }
crate-type = ["dylib"]
[dependencies]
-log = { path = "../liblog" }
+log = "0.3"
rustc = { path = "../librustc" }
rustc_const_eval = { path = "../librustc_const_eval" }
rustc_const_math = { path = "../librustc_const_math" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
-rustc_errors = { path = "../librustc_errors" }
\ No newline at end of file
+rustc_errors = { path = "../librustc_errors" }
// inside this crate, so continue would spew "macro undefined"
// errors
Err(err) => {
- self.sess.span_fatal(span, &err[..])
+ self.sess.span_fatal(span, &err)
}
};
unsafe {
let registrar =
- match lib.symbol(&symbol[..]) {
+ match lib.symbol(&symbol) {
Ok(registrar) => {
mem::transmute::<*mut u8,PluginRegistrarFun>(registrar)
}
// again fatal if we can't register macros
Err(err) => {
- self.sess.span_fatal(span, &err[..])
+ self.sess.span_fatal(span, &err)
}
};
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![deny(warnings)]
+#![cfg_attr(stage0, feature(field_init_shorthand))]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(staged_api)]
#[macro_use] extern crate syntax;
extern crate syntax_pos;
-use rustc::dep_graph::DepNode;
use rustc::hir::{self, PatKind};
-use rustc::hir::def::{self, Def};
-use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
+use rustc::hir::def::Def;
+use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, CrateNum, DefId};
use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
use rustc::hir::itemlikevisit::DeepVisitor;
use rustc::hir::pat_util::EnumerateAndAdjustIterator;
use rustc::middle::privacy::{AccessLevel, AccessLevels};
use rustc::ty::{self, TyCtxt, Ty, TypeFoldable};
use rustc::ty::fold::TypeVisitor;
+use rustc::ty::maps::Providers;
use rustc::util::nodemap::NodeSet;
use syntax::ast;
-use syntax_pos::Span;
+use syntax_pos::{DUMMY_SP, Span};
use std::cmp;
use std::mem::replace;
+use std::rc::Rc;
pub mod diagnostics;
struct EmbargoVisitor<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- export_map: &'a def::ExportMap,
// Accessibility levels for reachable nodes
access_levels: AccessLevels,
// This code is here instead of in visit_item so that the
// crate module gets processed as well.
if self.prev_level.is_some() {
- if let Some(exports) = self.export_map.get(&id) {
+ if let Some(exports) = self.tcx.export_map.get(&id) {
for export in exports {
if let Some(node_id) = self.tcx.hir.as_local_node_id(export.def.def_id()) {
self.update(node_id, Some(AccessLevel::Exported));
fn visit_pat(&mut self, _: &'tcx hir::Pat) {}
}
-pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- export_map: &def::ExportMap)
- -> AccessLevels {
- let _task = tcx.dep_graph.in_task(DepNode::Privacy);
+pub fn provide(providers: &mut Providers) {
+ *providers = Providers {
+ privacy_access_levels,
+ ..*providers
+ };
+}
+
+pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Rc<AccessLevels> {
+ tcx.dep_graph.with_ignore(|| { // FIXME
+ ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE)
+ })
+}
+
+fn privacy_access_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ krate: CrateNum)
+ -> Rc<AccessLevels> {
+ assert_eq!(krate, LOCAL_CRATE);
let krate = tcx.hir.krate();
// items which are reachable from external crates based on visibility.
let mut visitor = EmbargoVisitor {
tcx: tcx,
- export_map: export_map,
access_levels: Default::default(),
prev_level: Some(AccessLevel::Public),
changed: false,
krate.visit_all_item_likes(&mut DeepVisitor::new(&mut visitor));
}
- visitor.access_levels
+ Rc::new(visitor.access_levels)
}
__build_diagnostic_array! { librustc_privacy, DIAGNOSTICS }
test = false
[dependencies]
-log = { path = "../liblog" }
+log = "0.3"
syntax = { path = "../libsyntax" }
rustc = { path = "../librustc" }
arena = { path = "../libarena" }
binding: &'a NameBinding<'a>,
span: Span,
allow_shadowing: bool) {
- if self.builtin_macros.insert(name, binding).is_some() && !allow_shadowing {
+ if self.global_macros.insert(name, binding).is_some() && !allow_shadowing {
let msg = format!("`{}` is already in scope", name);
let note =
"macro-expanded `#[macro_use]`s may not shadow existing macros (see RFC 1560)";
use std::rc::Rc;
use resolve_imports::{ImportDirective, ImportDirectiveSubclass, NameResolution, ImportResolver};
-use macros::{InvocationData, LegacyBinding, LegacyScope};
+use macros::{InvocationData, LegacyBinding, LegacyScope, MacroBinding};
// NB: This module needs to be declared first so diagnostics are
// registered before they are used.
crate_loader: &'a mut CrateLoader,
macro_names: FxHashSet<Name>,
- builtin_macros: FxHashMap<Name, &'a NameBinding<'a>>,
+ global_macros: FxHashMap<Name, &'a NameBinding<'a>>,
lexical_macro_resolutions: Vec<(Name, &'a Cell<LegacyScope<'a>>)>,
macro_map: FxHashMap<DefId, Rc<SyntaxExtension>>,
macro_defs: FxHashMap<Mark, DefId>,
crate_loader: crate_loader,
macro_names: FxHashSet(),
- builtin_macros: FxHashMap(),
+ global_macros: FxHashMap(),
lexical_macro_resolutions: Vec::new(),
macro_map: FxHashMap(),
macro_exports: Vec::new(),
};
}
}
- let is_builtin = self.builtin_macros.get(&path[0].name).cloned()
+ let is_global = self.global_macros.get(&path[0].name).cloned()
.map(|binding| binding.get_macro(self).kind() == MacroKind::Bang).unwrap_or(false);
- if primary_ns != MacroNS && (is_builtin || self.macro_names.contains(&path[0].name)) {
+ if primary_ns != MacroNS && (is_global || self.macro_names.contains(&path[0].name)) {
// Return some dummy definition, it's enough for error reporting.
return Some(
PathResolution::new(Def::Macro(DefId::local(CRATE_DEF_INDEX), MacroKind::Bang))
self.resolve_ident_in_module(module, ident, ns, false, record_used)
} else if opt_ns == Some(MacroNS) {
self.resolve_lexical_macro_path_segment(ident, ns, record_used)
+ .map(MacroBinding::binding)
} else {
match self.resolve_ident_in_lexical_scope(ident, ns, record_used) {
Some(LexicalScopeBinding::Item(binding)) => Ok(binding),
};
let msg1 = format!("`{}` could refer to the name {} here", name, participle(b1));
let msg2 = format!("`{}` could also refer to the name {} here", name, participle(b2));
- let note = if !lexical && b1.is_glob_import() {
+ let note = if b1.expansion == Mark::root() || !lexical && b1.is_glob_import() {
format!("consider adding an explicit import of `{}` to disambiguate", name)
} else if let Def::Macro(..) = b1.def() {
format!("macro-expanded {} do not shadow",
let msg = format!("`{}` is ambiguous", name);
self.session.add_lint(lint::builtin::LEGACY_IMPORTS, id, span, msg);
} else {
- self.session.struct_span_err(span, &format!("`{}` is ambiguous", name))
- .span_note(b1.span, &msg1)
- .span_note(b2.span, &msg2)
- .note(¬e)
- .emit();
+ let mut err =
+ self.session.struct_span_err(span, &format!("`{}` is ambiguous", name));
+ err.span_note(b1.span, &msg1);
+ match b2.def() {
+ Def::Macro(..) if b2.span == DUMMY_SP =>
+ err.note(&format!("`{}` is also a builtin macro", name)),
+ _ => err.span_note(b2.span, &msg2),
+ };
+ err.note(¬e).emit();
}
}
if self.proc_macro_enabled { return; }
for attr in attrs {
- let name = unwrap_or!(attr.name(), continue);
- let maybe_binding = self.builtin_macros.get(&name).cloned().or_else(|| {
- let ident = Ident::with_empty_ctxt(name);
- self.resolve_lexical_macro_path_segment(ident, MacroNS, None).ok()
- });
-
- if let Some(binding) = maybe_binding {
- if let SyntaxExtension::AttrProcMacro(..) = *binding.get_macro(self) {
+ if attr.path.segments.len() > 1 {
+ continue
+ }
+ let ident = attr.path.segments[0].identifier;
+ let result = self.resolve_lexical_macro_path_segment(ident, MacroNS, None);
+ if let Ok(binding) = result {
+ if let SyntaxExtension::AttrProcMacro(..) = *binding.binding().get_macro(self) {
attr::mark_known(attr);
let msg = "attribute procedural macros are experimental";
feature_err(&self.session.parse_sess, feature,
attr.span, GateIssue::Language, msg)
- .span_note(binding.span, "procedural macro imported here")
+ .span_note(binding.span(), "procedural macro imported here")
.emit();
}
}
pub span: Span,
}
+#[derive(Copy, Clone)]
pub enum MacroBinding<'a> {
Legacy(&'a LegacyBinding<'a>),
+ Global(&'a NameBinding<'a>),
Modern(&'a NameBinding<'a>),
}
+impl<'a> MacroBinding<'a> {
+ pub fn span(self) -> Span {
+ match self {
+ MacroBinding::Legacy(binding) => binding.span,
+ MacroBinding::Global(binding) | MacroBinding::Modern(binding) => binding.span,
+ }
+ }
+
+ pub fn binding(self) -> &'a NameBinding<'a> {
+ match self {
+ MacroBinding::Global(binding) | MacroBinding::Modern(binding) => binding,
+ MacroBinding::Legacy(_) => panic!("unexpected MacroBinding::Legacy"),
+ }
+ }
+}
+
impl<'a> base::Resolver for Resolver<'a> {
fn next_node_id(&mut self) -> ast::NodeId {
self.session.next_node_id()
vis: ty::Visibility::Invisible,
expansion: Mark::root(),
});
- self.builtin_macros.insert(ident.name, binding);
+ self.global_macros.insert(ident.name, binding);
}
fn resolve_imports(&mut self) {
attr::mark_known(&attrs[i]);
}
- match self.builtin_macros.get(&name).cloned() {
+ match self.global_macros.get(&name).cloned() {
Some(binding) => match *binding.get_macro(self) {
MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) => {
return Some(attrs.remove(i))
}
let trait_name = traits[j].segments[0].identifier.name;
let legacy_name = Symbol::intern(&format!("derive_{}", trait_name));
- if !self.builtin_macros.contains_key(&legacy_name) {
+ if !self.global_macros.contains_key(&legacy_name) {
continue
}
let span = traits.remove(j).span;
}
let name = path[0].name;
- let result = match self.resolve_legacy_scope(&invocation.legacy_scope, name, false) {
- Some(MacroBinding::Legacy(binding)) => Ok(Def::Macro(binding.def_id, MacroKind::Bang)),
- Some(MacroBinding::Modern(binding)) => Ok(binding.def_ignoring_ambiguity()),
- None => match self.resolve_lexical_macro_path_segment(path[0], MacroNS, None) {
- Ok(binding) => Ok(binding.def_ignoring_ambiguity()),
- Err(Determinacy::Undetermined) if !force =>
- return Err(Determinacy::Undetermined),
+ let legacy_resolution = self.resolve_legacy_scope(&invocation.legacy_scope, name, false);
+ let result = if let Some(MacroBinding::Legacy(binding)) = legacy_resolution {
+ Ok(Def::Macro(binding.def_id, MacroKind::Bang))
+ } else {
+ match self.resolve_lexical_macro_path_segment(path[0], MacroNS, None) {
+ Ok(binding) => Ok(binding.binding().def_ignoring_ambiguity()),
+ Err(Determinacy::Undetermined) if !force => return Err(Determinacy::Undetermined),
Err(_) => {
self.found_unresolved_macro = true;
Err(Determinacy::Determined)
}
- },
+ }
};
self.current_module.legacy_macro_resolutions.borrow_mut()
ident: Ident,
ns: Namespace,
record_used: Option<Span>)
- -> Result<&'a NameBinding<'a>, Determinacy> {
- let mut module = self.current_module;
- let mut potential_expanded_shadower: Option<&NameBinding> = None;
+ -> Result<MacroBinding<'a>, Determinacy> {
+ let mut module = Some(self.current_module);
+ let mut potential_illegal_shadower = Err(Determinacy::Determined);
+ let determinacy =
+ if record_used.is_some() { Determinacy::Determined } else { Determinacy::Undetermined };
loop {
- // Since expanded macros may not shadow the lexical scope (enforced below),
- // we can ignore unresolved invocations (indicated by the penultimate argument).
- match self.resolve_ident_in_module(module, ident, ns, true, record_used) {
+ let result = if let Some(module) = module {
+ // Since expanded macros may not shadow the lexical scope and
+ // globs may not shadow global macros (both enforced below),
+ // we resolve with restricted shadowing (indicated by the penultimate argument).
+ self.resolve_ident_in_module(module, ident, ns, true, record_used)
+ .map(MacroBinding::Modern)
+ } else {
+ self.global_macros.get(&ident.name).cloned().ok_or(determinacy)
+ .map(MacroBinding::Global)
+ };
+
+ match result.map(MacroBinding::binding) {
Ok(binding) => {
let span = match record_used {
Some(span) => span,
- None => return Ok(binding),
+ None => return result,
};
- match potential_expanded_shadower {
- Some(shadower) if shadower.def() != binding.def() => {
+ if let Ok(MacroBinding::Modern(shadower)) = potential_illegal_shadower {
+ if shadower.def() != binding.def() {
let name = ident.name;
self.ambiguity_errors.push(AmbiguityError {
span: span, name: name, b1: shadower, b2: binding, lexical: true,
legacy: false,
});
- return Ok(shadower);
+ return potential_illegal_shadower;
}
- _ if binding.expansion == Mark::root() => return Ok(binding),
- _ => potential_expanded_shadower = Some(binding),
+ }
+ if binding.expansion != Mark::root() ||
+ (binding.is_glob_import() && module.unwrap().def().is_some()) {
+ potential_illegal_shadower = result;
+ } else {
+ return result;
}
},
Err(Determinacy::Undetermined) => return Err(Determinacy::Undetermined),
Err(Determinacy::Determined) => {}
}
- match module.kind {
- ModuleKind::Block(..) => module = module.parent.unwrap(),
- ModuleKind::Def(..) => return match potential_expanded_shadower {
- Some(binding) => Ok(binding),
- None if record_used.is_some() => Err(Determinacy::Determined),
- None => Err(Determinacy::Undetermined),
+ module = match module {
+ Some(module) => match module.kind {
+ ModuleKind::Block(..) => module.parent,
+ ModuleKind::Def(..) => None,
},
+ None => return potential_illegal_shadower,
}
}
}
let binding = if let Some(binding) = binding {
MacroBinding::Legacy(binding)
- } else if let Some(binding) = self.builtin_macros.get(&name).cloned() {
+ } else if let Some(binding) = self.global_macros.get(&name).cloned() {
if !self.use_extern_macros {
self.record_use(Ident::with_empty_ctxt(name), MacroNS, binding, DUMMY_SP);
}
- MacroBinding::Modern(binding)
+ MacroBinding::Global(binding)
} else {
return None;
};
let legacy_resolution = self.resolve_legacy_scope(legacy_scope, ident.name, true);
let resolution = self.resolve_lexical_macro_path_segment(ident, MacroNS, Some(span));
match (legacy_resolution, resolution) {
- (Some(legacy_resolution), Ok(resolution)) => {
- let (legacy_span, participle) = match legacy_resolution {
- MacroBinding::Modern(binding)
- if binding.def() == resolution.def() => continue,
- MacroBinding::Modern(binding) => (binding.span, "imported"),
- MacroBinding::Legacy(binding) => (binding.span, "defined"),
- };
- let msg1 = format!("`{}` could refer to the macro {} here", ident, participle);
+ (Some(MacroBinding::Legacy(legacy_binding)), Ok(MacroBinding::Modern(binding))) => {
+ let msg1 = format!("`{}` could refer to the macro defined here", ident);
let msg2 = format!("`{}` could also refer to the macro imported here", ident);
self.session.struct_span_err(span, &format!("`{}` is ambiguous", ident))
- .span_note(legacy_span, &msg1)
- .span_note(resolution.span, &msg2)
+ .span_note(legacy_binding.span, &msg1)
+ .span_note(binding.span, &msg2)
.emit();
},
- (Some(MacroBinding::Modern(binding)), Err(_)) => {
+ (Some(MacroBinding::Global(binding)), Ok(MacroBinding::Global(_))) => {
self.record_use(ident, MacroNS, binding, span);
self.err_if_macro_use_proc_macro(ident.name, span, binding);
},
find_best_match_for_name(self.macro_names.iter(), name, None)
} else {
None
- // Then check builtin macros.
+ // Then check global macros.
}.or_else(|| {
// FIXME: get_macro needs an &mut Resolver, can we do it without cloning?
- let builtin_macros = self.builtin_macros.clone();
- let names = builtin_macros.iter().filter_map(|(name, binding)| {
+ let global_macros = self.global_macros.clone();
+ let names = global_macros.iter().filter_map(|(name, binding)| {
if binding.get_macro(self).kind() == kind {
Some(name)
} else {
module: Module<'a>,
ident: Ident,
ns: Namespace,
- ignore_unresolved_invocations: bool,
+ restricted_shadowing: bool,
record_used: Option<Span>)
-> Result<&'a NameBinding<'a>, Determinacy> {
self.populate_module_if_necessary(module);
if let Some(binding) = resolution.binding {
if let Some(shadowed_glob) = resolution.shadows_glob {
let name = ident.name;
- // If we ignore unresolved invocations, we must forbid
- // expanded shadowing to avoid time travel.
- if ignore_unresolved_invocations &&
+ // Forbid expanded shadowing to avoid time travel.
+ if restricted_shadowing &&
binding.expansion != Mark::root() &&
ns != MacroNS && // In MacroNS, `try_define` always forbids this shadowing
binding.def() != shadowed_glob.def() {
}
let no_unresolved_invocations =
- ignore_unresolved_invocations || module.unresolved_invocations.borrow().is_empty();
+ restricted_shadowing || module.unresolved_invocations.borrow().is_empty();
match resolution.binding {
// In `MacroNS`, expanded bindings do not shadow (enforced in `try_define`).
Some(binding) if no_unresolved_invocations || ns == MacroNS =>
}
// Check if the globs are determined
+ if restricted_shadowing && module.def().is_some() {
+ return Err(Determined);
+ }
for directive in module.globs.borrow().iter() {
if self.is_accessible(directive.vis.get()) {
if let Some(module) = directive.imported_module.get() {
crate-type = ["dylib"]
[dependencies]
-log = { path = "../liblog" }
+log = "0.3"
rustc = { path = "../librustc" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
let strs = pairs.map(|(f, v)| format!(",{},\"{}\"", f, escape(String::from(v))));
strs.fold(String::new(), |mut s, ss| {
- s.push_str(&ss[..]);
+ s.push_str(&ss);
s
})
}
use data::{self, VariableKind};
use dump::Dump;
-pub struct JsonDumper<'b, W: Write + 'b> {
- output: &'b mut W,
+pub struct JsonDumper<O: DumpOutput> {
result: Analysis,
+ output: O,
}
-impl<'b, W: Write> JsonDumper<'b, W> {
- pub fn new(writer: &'b mut W) -> JsonDumper<'b, W> {
- JsonDumper { output: writer, result: Analysis::new() }
- }
+pub trait DumpOutput {
+ fn dump(&mut self, result: &Analysis);
}
-impl<'b, W: Write> Drop for JsonDumper<'b, W> {
- fn drop(&mut self) {
- if let Err(_) = write!(self.output, "{}", as_json(&self.result)) {
+pub struct WriteOutput<'b, W: Write + 'b> {
+ output: &'b mut W,
+}
+
+impl<'b, W: Write> DumpOutput for WriteOutput<'b, W> {
+ fn dump(&mut self, result: &Analysis) {
+ if let Err(_) = write!(self.output, "{}", as_json(&result)) {
error!("Error writing output");
}
}
}
+pub struct CallbackOutput<'b> {
+ callback: &'b mut FnMut(&Analysis),
+}
+
+impl<'b> DumpOutput for CallbackOutput<'b> {
+ fn dump(&mut self, result: &Analysis) {
+ (self.callback)(result)
+ }
+}
+
+impl<'b, W: Write> JsonDumper<WriteOutput<'b, W>> {
+ pub fn new(writer: &'b mut W) -> JsonDumper<WriteOutput<'b, W>> {
+ JsonDumper { output: WriteOutput { output: writer }, result: Analysis::new() }
+ }
+}
+
+impl<'b> JsonDumper<CallbackOutput<'b>> {
+ pub fn with_callback(callback: &'b mut FnMut(&Analysis)) -> JsonDumper<CallbackOutput<'b>> {
+ JsonDumper { output: CallbackOutput { callback: callback }, result: Analysis::new() }
+ }
+}
+
+impl<O: DumpOutput> Drop for JsonDumper<O> {
+ fn drop(&mut self) {
+ self.output.dump(&self.result);
+ }
+}
+
macro_rules! impl_fn {
($fn_name: ident, $data_type: ident, $bucket: ident) => {
fn $fn_name(&mut self, data: $data_type) {
}
}
-impl<'b, W: Write + 'b> Dump for JsonDumper<'b, W> {
+impl<'b, O: DumpOutput + 'b> Dump for JsonDumper<O> {
fn crate_prelude(&mut self, data: CratePreludeData) {
self.result.prelude = Some(data)
}
use rustc::hir::map::Node;
use rustc::hir::def_id::DefId;
use rustc::session::config::CrateType::CrateTypeExecutable;
+use rustc::session::Session;
use rustc::ty::{self, TyCtxt};
use std::env;
}
}
-pub fn process_crate<'l, 'tcx>(tcx: TyCtxt<'l, 'tcx, 'tcx>,
- krate: &ast::Crate,
- analysis: &'l ty::CrateAnalysis,
- cratename: &str,
- odir: Option<&Path>,
- format: Format) {
- let _ignore = tcx.dep_graph.in_ignore();
+/// Defines what to do with the results of saving the analysis.
+pub trait SaveHandler {
+ fn save<'l, 'tcx>(&mut self,
+ save_ctxt: SaveContext<'l, 'tcx>,
+ krate: &ast::Crate,
+ cratename: &str);
+}
- assert!(analysis.glob_map.is_some());
+/// Dump the save-analysis results to a file.
+pub struct DumpHandler<'a> {
+ format: Format,
+ odir: Option<&'a Path>,
+ cratename: String
+}
- info!("Dumping crate {}", cratename);
+impl<'a> DumpHandler<'a> {
+ pub fn new(format: Format, odir: Option<&'a Path>, cratename: &str) -> DumpHandler<'a> {
+ DumpHandler {
+ format: format,
+ odir: odir,
+ cratename: cratename.to_owned()
+ }
+ }
- // find a path to dump our data to
- let mut root_path = match env::var_os("RUST_SAVE_ANALYSIS_FOLDER") {
- Some(val) => PathBuf::from(val),
- None => match odir {
- Some(val) => val.join("save-analysis"),
- None => PathBuf::from("save-analysis-temp"),
- },
- };
+ fn output_file(&self, sess: &Session) -> File {
+ let mut root_path = match env::var_os("RUST_SAVE_ANALYSIS_FOLDER") {
+ Some(val) => PathBuf::from(val),
+ None => match self.odir {
+ Some(val) => val.join("save-analysis"),
+ None => PathBuf::from("save-analysis-temp"),
+ },
+ };
- if let Err(e) = std::fs::create_dir_all(&root_path) {
- tcx.sess.err(&format!("Could not create directory {}: {}",
- root_path.display(),
- e));
+ if let Err(e) = std::fs::create_dir_all(&root_path) {
+ error!("Could not create directory {}: {}", root_path.display(), e);
+ }
+
+ {
+ let disp = root_path.display();
+ info!("Writing output to {}", disp);
+ }
+
+ let executable = sess.crate_types.borrow().iter().any(|ct| *ct == CrateTypeExecutable);
+ let mut out_name = if executable {
+ "".to_owned()
+ } else {
+ "lib".to_owned()
+ };
+ out_name.push_str(&self.cratename);
+ out_name.push_str(&sess.opts.cg.extra_filename);
+ out_name.push_str(self.format.extension());
+ root_path.push(&out_name);
+ let output_file = File::create(&root_path).unwrap_or_else(|e| {
+ let disp = root_path.display();
+ sess.fatal(&format!("Could not open {}: {}", disp, e));
+ });
+ root_path.pop();
+ output_file
}
+}
+
+impl<'a> SaveHandler for DumpHandler<'a> {
+ fn save<'l, 'tcx>(&mut self,
+ save_ctxt: SaveContext<'l, 'tcx>,
+ krate: &ast::Crate,
+ cratename: &str) {
+ macro_rules! dump {
+ ($new_dumper: expr) => {{
+ let mut dumper = $new_dumper;
+ let mut visitor = DumpVisitor::new(save_ctxt, &mut dumper);
+
+ visitor.dump_crate_info(cratename, krate);
+ visit::walk_crate(&mut visitor, krate);
+ }}
+ }
+
+ let output = &mut self.output_file(&save_ctxt.tcx.sess);
- {
- let disp = root_path.display();
- info!("Writing output to {}", disp);
+ match self.format {
+ Format::Csv => dump!(CsvDumper::new(output)),
+ Format::Json => dump!(JsonDumper::new(output)),
+ Format::JsonApi => dump!(JsonApiDumper::new(output)),
+ }
}
+}
- // Create output file.
- let executable = tcx.sess.crate_types.borrow().iter().any(|ct| *ct == CrateTypeExecutable);
- let mut out_name = if executable {
- "".to_owned()
- } else {
- "lib".to_owned()
- };
- out_name.push_str(&cratename);
- out_name.push_str(&tcx.sess.opts.cg.extra_filename);
- out_name.push_str(format.extension());
- root_path.push(&out_name);
- let mut output_file = File::create(&root_path).unwrap_or_else(|e| {
- let disp = root_path.display();
- tcx.sess.fatal(&format!("Could not open {}: {}", disp, e));
- });
- root_path.pop();
- let output = &mut output_file;
+/// Call a callback with the results of save-analysis.
+pub struct CallbackHandler<'b> {
+ pub callback: &'b mut FnMut(&rls_data::Analysis),
+}
+
+impl<'b> SaveHandler for CallbackHandler<'b> {
+ fn save<'l, 'tcx>(&mut self,
+ save_ctxt: SaveContext<'l, 'tcx>,
+ krate: &ast::Crate,
+ cratename: &str) {
+ macro_rules! dump {
+ ($new_dumper: expr) => {{
+ let mut dumper = $new_dumper;
+ let mut visitor = DumpVisitor::new(save_ctxt, &mut dumper);
+
+ visitor.dump_crate_info(cratename, krate);
+ visit::walk_crate(&mut visitor, krate);
+ }}
+ }
+
+ // We're using the JsonDumper here because it has the format of the
+ // save-analysis results that we will pass to the callback. IOW, we are
+ // using the JsonDumper to collect the save-analysis results, but not
+ // actually to dump them to a file. This is all a bit convoluted and
+ // there is certainly a simpler design here trying to get out (FIXME).
+ dump!(JsonDumper::with_callback(self.callback))
+ }
+}
+
+pub fn process_crate<'l, 'tcx, H: SaveHandler>(tcx: TyCtxt<'l, 'tcx, 'tcx>,
+ krate: &ast::Crate,
+ analysis: &'l ty::CrateAnalysis,
+ cratename: &str,
+ mut handler: H) {
+ let _ignore = tcx.dep_graph.in_ignore();
+
+ assert!(analysis.glob_map.is_some());
+
+ info!("Dumping crate {}", cratename);
let save_ctxt = SaveContext {
tcx: tcx,
span_utils: SpanUtils::new(&tcx.sess),
};
- macro_rules! dump {
- ($new_dumper: expr) => {{
- let mut dumper = $new_dumper;
- let mut visitor = DumpVisitor::new(save_ctxt, &mut dumper);
-
- visitor.dump_crate_info(cratename, krate);
- visit::walk_crate(&mut visitor, krate);
- }}
- }
-
- match format {
- Format::Csv => dump!(CsvDumper::new(output)),
- Format::Json => dump!(JsonDumper::new(output)),
- Format::JsonApi => dump!(JsonApiDumper::new(output)),
- }
+ handler.save(save_ctxt, krate, cratename)
}
// Utility functions for the module.
[dependencies]
flate = { path = "../libflate" }
-log = { path = "../liblog" }
+log = "0.3"
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
rustc_bitflags = { path = "../librustc_bitflags" }
match sig.inputs().last().unwrap().sty {
ty::TyTuple(ref tupled_arguments, _) => {
inputs = &sig.inputs()[0..sig.inputs().len() - 1];
- &tupled_arguments[..]
+ &tupled_arguments
}
_ => {
bug!("argument to function with \"rust-call\" ABI \
variant_fill].iter().cloned().collect();
match name {
None => {
- Type::struct_(cx, &fields[..], false)
+ Type::struct_(cx, &fields, false)
}
Some(name) => {
let mut llty = Type::named_struct(cx, name);
- llty.set_struct_body(&fields[..], false);
+ llty.set_struct_body(&fields, false);
llty
}
}
alignment: Alignment,
) -> ValueRef {
let llptrptr = bcx.gepi(scrutinee,
- &discrfield.iter().map(|f| *f as usize).collect::<Vec<_>>()[..]);
+ &discrfield.iter().map(|f| *f as usize).collect::<Vec<_>>());
let llptr = bcx.load(llptrptr, alignment.to_align());
let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
bcx.icmp(cmp, llptr, C_null(val_ty(llptr)))
base::call_memset(bcx, llptr, fill_byte, size, align, false);
} else {
let path = discrfield.iter().map(|&i| i as usize).collect::<Vec<_>>();
- let llptrptr = bcx.gepi(val, &path[..]);
+ let llptrptr = bcx.gepi(val, &path);
let llptrty = val_ty(llptrptr).element_type();
bcx.store(C_null(llptrty), llptrptr, None);
}
.chain(arch_clobbers.iter().map(|s| s.to_string()))
.collect::<Vec<String>>().join(",");
- debug!("Asm Constraints: {}", &all_constraints[..]);
+ debug!("Asm Constraints: {}", &all_constraints);
// Depending on how many outputs we have, the return type is different
let num_outputs = output_types.len();
let output_type = match num_outputs {
0 => Type::void(bcx.ccx),
1 => output_types[0],
- _ => Type::struct_(bcx.ccx, &output_types[..], false)
+ _ => Type::struct_(bcx.ccx, &output_types, false)
};
let dialect = match ia.dialect {
for path in search_paths {
debug!("looking for {} inside {:?}", name, path);
- let test = path.join(&oslibname[..]);
+ let test = path.join(&oslibname);
if test.exists() { return test }
if oslibname != unixlibname {
- let test = path.join(&unixlibname[..]);
+ let test = path.join(&unixlibname);
if test.exists() { return test }
}
}
attrs: &[ast::Attribute],
input: &Input) -> String {
let validate = |s: String, span: Option<Span>| {
- cstore::validate_crate_name(sess, &s[..], span);
+ cstore::validate_crate_name(sess, &s, span);
s
};
let msg = format!("--crate-name and #[crate_name] are \
required to match, but `{}` != `{}`",
s, name);
- sess.span_err(attr.span, &msg[..]);
+ sess.span_err(attr.span, &msg);
}
}
return validate(s.clone(), None);
outputs: &OutputFilenames)
-> Vec<PathBuf> {
trans.modules.iter().map(|module| {
- outputs.temp_path(OutputType::Object, Some(&module.name[..]))
+ outputs.temp_path(OutputType::Object, Some(&module.name))
}).collect()
}
e))
}
- let bc_data_deflated = flate::deflate_bytes(&bc_data[..]);
+ let bc_data_deflated = flate::deflate_bytes(&bc_data);
let mut bc_file_deflated = match fs::File::create(&bc_deflated_filename) {
Ok(file) => file,
pname,
prog.status))
.note(&format!("{:?}", &cmd))
- .note(&escape_string(&output[..]))
+ .note(&escape_string(&output))
.emit();
sess.abort_if_errors();
}
- info!("linker stderr:\n{}", escape_string(&prog.stderr[..]));
- info!("linker stdout:\n{}", escape_string(&prog.stdout[..]));
+ info!("linker stderr:\n{}", escape_string(&prog.stderr));
+ info!("linker stdout:\n{}", escape_string(&prog.stdout));
},
Err(e) => {
sess.struct_err(&format!("could not exec the linker `{}`: {}", pname, e))
}
let export_threshold =
- symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]);
+ symbol_export::crates_export_threshold(&sess.crate_types.borrow());
let symbol_filter = &|&(ref name, level): &(String, _)| {
if symbol_export::is_below_threshold(level, export_threshold) {
bc_decoded.len() as libc::size_t) {
write::llvm_err(sess.diagnostic(),
format!("failed to load bc of `{}`",
- &name[..]));
+ name));
}
});
}
let libs = config.used_crates.clone();
let libs = libs.into_iter().filter_map(|(_, l)| l.option()).collect::<Vec<_>>();
- let rpaths = get_rpaths(config, &libs[..]);
- flags.extend_from_slice(&rpaths_to_flags(&rpaths[..]));
+ let rpaths = get_rpaths(config, &libs);
+ flags.extend_from_slice(&rpaths_to_flags(&rpaths));
// Use DT_RUNPATH instead of DT_RPATH if available
if config.linker_is_gnu {
}
}
- log_rpaths("relative", &rel_rpaths[..]);
- log_rpaths("fallback", &fallback_rpaths[..]);
+ log_rpaths("relative", &rel_rpaths);
+ log_rpaths("fallback", &fallback_rpaths);
let mut rpaths = rel_rpaths;
- rpaths.extend_from_slice(&fallback_rpaths[..]);
+ rpaths.extend_from_slice(&fallback_rpaths);
// Remove duplicates
- let rpaths = minimize_rpaths(&rpaths[..]);
+ let rpaths = minimize_rpaths(&rpaths);
return rpaths;
}
let mut set = HashSet::new();
let mut minimized = Vec::new();
for rpath in rpaths {
- if set.insert(&rpath[..]) {
+ if set.insert(rpath) {
minimized.push(rpath.clone());
}
}
cnum: CrateNum)
-> &[(String, SymbolExportLevel)] {
match self.exports.get(&cnum) {
- Some(exports) => &exports[..],
+ Some(exports) => exports,
None => &[]
}
}
{
for &(ref name, export_level) in self.exported_symbols(cnum) {
if is_below_threshold(export_level, export_threshold) {
- f(&name[..], export_level)
+ f(&name, export_level)
}
}
}
if !result.is_empty() &&
result.as_bytes()[0] != '_' as u8 &&
! (result.as_bytes()[0] as char).is_xid_start() {
- return format!("_{}", &result[..]);
+ return format!("_{}", result);
}
return result;
Some(ref code) => {
handler.emit_with_code(&MultiSpan::new(),
&diag.msg,
- &code[..],
+ &code,
diag.lvl);
},
None => {
let fdata_sections = ffunction_sections;
let code_model_arg = match sess.opts.cg.code_model {
- Some(ref s) => &s[..],
- None => &sess.target.target.options.code_model[..],
+ Some(ref s) => &s,
+ None => &sess.target.target.options.code_model,
};
let code_model = match CODE_GEN_MODEL_ARGS.iter().find(
let msg = llvm::build_string(|s| llvm::LLVMRustWriteSMDiagnosticToString(diag, s))
.expect("non-UTF8 SMDiagnostic");
- report_inline_asm(cgcx, &msg[..], cookie);
+ report_inline_asm(cgcx, &msg, cookie);
}
unsafe extern "C" fn diagnostic_handler(info: DiagnosticInfoRef, user: *mut c_void) {
if trans.modules.len() == 1 {
// 1) Only one codegen unit. In this case it's no difficulty
// to copy `foo.0.x` to `foo.x`.
- let module_name = Some(&(trans.modules[0].name)[..]);
+ let module_name = Some(&trans.modules[0].name[..]);
let path = crate_output.temp_path(output_type, module_name);
copy_gracefully(&path,
&crate_output.path(output_type));
if metadata_config.emit_bc && !user_wants_bitcode {
let path = crate_output.temp_path(OutputType::Bitcode,
- Some(&trans.metadata_module.name[..]));
+ Some(&trans.metadata_module.name));
remove(sess, &path);
}
}
n_bytes: ValueRef,
align: u32) {
let ccx = b.ccx;
- let ptr_width = &ccx.sess().target.target.target_pointer_width[..];
+ let ptr_width = &ccx.sess().target.target.target_pointer_width;
let key = format!("llvm.memcpy.p0i8.p0i8.i{}", ptr_width);
let memcpy = ccx.get_intrinsic(&key);
let src_ptr = b.pointercast(src, Type::i8p(ccx));
size: ValueRef,
align: ValueRef,
volatile: bool) -> ValueRef {
- let ptr_width = &b.ccx.sess().target.target.target_pointer_width[..];
+ let ptr_width = &b.ccx.sess().target.target.target_pointer_width;
let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
let llintrinsicfn = b.ccx.get_intrinsic(&intrinsic_key);
let volatile = C_bool(b.ccx, volatile);
let cstore = &cx.tcx().sess.cstore;
let metadata = cstore.encode_metadata(cx.tcx(),
- cx.export_map(),
cx.link_meta(),
exported_symbols);
if kind == MetadataKind::Uncompressed {
let mut compressed = cstore.metadata_encoding_version().to_vec();
compressed.extend_from_slice(&flate::deflate_bytes(&metadata));
- let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[..]);
+ let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed);
let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false);
let name = cx.metadata_symbol_name();
let buf = CString::new(name).unwrap();
symbol_map: &SymbolMap<'tcx>,
exported_symbols: &ExportedSymbols) {
let export_threshold =
- symbol_export::crates_export_threshold(&sess.crate_types.borrow()[..]);
+ symbol_export::crates_export_threshold(&sess.crate_types.borrow());
let exported_symbols = exported_symbols
.exported_symbols(LOCAL_CRATE)
(generics.parent_types == 0 && generics.types.is_empty()) &&
// Functions marked with #[inline] are only ever translated
// with "internal" linkage and are never exported.
- !attr::requests_inline(&attributes[..])
+ !attr::requests_inline(&attributes)
}
_ => false
// particular items that will be processed.
let krate = tcx.hir.krate();
- let ty::CrateAnalysis { export_map, reachable, name, .. } = analysis;
+ let ty::CrateAnalysis { reachable, name, .. } = analysis;
let exported_symbols = find_exported_symbols(tcx, reachable);
let check_overflow = tcx.sess.overflow_checks();
let link_meta = link::build_link_meta(incremental_hashes_map, &name);
let shared_ccx = SharedCrateContext::new(tcx,
- export_map,
link_meta.clone(),
exported_symbols,
check_overflow);
cgus.dedup();
for &(ref cgu_name, linkage) in cgus.iter() {
output.push_str(" ");
- output.push_str(&cgu_name[..]);
+ output.push_str(&cgu_name);
let linkage_abbrev = match linkage {
llvm::Linkage::ExternalLinkage => "External",
} else {
let v = ixs.iter().map(|i| C_i32(self.ccx, *i as i32)).collect::<Vec<ValueRef>>();
self.count_insn("gepi");
- self.inbounds_gep(base, &v[..])
+ self.inbounds_gep(base, &v)
}
}
let s = format!("{} ({})",
text,
self.ccx.sess().codemap().span_to_string(sp));
- debug!("{}", &s[..]);
- self.add_comment(&s[..]);
+ debug!("{}", s);
+ self.add_comment(&s);
}
}
DepTrackingMapConfig, WorkProduct};
use middle::cstore::LinkMeta;
use rustc::hir;
-use rustc::hir::def::ExportMap;
use rustc::hir::def_id::DefId;
use rustc::traits;
use debuginfo;
metadata_llmod: ModuleRef,
metadata_llcx: ContextRef,
- export_map: ExportMap,
exported_symbols: NodeSet,
link_meta: LinkMeta,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>,
- export_map: ExportMap,
link_meta: LinkMeta,
exported_symbols: NodeSet,
check_overflow: bool)
SharedCrateContext {
metadata_llmod: metadata_llmod,
metadata_llcx: metadata_llcx,
- export_map: export_map,
exported_symbols: exported_symbols,
link_meta: link_meta,
empty_param_env: tcx.empty_parameter_environment(),
self.metadata_llcx
}
- pub fn export_map<'a>(&'a self) -> &'a ExportMap {
- &self.export_map
- }
-
pub fn exported_symbols<'a>(&'a self) -> &'a NodeSet {
&self.exported_symbols
}
unsafe { llvm::LLVMRustGetModuleDataLayout(self.llmod()) }
}
- pub fn export_map<'a>(&'a self) -> &'a ExportMap {
- &self.shared.export_map
- }
-
pub fn exported_symbols<'a>(&'a self) -> &'a NodeSet {
&self.shared.exported_symbols
}
match fulfill_obligation(scx, DUMMY_SP, trait_ref) {
traits::VtableImpl(traits::VtableImplData { impl_def_id, .. }) => {
- scx.tcx().custom_coerce_unsized_kind(impl_def_id)
+ scx.tcx().coerce_unsized_info(impl_def_id).custom_kind.unwrap()
}
vtable => {
bug!("invalid CoerceUnsized vtable: {:?}", vtable);
test = false
[dependencies]
-log = { path = "../liblog" }
+log = "0.3"
syntax = { path = "../libsyntax" }
arena = { path = "../libarena" }
fmt_macros = { path = "../libfmt_macros" }
}
fn assemble_inherent_impl_candidates_for_type(&mut self, def_id: DefId) {
- // Read the inherent implementation candidates for this type from the
- // metadata if necessary.
- self.tcx.populate_inherent_implementations_for_type_if_necessary(self.span, def_id);
-
- if let Some(impl_infos) = self.tcx.maps.inherent_impls.borrow().get(&def_id) {
- for &impl_def_id in impl_infos.iter() {
- self.assemble_inherent_impl_probe(impl_def_id);
- }
+ let impl_def_ids = ty::queries::inherent_impls::get(self.tcx, self.span, def_id);
+ for &impl_def_id in impl_def_ids.iter() {
+ self.assemble_inherent_impl_probe(impl_def_id);
}
}
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::ParameterEnvironment;
use rustc::ty::TypeFoldable;
+use rustc::ty::adjustment::CoerceUnsizedInfo;
use rustc::ty::subst::Subst;
use rustc::ty::util::CopyImplementationError;
use rustc::infer;
}
fn visit_implementation_of_coerce_unsized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- coerce_unsized_trait: DefId,
+ _: DefId,
impl_did: DefId) {
debug!("visit_implementation_of_coerce_unsized: impl_did={:?}",
impl_did);
+ // Just compute this for the side-effects, in particular error
+ // reporting; other parts of the code may, of course, demand the
+ // resulting info for its own sake.
+ if impl_did.is_local() {
+ let span = tcx.def_span(impl_did);
+ ty::queries::coerce_unsized_info::get(tcx, span, impl_did);
+ }
+}
+
+pub fn coerce_unsized_info<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ impl_did: DefId)
+ -> CoerceUnsizedInfo {
+ debug!("compute_coerce_unsized_info(impl_did={:?})", impl_did);
+ let coerce_unsized_trait = tcx.lang_items.coerce_unsized_trait().unwrap();
+
let unsize_trait = match tcx.lang_items.require(UnsizeTraitLangItem) {
Ok(id) => id,
Err(err) => {
}
};
- let impl_node_id = if let Some(n) = tcx.hir.as_local_node_id(impl_did) {
- n
- } else {
- debug!("visit_implementation_of_coerce_unsized(): impl not \
- in this crate");
- return;
- };
+ // this provider should only get invoked for local def-ids
+ let impl_node_id = tcx.hir.as_local_node_id(impl_did).unwrap_or_else(|| {
+ bug!("coerce_unsized_info: invoked for non-local def-id {:?}", impl_did)
+ });
let source = tcx.item_type(impl_did);
let trait_ref = tcx.impl_trait_ref(impl_did).unwrap();
+ assert_eq!(trait_ref.def_id, coerce_unsized_trait);
let target = trait_ref.substs.type_at(1);
debug!("visit_implementation_of_coerce_unsized: {:?} -> {:?} (bound)",
source,
let target = target.subst(tcx, ¶m_env.free_substs);
assert!(!source.has_escaping_regions());
+ let err_info = CoerceUnsizedInfo { custom_kind: None };
+
debug!("visit_implementation_of_coerce_unsized: {:?} -> {:?} (free)",
source,
target);
definition; expected {}, found {}",
source_path,
target_path);
- return;
+ return err_info;
}
let fields = &def_a.struct_variant().fields;
"the trait `CoerceUnsized` may only be implemented \
for a coercion between structures with one field \
being coerced, none found");
- return;
+ return err_info;
} else if diff_fields.len() > 1 {
let item = tcx.hir.expect_item(impl_node_id);
let span = if let ItemImpl(.., Some(ref t), _, _) = item.node {
.join(", ")));
err.span_label(span, &format!("requires multiple coercions"));
err.emit();
- return;
+ return err_info;
}
let (i, a, b) = diff_fields[0];
E0376,
"the trait `CoerceUnsized` may only be implemented \
for a coercion between structures");
- return;
+ return err_info;
}
};
.caller_bounds);
infcx.resolve_regions_and_report_errors(&free_regions, impl_node_id);
- if let Some(kind) = kind {
- tcx.maps.custom_coerce_unsized_kind.borrow_mut().insert(impl_did, kind);
+ CoerceUnsizedInfo {
+ custom_kind: kind
}
- });
+ })
}
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::dep_graph::DepNode;
-use rustc::hir::def_id::DefId;
-use rustc::hir;
-use rustc::hir::itemlikevisit::ItemLikeVisitor;
-use rustc::lint;
-use rustc::traits::{self, Reveal};
-use rustc::ty::{self, TyCtxt};
-
-use syntax::ast;
-use syntax_pos::Span;
-
-struct InherentCollect<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>
-}
-
-impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentCollect<'a, 'tcx> {
- fn visit_item(&mut self, item: &hir::Item) {
- let (unsafety, ty) = match item.node {
- hir::ItemImpl(unsafety, .., None, ref ty, _) => (unsafety, ty),
- _ => return
- };
-
- match unsafety {
- hir::Unsafety::Normal => {
- // OK
- }
- hir::Unsafety::Unsafe => {
- span_err!(self.tcx.sess,
- item.span,
- E0197,
- "inherent impls cannot be declared as unsafe");
- }
- }
-
- let def_id = self.tcx.hir.local_def_id(item.id);
- let self_ty = self.tcx.item_type(def_id);
- match self_ty.sty {
- ty::TyAdt(def, _) => {
- self.check_def_id(item, def.did);
- }
- ty::TyDynamic(ref data, ..) if data.principal().is_some() => {
- self.check_def_id(item, data.principal().unwrap().def_id());
- }
- ty::TyChar => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.char_impl(),
- "char",
- "char",
- item.span);
- }
- ty::TyStr => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.str_impl(),
- "str",
- "str",
- item.span);
- }
- ty::TySlice(_) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.slice_impl(),
- "slice",
- "[T]",
- item.span);
- }
- ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutImmutable }) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.const_ptr_impl(),
- "const_ptr",
- "*const T",
- item.span);
- }
- ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutMutable }) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.mut_ptr_impl(),
- "mut_ptr",
- "*mut T",
- item.span);
- }
- ty::TyInt(ast::IntTy::I8) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.i8_impl(),
- "i8",
- "i8",
- item.span);
- }
- ty::TyInt(ast::IntTy::I16) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.i16_impl(),
- "i16",
- "i16",
- item.span);
- }
- ty::TyInt(ast::IntTy::I32) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.i32_impl(),
- "i32",
- "i32",
- item.span);
- }
- ty::TyInt(ast::IntTy::I64) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.i64_impl(),
- "i64",
- "i64",
- item.span);
- }
- ty::TyInt(ast::IntTy::I128) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.i128_impl(),
- "i128",
- "i128",
- item.span);
- }
- ty::TyInt(ast::IntTy::Is) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.isize_impl(),
- "isize",
- "isize",
- item.span);
- }
- ty::TyUint(ast::UintTy::U8) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.u8_impl(),
- "u8",
- "u8",
- item.span);
- }
- ty::TyUint(ast::UintTy::U16) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.u16_impl(),
- "u16",
- "u16",
- item.span);
- }
- ty::TyUint(ast::UintTy::U32) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.u32_impl(),
- "u32",
- "u32",
- item.span);
- }
- ty::TyUint(ast::UintTy::U64) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.u64_impl(),
- "u64",
- "u64",
- item.span);
- }
- ty::TyUint(ast::UintTy::U128) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.u128_impl(),
- "u128",
- "u128",
- item.span);
- }
- ty::TyUint(ast::UintTy::Us) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.usize_impl(),
- "usize",
- "usize",
- item.span);
- }
- ty::TyFloat(ast::FloatTy::F32) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.f32_impl(),
- "f32",
- "f32",
- item.span);
- }
- ty::TyFloat(ast::FloatTy::F64) => {
- self.check_primitive_impl(def_id,
- self.tcx.lang_items.f64_impl(),
- "f64",
- "f64",
- item.span);
- }
- ty::TyError => {
- return;
- }
- _ => {
- struct_span_err!(self.tcx.sess,
- ty.span,
- E0118,
- "no base type found for inherent implementation")
- .span_label(ty.span, &format!("impl requires a base type"))
- .note(&format!("either implement a trait on it or create a newtype \
- to wrap it instead"))
- .emit();
- return;
- }
- }
- }
-
- fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
- }
-
- fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
- }
-}
-
-impl<'a, 'tcx> InherentCollect<'a, 'tcx> {
- fn check_def_id(&self, item: &hir::Item, def_id: DefId) {
- if def_id.is_local() {
- // Add the implementation to the mapping from implementation to base
- // type def ID, if there is a base type for this implementation and
- // the implementation does not have any associated traits.
- let impl_def_id = self.tcx.hir.local_def_id(item.id);
-
- // Subtle: it'd be better to collect these into a local map
- // and then write the vector only once all items are known,
- // but that leads to degenerate dep-graphs. The problem is
- // that the write of that big vector winds up having reads
- // from *all* impls in the krate, since we've lost the
- // precision basically. This would be ok in the firewall
- // model so once we've made progess towards that we can modify
- // the strategy here. In the meantime, using `push` is ok
- // because we are doing this as a pre-pass before anyone
- // actually reads from `inherent_impls` -- and we know this is
- // true beacuse we hold the refcell lock.
- self.tcx.maps.inherent_impls.borrow_mut().push(def_id, impl_def_id);
- } else {
- struct_span_err!(self.tcx.sess,
- item.span,
- E0116,
- "cannot define inherent `impl` for a type outside of the crate \
- where the type is defined")
- .span_label(item.span,
- &format!("impl for type defined outside of crate."))
- .note("define and implement a trait or new type instead")
- .emit();
- }
- }
-
- fn check_primitive_impl(&self,
- impl_def_id: DefId,
- lang_def_id: Option<DefId>,
- lang: &str,
- ty: &str,
- span: Span) {
- match lang_def_id {
- Some(lang_def_id) if lang_def_id == impl_def_id => {
- // OK
- }
- _ => {
- struct_span_err!(self.tcx.sess,
- span,
- E0390,
- "only a single inherent implementation marked with `#[lang = \
- \"{}\"]` is allowed for the `{}` primitive",
- lang,
- ty)
- .span_help(span, "consider using a trait to implement these methods")
- .emit();
- }
- }
- }
-}
-
-struct InherentOverlapChecker<'a, 'tcx: 'a> {
- tcx: TyCtxt<'a, 'tcx, 'tcx>
-}
-
-impl<'a, 'tcx> InherentOverlapChecker<'a, 'tcx> {
- fn check_for_common_items_in_impls(&self, impl1: DefId, impl2: DefId) {
- #[derive(Copy, Clone, PartialEq)]
- enum Namespace {
- Type,
- Value,
- }
-
- let name_and_namespace = |def_id| {
- let item = self.tcx.associated_item(def_id);
- (item.name, match item.kind {
- ty::AssociatedKind::Type => Namespace::Type,
- ty::AssociatedKind::Const |
- ty::AssociatedKind::Method => Namespace::Value,
- })
- };
-
- let impl_items1 = self.tcx.associated_item_def_ids(impl1);
- let impl_items2 = self.tcx.associated_item_def_ids(impl2);
-
- for &item1 in &impl_items1[..] {
- let (name, namespace) = name_and_namespace(item1);
-
- for &item2 in &impl_items2[..] {
- if (name, namespace) == name_and_namespace(item2) {
- let msg = format!("duplicate definitions with name `{}`", name);
- let node_id = self.tcx.hir.as_local_node_id(item1).unwrap();
- self.tcx.sess.add_lint(lint::builtin::OVERLAPPING_INHERENT_IMPLS,
- node_id,
- self.tcx.span_of_impl(item1).unwrap(),
- msg);
- }
- }
- }
- }
-
- fn check_for_overlapping_inherent_impls(&self, ty_def_id: DefId) {
- let _task = self.tcx.dep_graph.in_task(DepNode::CoherenceOverlapInherentCheck(ty_def_id));
-
- let inherent_impls = self.tcx.maps.inherent_impls.borrow();
- let impls = match inherent_impls.get(&ty_def_id) {
- Some(impls) => impls,
- None => return,
- };
-
- for (i, &impl1_def_id) in impls.iter().enumerate() {
- for &impl2_def_id in &impls[(i + 1)..] {
- self.tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
- if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() {
- self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id)
- }
- });
- }
- }
- }
-}
-
-impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> {
- fn visit_item(&mut self, item: &'v hir::Item) {
- match item.node {
- hir::ItemEnum(..) |
- hir::ItemStruct(..) |
- hir::ItemTrait(..) |
- hir::ItemUnion(..) => {
- let type_def_id = self.tcx.hir.local_def_id(item.id);
- self.check_for_overlapping_inherent_impls(type_def_id);
- }
- _ => {}
- }
- }
-
- fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
- }
-
- fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
- }
-}
-
-pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- tcx.visit_all_item_likes_in_krate(DepNode::CoherenceCheckImpl,
- &mut InherentCollect { tcx });
- tcx.visit_all_item_likes_in_krate(DepNode::CoherenceOverlapCheckSpecial,
- &mut InherentOverlapChecker { tcx });
-}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! The code in this module gathers up all of the inherent impls in
+//! the current crate and organizes them in a map. It winds up
+//! touching the whole crate and thus must be recomputed completely
+//! for any change, but it is very cheap to compute. In practice, most
+//! code in the compiler never *directly* requests this map. Instead,
+//! it requests the inherent impls specific to some type (via
+//! `ty::queries::inherent_impls::get(def_id)`). That value, however,
+//! is computed by selecting an item from this table.
+
+use rustc::dep_graph::DepNode;
+use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
+use rustc::hir;
+use rustc::hir::itemlikevisit::ItemLikeVisitor;
+use rustc::ty::{self, CrateInherentImpls, TyCtxt};
+use rustc::util::nodemap::DefIdMap;
+
+use std::rc::Rc;
+use syntax::ast;
+use syntax_pos::{DUMMY_SP, Span};
+
+/// On-demand query: yields a map containing all types mapped to their inherent impls.
+pub fn crate_inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ crate_num: CrateNum)
+ -> CrateInherentImpls {
+ assert_eq!(crate_num, LOCAL_CRATE);
+
+ let krate = tcx.hir.krate();
+ let mut collect = InherentCollect {
+ tcx,
+ impls_map: CrateInherentImpls {
+ inherent_impls: DefIdMap()
+ }
+ };
+ krate.visit_all_item_likes(&mut collect);
+ collect.impls_map
+}
+
+/// On-demand query: yields a vector of the inherent impls for a specific type.
+pub fn inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ ty_def_id: DefId)
+ -> Rc<Vec<DefId>> {
+ assert!(ty_def_id.is_local());
+
+ // NB. Until we adopt the red-green dep-tracking algorithm (see
+ // [the plan] for details on that), we do some hackery here to get
+ // the dependencies correct. Basically, we use a `with_ignore` to
+ // read the result we want. If we didn't have the `with_ignore`,
+ // we would wind up with a dependency on the entire crate, which
+ // we don't want. Then we go and add dependencies on all the impls
+ // in the result (which is what we wanted).
+ //
+ // The result is a graph with an edge from `Hir(I)` for every impl
+ // `I` defined on some type `T` to `CoherentInherentImpls(T)`,
+ // thus ensuring that if any of those impls change, the set of
+ // inherent impls is considered dirty.
+ //
+ // [the plan]: https://github.com/rust-lang/rust-roadmap/issues/4
+
+ let result = tcx.dep_graph.with_ignore(|| {
+ let crate_map = ty::queries::crate_inherent_impls::get(tcx, DUMMY_SP, ty_def_id.krate);
+ match crate_map.inherent_impls.get(&ty_def_id) {
+ Some(v) => v.clone(),
+ None => Rc::new(vec![]),
+ }
+ });
+
+ for &impl_def_id in &result[..] {
+ tcx.dep_graph.read(DepNode::Hir(impl_def_id));
+ }
+
+ result
+}
+
+struct InherentCollect<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ impls_map: CrateInherentImpls,
+}
+
+impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentCollect<'a, 'tcx> {
+ fn visit_item(&mut self, item: &hir::Item) {
+ let (unsafety, ty) = match item.node {
+ hir::ItemImpl(unsafety, .., None, ref ty, _) => (unsafety, ty),
+ _ => return
+ };
+
+ match unsafety {
+ hir::Unsafety::Normal => {
+ // OK
+ }
+ hir::Unsafety::Unsafe => {
+ span_err!(self.tcx.sess,
+ item.span,
+ E0197,
+ "inherent impls cannot be declared as unsafe");
+ }
+ }
+
+ let def_id = self.tcx.hir.local_def_id(item.id);
+ let self_ty = self.tcx.item_type(def_id);
+ match self_ty.sty {
+ ty::TyAdt(def, _) => {
+ self.check_def_id(item, def.did);
+ }
+ ty::TyDynamic(ref data, ..) if data.principal().is_some() => {
+ self.check_def_id(item, data.principal().unwrap().def_id());
+ }
+ ty::TyChar => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.char_impl(),
+ "char",
+ "char",
+ item.span);
+ }
+ ty::TyStr => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.str_impl(),
+ "str",
+ "str",
+ item.span);
+ }
+ ty::TySlice(_) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.slice_impl(),
+ "slice",
+ "[T]",
+ item.span);
+ }
+ ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutImmutable }) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.const_ptr_impl(),
+ "const_ptr",
+ "*const T",
+ item.span);
+ }
+ ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutMutable }) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.mut_ptr_impl(),
+ "mut_ptr",
+ "*mut T",
+ item.span);
+ }
+ ty::TyInt(ast::IntTy::I8) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.i8_impl(),
+ "i8",
+ "i8",
+ item.span);
+ }
+ ty::TyInt(ast::IntTy::I16) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.i16_impl(),
+ "i16",
+ "i16",
+ item.span);
+ }
+ ty::TyInt(ast::IntTy::I32) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.i32_impl(),
+ "i32",
+ "i32",
+ item.span);
+ }
+ ty::TyInt(ast::IntTy::I64) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.i64_impl(),
+ "i64",
+ "i64",
+ item.span);
+ }
+ ty::TyInt(ast::IntTy::I128) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.i128_impl(),
+ "i128",
+ "i128",
+ item.span);
+ }
+ ty::TyInt(ast::IntTy::Is) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.isize_impl(),
+ "isize",
+ "isize",
+ item.span);
+ }
+ ty::TyUint(ast::UintTy::U8) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.u8_impl(),
+ "u8",
+ "u8",
+ item.span);
+ }
+ ty::TyUint(ast::UintTy::U16) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.u16_impl(),
+ "u16",
+ "u16",
+ item.span);
+ }
+ ty::TyUint(ast::UintTy::U32) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.u32_impl(),
+ "u32",
+ "u32",
+ item.span);
+ }
+ ty::TyUint(ast::UintTy::U64) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.u64_impl(),
+ "u64",
+ "u64",
+ item.span);
+ }
+ ty::TyUint(ast::UintTy::U128) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.u128_impl(),
+ "u128",
+ "u128",
+ item.span);
+ }
+ ty::TyUint(ast::UintTy::Us) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.usize_impl(),
+ "usize",
+ "usize",
+ item.span);
+ }
+ ty::TyFloat(ast::FloatTy::F32) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.f32_impl(),
+ "f32",
+ "f32",
+ item.span);
+ }
+ ty::TyFloat(ast::FloatTy::F64) => {
+ self.check_primitive_impl(def_id,
+ self.tcx.lang_items.f64_impl(),
+ "f64",
+ "f64",
+ item.span);
+ }
+ ty::TyError => {
+ return;
+ }
+ _ => {
+ struct_span_err!(self.tcx.sess,
+ ty.span,
+ E0118,
+ "no base type found for inherent implementation")
+ .span_label(ty.span, &format!("impl requires a base type"))
+ .note(&format!("either implement a trait on it or create a newtype \
+ to wrap it instead"))
+ .emit();
+ return;
+ }
+ }
+ }
+
+ fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
+ }
+
+ fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
+ }
+}
+
+impl<'a, 'tcx> InherentCollect<'a, 'tcx> {
+ fn check_def_id(&mut self, item: &hir::Item, def_id: DefId) {
+ if def_id.is_local() {
+ // Add the implementation to the mapping from implementation to base
+ // type def ID, if there is a base type for this implementation and
+ // the implementation does not have any associated traits.
+ let impl_def_id = self.tcx.hir.local_def_id(item.id);
+ let mut rc_vec = self.impls_map.inherent_impls
+ .entry(def_id)
+ .or_insert_with(|| Rc::new(vec![]));
+
+ // At this point, there should not be any clones of the
+ // `Rc`, so we can still safely push into it in place:
+ Rc::get_mut(&mut rc_vec).unwrap().push(impl_def_id);
+ } else {
+ struct_span_err!(self.tcx.sess,
+ item.span,
+ E0116,
+ "cannot define inherent `impl` for a type outside of the crate \
+ where the type is defined")
+ .span_label(item.span,
+ &format!("impl for type defined outside of crate."))
+ .note("define and implement a trait or new type instead")
+ .emit();
+ }
+ }
+
+ fn check_primitive_impl(&self,
+ impl_def_id: DefId,
+ lang_def_id: Option<DefId>,
+ lang: &str,
+ ty: &str,
+ span: Span) {
+ match lang_def_id {
+ Some(lang_def_id) if lang_def_id == impl_def_id => {
+ // OK
+ }
+ _ => {
+ struct_span_err!(self.tcx.sess,
+ span,
+ E0390,
+ "only a single inherent implementation marked with `#[lang = \
+ \"{}\"]` is allowed for the `{}` primitive",
+ lang,
+ ty)
+ .span_help(span, "consider using a trait to implement these methods")
+ .emit();
+ }
+ }
+ }
+}
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
+use rustc::hir;
+use rustc::hir::itemlikevisit::ItemLikeVisitor;
+use rustc::lint;
+use rustc::traits::{self, Reveal};
+use rustc::ty::{self, TyCtxt};
+
+use syntax_pos::DUMMY_SP;
+
+pub fn crate_inherent_impls_overlap_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ crate_num: CrateNum) {
+ assert_eq!(crate_num, LOCAL_CRATE);
+ let krate = tcx.hir.krate();
+ krate.visit_all_item_likes(&mut InherentOverlapChecker { tcx });
+}
+
+struct InherentOverlapChecker<'a, 'tcx: 'a> {
+ tcx: TyCtxt<'a, 'tcx, 'tcx>
+}
+
+impl<'a, 'tcx> InherentOverlapChecker<'a, 'tcx> {
+ fn check_for_common_items_in_impls(&self, impl1: DefId, impl2: DefId) {
+ #[derive(Copy, Clone, PartialEq)]
+ enum Namespace {
+ Type,
+ Value,
+ }
+
+ let name_and_namespace = |def_id| {
+ let item = self.tcx.associated_item(def_id);
+ (item.name, match item.kind {
+ ty::AssociatedKind::Type => Namespace::Type,
+ ty::AssociatedKind::Const |
+ ty::AssociatedKind::Method => Namespace::Value,
+ })
+ };
+
+ let impl_items1 = self.tcx.associated_item_def_ids(impl1);
+ let impl_items2 = self.tcx.associated_item_def_ids(impl2);
+
+ for &item1 in &impl_items1[..] {
+ let (name, namespace) = name_and_namespace(item1);
+
+ for &item2 in &impl_items2[..] {
+ if (name, namespace) == name_and_namespace(item2) {
+ let msg = format!("duplicate definitions with name `{}`", name);
+ let node_id = self.tcx.hir.as_local_node_id(item1).unwrap();
+ self.tcx.sess.add_lint(lint::builtin::OVERLAPPING_INHERENT_IMPLS,
+ node_id,
+ self.tcx.span_of_impl(item1).unwrap(),
+ msg);
+ }
+ }
+ }
+ }
+
+ fn check_for_overlapping_inherent_impls(&self, ty_def_id: DefId) {
+ let impls = ty::queries::inherent_impls::get(self.tcx, DUMMY_SP, ty_def_id);
+
+ for (i, &impl1_def_id) in impls.iter().enumerate() {
+ for &impl2_def_id in &impls[(i + 1)..] {
+ self.tcx.infer_ctxt((), Reveal::UserFacing).enter(|infcx| {
+ if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() {
+ self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id)
+ }
+ });
+ }
+ }
+ }
+}
+
+impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for InherentOverlapChecker<'a, 'tcx> {
+ fn visit_item(&mut self, item: &'v hir::Item) {
+ match item.node {
+ hir::ItemEnum(..) |
+ hir::ItemStruct(..) |
+ hir::ItemTrait(..) |
+ hir::ItemUnion(..) => {
+ let type_def_id = self.tcx.hir.local_def_id(item.id);
+ self.check_for_overlapping_inherent_impls(type_def_id);
+ }
+ _ => {}
+ }
+ }
+
+ fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem) {
+ }
+
+ fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem) {
+ }
+}
+
use syntax_pos::DUMMY_SP;
mod builtin;
-mod inherent;
+mod inherent_impls;
+mod inherent_impls_overlap;
mod orphan;
mod overlap;
mod unsafety;
}
pub fn provide(providers: &mut Providers) {
+ use self::builtin::coerce_unsized_info;
+ use self::inherent_impls::{crate_inherent_impls, inherent_impls};
+ use self::inherent_impls_overlap::crate_inherent_impls_overlap_check;
+
*providers = Providers {
coherent_trait,
- coherent_inherent_impls,
+ crate_inherent_impls,
+ inherent_impls,
+ crate_inherent_impls_overlap_check,
+ coerce_unsized_info,
..*providers
};
}
builtin::check_trait(tcx, def_id);
}
-fn coherent_inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, _: CrateNum) {
- inherent::check(tcx);
-}
-
pub fn check_coherence<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let _task = tcx.dep_graph.in_task(DepNode::Coherence);
for &trait_def_id in tcx.hir.krate().trait_impls.keys() {
orphan::check(tcx);
overlap::check_default_impls(tcx);
- ty::queries::coherent_inherent_impls::get(tcx, DUMMY_SP, LOCAL_CRATE);
+ // these queries are executed for side-effects (error reporting):
+ ty::queries::crate_inherent_impls::get(tcx, DUMMY_SP, LOCAL_CRATE);
+ ty::queries::crate_inherent_impls_overlap_check::get(tcx, DUMMY_SP, LOCAL_CRATE);
}
[dependencies]
arena = { path = "../libarena" }
+env_logger = { version = "0.4", default-features = false }
+log = "0.3"
rustc = { path = "../librustc" }
rustc_back = { path = "../librustc_back" }
rustc_const_eval = { path = "../librustc_const_eval" }
-rustc_driver = { path = "../librustc_driver" }
rustc_data_structures = { path = "../librustc_data_structures" }
+rustc_driver = { path = "../librustc_driver" }
rustc_errors = { path = "../librustc_errors" }
rustc_lint = { path = "../librustc_lint" }
rustc_metadata = { path = "../librustc_metadata" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
-log = { path = "../liblog" }
[build-dependencies]
build_helper = { path = "../build_helper" }
pub fn build_impls(cx: &DocContext, did: DefId) -> Vec<clean::Item> {
let tcx = cx.tcx;
- tcx.populate_inherent_implementations_for_type_if_necessary(DUMMY_SP, did);
let mut impls = Vec::new();
- if let Some(i) = tcx.maps.inherent_impls.borrow().get(&did) {
- for &did in i.iter() {
- build_impl(cx, did, &mut impls);
- }
+ for &did in ty::queries::inherent_impls::get(tcx, DUMMY_SP, did).iter() {
+ build_impl(cx, did, &mut impls);
}
+
// If this is the first time we've inlined something from another crate, then
// we inline *all* impls from all the crates into this crate. Note that there's
// currently no way for us to filter this based on type, and we likely need
use rustc::dep_graph::DepGraph;
use rustc::session::{self, config};
use rustc::hir::def_id::DefId;
-use rustc::hir::def::{Def, ExportMap};
+use rustc::hir::def::Def;
use rustc::middle::privacy::AccessLevels;
use rustc::ty::{self, TyCtxt, GlobalArenas};
use rustc::hir::map as hir_map;
pub ty_substs: RefCell<FxHashMap<Def, clean::Type>>,
/// Table node id of lifetime parameter definition -> substituted lifetime
pub lt_substs: RefCell<FxHashMap<ast::NodeId, clean::Lifetime>>,
- pub export_map: ExportMap,
}
impl<'a, 'tcx> DocContext<'a, 'tcx> {
sess.fatal("Compilation failed, aborting rustdoc");
}
- let ty::CrateAnalysis { access_levels, export_map, .. } = analysis;
+ let ty::CrateAnalysis { access_levels, .. } = analysis;
// Convert from a NodeId set to a DefId set since we don't always have easy access
// to the map from defid -> nodeid
let access_levels = AccessLevels {
- map: access_levels.map.into_iter()
- .map(|(k, v)| (tcx.hir.local_def_id(k), v))
+ map: access_levels.map.iter()
+ .map(|(&k, &v)| (tcx.hir.local_def_id(k), v))
.collect()
};
renderinfo: Default::default(),
ty_substs: Default::default(),
lt_substs: Default::default(),
- export_map: export_map,
};
debug!("crate: {:?}", tcx.hir.krate());
if attr.is_word() {
Some(format!("{}", name))
} else if let Some(v) = attr.value_str() {
- Some(format!("{} = {:?}", name, &v.as_str()[..]))
+ Some(format!("{} = {:?}", name, v.as_str()))
} else if let Some(values) = attr.meta_item_list() {
let display: Vec<_> = values.iter().filter_map(|attr| {
attr.meta_item().and_then(|mi| render_attribute(mi))
for attr in &it.attrs.other_attrs {
let name = attr.name().unwrap();
- if !ATTRIBUTE_WHITELIST.contains(&&name.as_str()[..]) {
+ if !ATTRIBUTE_WHITELIST.contains(&&*name.as_str()) {
continue;
}
if let Some(s) = render_attribute(&attr.meta().unwrap()) {
extern crate arena;
extern crate getopts;
+extern crate env_logger;
extern crate libc;
extern crate rustc;
extern crate rustc_const_eval;
pub fn main() {
const STACK_SIZE: usize = 32_000_000; // 32MB
+ env_logger::init().unwrap();
let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
let s = env::args().collect::<Vec<_>>();
main_args(&s)
self.visit_item(item, None, &mut om);
}
self.inside_public_path = orig_inside_public_path;
- if let Some(exports) = self.cx.export_map.get(&id) {
+ if let Some(exports) = self.cx.tcx.export_map.get(&id) {
for export in exports {
if let Def::Macro(def_id, ..) = export.def {
if def_id.krate == LOCAL_CRATE {
/// If an I/O error is encountered then all bytes read so far will be
/// present in `buf` and its length will have been adjusted appropriately.
///
- /// # Examples
- ///
- /// A locked standard input implements `BufRead`. In this example, we'll
- /// read from standard input until we see an `a` byte.
- ///
/// [`fill_buf`]: #tymethod.fill_buf
/// [`ErrorKind::Interrupted`]: enum.ErrorKind.html#variant.Interrupted
///
- /// ```
- /// use std::io;
- /// use std::io::prelude::*;
+ /// # Examples
///
- /// fn foo() -> io::Result<()> {
- /// let stdin = io::stdin();
- /// let mut stdin = stdin.lock();
- /// let mut buffer = Vec::new();
+ /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In
+ /// this example, we use [`Cursor`] to read all the bytes in a byte slice
+ /// in hyphen delimited segments:
///
- /// stdin.read_until(b'a', &mut buffer)?;
+ /// [`Cursor`]: struct.Cursor.html
///
- /// println!("{:?}", buffer);
- /// # Ok(())
- /// # }
+ /// ```
+ /// use std::io::{self, BufRead};
+ ///
+ /// let mut cursor = io::Cursor::new(b"lorem-ipsum");
+ /// let mut buf = vec![];
+ ///
+ /// // cursor is at 'l'
+ /// let num_bytes = cursor.read_until(b'-', &mut buf)
+ /// .expect("reading from cursor won't fail");
+ /// assert_eq!(num_bytes, 6);
+ /// assert_eq!(buf, b"lorem-");
+ /// buf.clear();
+ ///
+ /// // cursor is at 'i'
+ /// let num_bytes = cursor.read_until(b'-', &mut buf)
+ /// .expect("reading from cursor won't fail");
+ /// assert_eq!(num_bytes, 5);
+ /// assert_eq!(buf, b"ipsum");
+ /// buf.clear();
+ ///
+ /// // cursor is at EOF
+ /// let num_bytes = cursor.read_until(b'-', &mut buf)
+ /// .expect("reading from cursor won't fail");
+ /// assert_eq!(num_bytes, 0);
+ /// assert_eq!(buf, b"");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn read_until(&mut self, byte: u8, buf: &mut Vec<u8>) -> Result<usize> {
///
/// # Examples
///
- /// A locked standard input implements `BufRead`. In this example, we'll
- /// read all of the lines from standard input. If we were to do this in
- /// an actual project, the [`lines`] method would be easier, of
- /// course.
+ /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In
+ /// this example, we use [`Cursor`] to read all the lines in a byte slice:
///
- /// [`lines`]: #method.lines
- /// [`read_until`]: #method.read_until
+ /// [`Cursor`]: struct.Cursor.html
///
/// ```
- /// use std::io;
- /// use std::io::prelude::*;
- ///
- /// let stdin = io::stdin();
- /// let mut stdin = stdin.lock();
- /// let mut buffer = String::new();
- ///
- /// while stdin.read_line(&mut buffer).unwrap() > 0 {
- /// // work with buffer
- /// println!("{:?}", buffer);
- ///
- /// buffer.clear();
- /// }
+ /// use std::io::{self, BufRead};
+ ///
+ /// let mut cursor = io::Cursor::new(b"foo\nbar");
+ /// let mut buf = String::new();
+ ///
+ /// // cursor is at 'f'
+ /// let num_bytes = cursor.read_line(&mut buf)
+ /// .expect("reading from cursor won't fail");
+ /// assert_eq!(num_bytes, 4);
+ /// assert_eq!(buf, "foo\n");
+ /// buf.clear();
+ ///
+ /// // cursor is at 'b'
+ /// let num_bytes = cursor.read_line(&mut buf)
+ /// .expect("reading from cursor won't fail");
+ /// assert_eq!(num_bytes, 3);
+ /// assert_eq!(buf, "bar");
+ /// buf.clear();
+ ///
+ /// // cursor is at EOF
+ /// let num_bytes = cursor.read_line(&mut buf)
+ /// .expect("reading from cursor won't fail");
+ /// assert_eq!(num_bytes, 0);
+ /// assert_eq!(buf, "");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn read_line(&mut self, buf: &mut String) -> Result<usize> {
/// This function will yield errors whenever [`read_until`] would have
/// also yielded an error.
///
- /// # Examples
- ///
- /// A locked standard input implements `BufRead`. In this example, we'll
- /// read some input from standard input, splitting on commas.
- ///
/// [`io::Result`]: type.Result.html
/// [`Vec<u8>`]: ../vec/struct.Vec.html
/// [`read_until`]: #method.read_until
///
+ /// # Examples
+ ///
+ /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In
+ /// this example, we use [`Cursor`] to iterate over all hyphen delimited
+ /// segments in a byte slice:
+ ///
+ /// [`Cursor`]: struct.Cursor.html
+ ///
/// ```
- /// use std::io;
- /// use std::io::prelude::*;
+ /// use std::io::{self, BufRead};
///
- /// let stdin = io::stdin();
+ /// let cursor = io::Cursor::new(b"lorem-ipsum-dolor");
///
- /// for content in stdin.lock().split(b',') {
- /// println!("{:?}", content.unwrap());
- /// }
+ /// let mut split_iter = cursor.split(b'-').map(|l| l.unwrap());
+ /// assert_eq!(split_iter.next(), Some(b"lorem".to_vec()));
+ /// assert_eq!(split_iter.next(), Some(b"ipsum".to_vec()));
+ /// assert_eq!(split_iter.next(), Some(b"dolor".to_vec()));
+ /// assert_eq!(split_iter.next(), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn split(self, byte: u8) -> Split<Self> where Self: Sized {
///
/// # Examples
///
- /// A locked standard input implements `BufRead`:
+ /// [`std::io::Cursor`][`Cursor`] is a type that implements `BufRead`. In
+ /// this example, we use [`Cursor`] to iterate over all the lines in a byte
+ /// slice.
+ ///
+ /// [`Cursor`]: struct.Cursor.html
///
/// ```
- /// use std::io;
- /// use std::io::prelude::*;
+ /// use std::io::{self, BufRead};
///
- /// let stdin = io::stdin();
+ /// let cursor = io::Cursor::new(b"lorem\nipsum\r\ndolor");
///
- /// for line in stdin.lock().lines() {
- /// println!("{}", line.unwrap());
- /// }
+ /// let mut lines_iter = cursor.lines().map(|l| l.unwrap());
+ /// assert_eq!(lines_iter.next(), Some(String::from("lorem")));
+ /// assert_eq!(lines_iter.next(), Some(String::from("ipsum")));
+ /// assert_eq!(lines_iter.next(), Some(String::from("dolor")));
+ /// assert_eq!(lines_iter.next(), None);
/// ```
///
/// # Errors
use fmt;
-#[cfg(any(target_os = "android",
- target_os = "emscripten",
+#[cfg(any(target_os = "emscripten",
all(target_os = "linux", any(target_arch = "aarch64",
target_arch = "arm",
target_arch = "powerpc",
target_arch = "powerpc64",
target_arch = "s390x")),
+ all(target_os = "android", any(target_arch = "aarch64",
+ target_arch = "arm")),
all(target_os = "fuchsia", target_arch = "aarch64")))]
#[stable(feature = "raw_os", since = "1.1.0")] pub type c_char = u8;
-#[cfg(not(any(target_os = "android",
- target_os = "emscripten",
+#[cfg(not(any(target_os = "emscripten",
all(target_os = "linux", any(target_arch = "aarch64",
target_arch = "arm",
target_arch = "powerpc",
target_arch = "powerpc64",
target_arch = "s390x")),
+ all(target_os = "android", any(target_arch = "aarch64",
+ target_arch = "arm")),
all(target_os = "fuchsia", target_arch = "aarch64"))))]
#[stable(feature = "raw_os", since = "1.1.0")] pub type c_char = i8;
#[stable(feature = "raw_os", since = "1.1.0")] pub type c_schar = i8;
let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_STREAM)?;
let (addr, len) = sockaddr_un(path)?;
- cvt(libc::bind(*inner.as_inner(), &addr as *const _ as *const _, len))?;
+ cvt(libc::bind(*inner.as_inner(), &addr as *const _ as *const _, len as _))?;
cvt(libc::listen(*inner.as_inner(), 128))?;
Ok(UnixListener(inner))
let socket = UnixDatagram::unbound()?;
let (addr, len) = sockaddr_un(path)?;
- cvt(libc::bind(*socket.0.as_inner(), &addr as *const _ as *const _, len))?;
+ cvt(libc::bind(*socket.0.as_inner(), &addr as *const _ as *const _, len as _))?;
Ok(socket)
}
}
}
+ // Android with an API level less than 21 defines the sig* functions inline,
+ // so they are not available for dynamic linking. Implementing sigemptyset and
+ // sigaddset allows us to support older Android versions (independent of the
+ // libc version). The following implementations are based on https://git.io/vSkNf
+
#[cfg(not(target_os = "android"))]
extern {
+ #[cfg_attr(target_os = "netbsd", link_name = "__sigemptyset14")]
+ fn sigemptyset(set: *mut libc::sigset_t) -> libc::c_int;
+
#[cfg_attr(target_os = "netbsd", link_name = "__sigaddset14")]
fn sigaddset(set: *mut libc::sigset_t, signum: libc::c_int) -> libc::c_int;
}
+ #[cfg(target_os = "android")]
+ unsafe fn sigemptyset(set: *mut libc::sigset_t) -> libc::c_int {
+ libc::memset(set as *mut _, 0, mem::size_of::<libc::sigset_t>());
+ return 0;
+ }
+
#[cfg(target_os = "android")]
unsafe fn sigaddset(set: *mut libc::sigset_t, signum: libc::c_int) -> libc::c_int {
use slice;
let mut set: libc::sigset_t = mem::uninitialized();
let mut old_set: libc::sigset_t = mem::uninitialized();
- t!(cvt(libc::sigemptyset(&mut set)));
+ t!(cvt(sigemptyset(&mut set)));
t!(cvt(sigaddset(&mut set, libc::SIGINT)));
t!(cvt(libc::pthread_sigmask(libc::SIG_SETMASK, &set, &mut old_set)));
// need to clean things up now to avoid confusing the program
// we're about to run.
let mut set: libc::sigset_t = mem::uninitialized();
- t!(cvt(libc::sigemptyset(&mut set)));
+ if cfg!(target_os = "android") {
+ // Implementing sigemptyset allows us to support older Android
+ // versions. See the comment about Android and sig* functions in
+ // process_common.rs
+ libc::memset(&mut set as *mut _ as *mut _,
+ 0,
+ mem::size_of::<libc::sigset_t>());
+ } else {
+ t!(cvt(libc::sigemptyset(&mut set)));
+ }
t!(cvt(libc::pthread_sigmask(libc::SIG_SETMASK, &set,
ptr::null_mut())));
let ret = sys::signal(libc::SIGPIPE, libc::SIG_DFL);
// INVALID_HANDLE_VALUE.
Stdio::Inherit => {
match stdio::get(stdio_id) {
- Ok(io) => io.handle().duplicate(0, true,
- c::DUPLICATE_SAME_ACCESS),
+ Ok(io) => {
+ let io = Handle::new(io.handle());
+ let ret = io.duplicate(0, true,
+ c::DUPLICATE_SAME_ACCESS);
+ io.into_raw();
+ return ret
+ }
Err(..) => Ok(Handle::new(c::INVALID_HANDLE_VALUE)),
}
}
use sys::handle::Handle;
use sys_common::io::read_to_end_uninitialized;
-pub struct NoClose(Option<Handle>);
-
pub enum Output {
- Console(NoClose),
- Pipe(NoClose),
+ Console(c::HANDLE),
+ Pipe(c::HANDLE),
}
pub struct Stdin {
- handle: Output,
utf8: Mutex<io::Cursor<Vec<u8>>>,
}
-pub struct Stdout(Output);
-pub struct Stderr(Output);
+pub struct Stdout;
+pub struct Stderr;
pub fn get(handle: c::DWORD) -> io::Result<Output> {
let handle = unsafe { c::GetStdHandle(handle) };
if handle == c::INVALID_HANDLE_VALUE {
Err(io::Error::last_os_error())
} else if handle.is_null() {
- Err(io::Error::new(io::ErrorKind::Other,
- "no stdio handle available for this process"))
+ Err(io::Error::from_raw_os_error(c::ERROR_INVALID_HANDLE as i32))
} else {
- let ret = NoClose::new(handle);
let mut out = 0;
match unsafe { c::GetConsoleMode(handle, &mut out) } {
- 0 => Ok(Output::Pipe(ret)),
- _ => Ok(Output::Console(ret)),
+ 0 => Ok(Output::Pipe(handle)),
+ _ => Ok(Output::Console(handle)),
}
}
}
-fn write(out: &Output, data: &[u8]) -> io::Result<usize> {
- let handle = match *out {
- Output::Console(ref c) => c.get().raw(),
- Output::Pipe(ref p) => return p.get().write(data),
+fn write(handle: c::DWORD, data: &[u8]) -> io::Result<usize> {
+ let handle = match try!(get(handle)) {
+ Output::Console(c) => c,
+ Output::Pipe(p) => {
+ let handle = Handle::new(p);
+ let ret = handle.write(data);
+ handle.into_raw();
+ return ret
+ }
};
+
// As with stdin on windows, stdout often can't handle writes of large
// sizes. For an example, see #14940. For this reason, don't try to
// write the entire output buffer on windows.
impl Stdin {
pub fn new() -> io::Result<Stdin> {
- get(c::STD_INPUT_HANDLE).map(|handle| {
- Stdin {
- handle: handle,
- utf8: Mutex::new(Cursor::new(Vec::new())),
- }
+ Ok(Stdin {
+ utf8: Mutex::new(Cursor::new(Vec::new())),
})
}
pub fn read(&self, buf: &mut [u8]) -> io::Result<usize> {
- let handle = match self.handle {
- Output::Console(ref c) => c.get().raw(),
- Output::Pipe(ref p) => return p.get().read(buf),
+ let handle = match try!(get(c::STD_INPUT_HANDLE)) {
+ Output::Console(c) => c,
+ Output::Pipe(p) => {
+ let handle = Handle::new(p);
+ let ret = handle.read(buf);
+ handle.into_raw();
+ return ret
+ }
};
let mut utf8 = self.utf8.lock().unwrap();
// Read more if the buffer is empty
Ok(utf8) => utf8.into_bytes(),
Err(..) => return Err(invalid_encoding()),
};
- if let Output::Console(_) = self.handle {
- if let Some(&last_byte) = data.last() {
- if last_byte == CTRL_Z {
- data.pop();
- }
+ if let Some(&last_byte) = data.last() {
+ if last_byte == CTRL_Z {
+ data.pop();
}
}
*utf8 = Cursor::new(data);
impl Stdout {
pub fn new() -> io::Result<Stdout> {
- get(c::STD_OUTPUT_HANDLE).map(Stdout)
+ Ok(Stdout)
}
pub fn write(&self, data: &[u8]) -> io::Result<usize> {
- write(&self.0, data)
+ write(c::STD_OUTPUT_HANDLE, data)
}
pub fn flush(&self) -> io::Result<()> {
impl Stderr {
pub fn new() -> io::Result<Stderr> {
- get(c::STD_ERROR_HANDLE).map(Stderr)
+ Ok(Stderr)
}
pub fn write(&self, data: &[u8]) -> io::Result<usize> {
- write(&self.0, data)
+ write(c::STD_ERROR_HANDLE, data)
}
pub fn flush(&self) -> io::Result<()> {
}
}
-impl NoClose {
- fn new(handle: c::HANDLE) -> NoClose {
- NoClose(Some(Handle::new(handle)))
- }
-
- fn get(&self) -> &Handle { self.0.as_ref().unwrap() }
-}
-
-impl Drop for NoClose {
- fn drop(&mut self) {
- self.0.take().unwrap().into_raw();
- }
-}
-
impl Output {
- pub fn handle(&self) -> &Handle {
- let nc = match *self {
- Output::Console(ref c) => c,
- Output::Pipe(ref c) => c,
- };
- nc.0.as_ref().unwrap()
+ pub fn handle(&self) -> c::HANDLE {
+ match *self {
+ Output::Console(c) => c,
+ Output::Pipe(c) => c,
+ }
}
}
// Bind our new socket
let (addrp, len) = addr.into_inner();
- cvt(unsafe { c::bind(*sock.as_inner(), addrp, len) })?;
+ cvt(unsafe { c::bind(*sock.as_inner(), addrp, len as _) })?;
// Start listening
cvt(unsafe { c::listen(*sock.as_inner(), 128) })?;
let sock = Socket::new(addr, c::SOCK_DGRAM)?;
let (addrp, len) = addr.into_inner();
- cvt(unsafe { c::bind(*sock.as_inner(), addrp, len) })?;
+ cvt(unsafe { c::bind(*sock.as_inner(), addrp, len as _) })?;
Ok(UdpSocket { inner: sock })
}
[dependencies]
serialize = { path = "../libserialize" }
-log = { path = "../liblog" }
+log = "0.3"
rustc_bitflags = { path = "../librustc_bitflags" }
syntax_pos = { path = "../libsyntax_pos" }
rustc_errors = { path = "../librustc_errors" }
impl<'a, 'b> Folder for PlaceholderExpander<'a, 'b> {
fn fold_item(&mut self, item: P<ast::Item>) -> SmallVector<P<ast::Item>> {
match item.node {
- ast::ItemKind::Mac(ref mac) if !mac.node.path.segments.is_empty() => {}
ast::ItemKind::Mac(_) => return self.remove(item.id).make_items(),
+ ast::ItemKind::MacroDef(_) => return SmallVector::one(item),
_ => {}
}
};
let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), false);
p.root_module_name = cx.current_expansion.module.mod_path.last()
- .map(|id| (*id.name.as_str()).to_owned());
+ .map(|id| id.name.as_str().to_string());
p.check_unknown_macro_variable();
// Let the context choose how to interpret the result.
impl GatedCfg {
pub fn gate(cfg: &ast::MetaItem) -> Option<GatedCfg> {
- let name = &*cfg.name().as_str();
+ let name = cfg.name().as_str();
GATED_CFGS.iter()
.position(|info| info.0 == name)
.map(|idx| {
impl<'a> Context<'a> {
fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) {
debug!("check_attribute(attr = {:?})", attr);
- let name = unwrap_or!(attr.name(), return);
-
+ let name = unwrap_or!(attr.name(), return).as_str();
for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES {
if name == n {
if let &Gated(_, ref name, ref desc, ref has_feature) = gateage {
return;
}
}
- if name.as_str().starts_with("rustc_") {
+ if name.starts_with("rustc_") {
gate_feature!(self, rustc_attrs, attr.span,
"unless otherwise specified, attributes \
with the prefix `rustc_` \
are reserved for internal compiler diagnostics");
- } else if name.as_str().starts_with("derive_") {
+ } else if name.starts_with("derive_") {
gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE);
} else if !attr::is_known(attr) {
// Only run the custom attribute lint during regular
fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) {
if let Some(path) = attr::first_attr_value_str_by_name(attrs, "path") {
- self.directory.path.push(&*path.as_str());
+ self.directory.path.push(&path.as_str());
self.directory.ownership = DirectoryOwnership::Owned;
} else {
- self.directory.path.push(&*id.name.as_str());
+ self.directory.path.push(&id.name.as_str());
}
}
pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option<PathBuf> {
- attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d.as_str()))
+ attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&d.as_str()))
}
/// Returns either a path to a module, or .
}
}
-impl<'a> PartialEq<&'a str> for Symbol {
- fn eq(&self, other: &&str) -> bool {
- *self.as_str() == **other
+impl<T: ::std::ops::Deref<Target=str>> PartialEq<T> for Symbol {
+ fn eq(&self, other: &T) -> bool {
+ self.as_str() == other.deref()
}
}
/// destroyed. In particular, they must not access string contents. This can
/// be fixed in the future by just leaking all strings until thread death
/// somehow.
-#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)]
+#[derive(Clone, Hash, PartialOrd, Eq, Ord)]
pub struct InternedString {
string: &'static str,
}
+impl<U: ?Sized> ::std::convert::AsRef<U> for InternedString where str: ::std::convert::AsRef<U> {
+ fn as_ref(&self) -> &U {
+ self.string.as_ref()
+ }
+}
+
+impl<T: ::std::ops::Deref<Target = str>> ::std::cmp::PartialEq<T> for InternedString {
+ fn eq(&self, other: &T) -> bool {
+ self.string == other.deref()
+ }
+}
+
+impl ::std::cmp::PartialEq<InternedString> for str {
+ fn eq(&self, other: &InternedString) -> bool {
+ self == other.string
+ }
+}
+
+impl<'a> ::std::cmp::PartialEq<InternedString> for &'a str {
+ fn eq(&self, other: &InternedString) -> bool {
+ *self == other.string
+ }
+}
+
+impl ::std::cmp::PartialEq<InternedString> for String {
+ fn eq(&self, other: &InternedString) -> bool {
+ self == other.string
+ }
+}
+
+impl<'a> ::std::cmp::PartialEq<InternedString> for &'a String {
+ fn eq(&self, other: &InternedString) -> bool {
+ *self == other.string
+ }
+}
+
impl !Send for InternedString { }
impl ::std::ops::Deref for InternedString {
fn is_test_crate(krate: &ast::Crate) -> bool {
match attr::find_crate_name(&krate.attrs) {
- Some(s) if "test" == &*s.as_str() => true,
+ Some(s) if "test" == s.as_str() => true,
_ => false
}
}
[dependencies]
fmt_macros = { path = "../libfmt_macros" }
-log = { path = "../liblog" }
+log = "0.3"
proc_macro = { path = "../libproc_macro" }
rustc_errors = { path = "../librustc_errors" }
syntax = { path = "../libsyntax" }
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:two_macros.rs
+
+#![feature(use_extern_macros)]
+
+mod foo {
+ extern crate two_macros;
+ pub use self::two_macros::m as panic;
+}
+
+mod m1 {
+ use foo::panic; // ok
+ fn f() { panic!(); }
+}
+
+mod m2 {
+ use foo::*; //~ NOTE `panic` could refer to the name imported here
+ fn f() { panic!(); } //~ ERROR ambiguous
+ //~| NOTE `panic` is also a builtin macro
+ //~| NOTE consider adding an explicit import of `panic` to disambiguate
+}
+
+mod m3 {
+ ::two_macros::m!(use foo::panic;); //~ NOTE `panic` could refer to the name imported here
+ //~| NOTE in this expansion
+ fn f() { panic!(); } //~ ERROR ambiguous
+ //~| NOTE `panic` is also a builtin macro
+ //~| NOTE macro-expanded macro imports do not shadow
+}
+
+mod m4 {
+ macro_rules! panic { () => {} } // ok
+ panic!();
+}
+
+mod m5 {
+ macro_rules! m { () => {
+ macro_rules! panic { () => {} } //~ ERROR `panic` is already in scope
+ //~| NOTE macro-expanded `macro_rules!`s may not shadow existing macros
+ } }
+ m!(); //~ NOTE in this expansion
+ //~| NOTE in this expansion
+ panic!();
+}
+
+#[macro_use(n)] //~ NOTE `n` could also refer to the name imported here
+extern crate two_macros;
+mod bar {
+ pub use two_macros::m as n;
+}
+
+mod m6 {
+ use bar::n; // ok
+ n!();
+}
+
+mod m7 {
+ use bar::*; //~ NOTE `n` could refer to the name imported here
+ n!(); //~ ERROR ambiguous
+ //~| NOTE consider adding an explicit import of `n` to disambiguate
+}
+
+fn main() {}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ [0; ..10];
+ //~^ ERROR mismatched types
+ //~| expected type `usize`
+ //~| found type `std::ops::RangeTo<{integer}>`
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn f<'a: 'static>(_: &'a i32) {} //~WARN unnecessary lifetime parameter `'a`
+
+fn main() {
+ let x = 0;
+ f(&x); //~ERROR does not live long enough
+}
+++ /dev/null
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![feature(rustc_private)]
-
-#[macro_use] extern crate log;
-
-pub fn foo<T>() {
- fn death() -> isize { panic!() }
- debug!("{}", (||{ death() })());
-}
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags: -C debug-assertions=no
-// exec-env:RUST_LOG=conditional-debug-macro-off=4
-
-
-#![feature(rustc_private)]
-
-#[macro_use]
-extern crate log;
-
-pub fn main() {
- // only panics if println! evaluates its argument.
- debug!("{:?}", { if true { panic!() } });
-}
+++ /dev/null
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// compile-flags:-C debug-assertions=no
-// exec-env:RUST_LOG=logging-enabled-debug=debug
-
-
-#![feature(rustc_private)]
-
-#[macro_use]
-extern crate log;
-
-pub fn main() {
- if log_enabled!(log::DEBUG) {
- panic!("what?! debugging?");
- }
-}
+++ /dev/null
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// exec-env:RUST_LOG=logging_enabled=info
-// ignore-emscripten: FIXME(#31622)
-
-
-#![feature(rustc_private)]
-
-#[macro_use]
-extern crate log;
-
-pub fn main() {
- if log_enabled!(log::DEBUG) {
- panic!("what?! debugging?");
- }
- if !log_enabled!(log::INFO) {
- panic!("what?! no info?");
- }
-}
+++ /dev/null
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:logging_right_crate.rs
-// exec-env:RUST_LOG=logging-right-crate=debug
-
-// This is a test for issue #3046 to make sure that when we monomorphize a
-// function from one crate to another the right top-level logging name is
-// preserved.
-//
-// It used to be the case that if logging were turned on for this crate, all
-// monomorphized functions from other crates had logging turned on (their
-// logging module names were all incorrect). This test ensures that this no
-// longer happens by enabling logging for *this* crate and then invoking a
-// function in an external crate which will panic when logging is enabled.
-
-// pretty-expanded FIXME #23616
-
-extern crate logging_right_crate;
-
-pub fn main() {
- // this function panicks if logging is turned on
- logging_right_crate::foo::<isize>();
-}
+++ /dev/null
-// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-windows
-// exec-env:RUST_LOG=debug
-// compile-flags:-C debug-assertions=y
-// ignore-emscripten: FIXME(#31622)
-
-#![feature(rustc_private)]
-
-#[macro_use]
-extern crate log;
-
-use std::process::Command;
-use std::env;
-use std::str;
-
-fn main() {
- let args: Vec<String> = env::args().collect();
- if args.len() > 1 && args[1] == "child" {
- debug!("foo");
- debug!("bar");
- return
- }
-
- let p = Command::new(&args[0])
- .arg("child")
- .output().unwrap();
- assert!(p.status.success());
- let mut lines = str::from_utf8(&p.stderr).unwrap().lines();
- assert!(lines.next().unwrap().contains("foo"));
- assert!(lines.next().unwrap().contains("bar"));
-}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// exec-env:RUST_LOG=rust_log_filter/foo
-// ignore-emscripten no threads support
-
-#![allow(unknown_features)]
-#![feature(box_syntax, std_misc, rustc_private)]
-
-#[macro_use]
-extern crate log;
-
-use std::sync::mpsc::{channel, Sender, Receiver};
-use std::thread;
-
-pub struct ChannelLogger {
- tx: Sender<String>
-}
-
-impl ChannelLogger {
- pub fn new() -> (Box<ChannelLogger>, Receiver<String>) {
- let (tx, rx) = channel();
- (box ChannelLogger { tx: tx }, rx)
- }
-}
-
-impl log::Logger for ChannelLogger {
- fn log(&mut self, record: &log::LogRecord) {
- self.tx.send(format!("{}", record.args)).unwrap();
- }
-}
-
-pub fn main() {
- let (logger, rx) = ChannelLogger::new();
-
- let t = thread::spawn(move|| {
- log::set_logger(logger);
-
- info!("foo");
- info!("bar");
- info!("foo bar");
- info!("bar foo");
- });
-
- assert_eq!(rx.recv().unwrap(), "foo");
- assert_eq!(rx.recv().unwrap(), "foo bar");
- assert_eq!(rx.recv().unwrap(), "bar foo");
- assert!(rx.recv().is_err());
-
- t.join();
-}
--- /dev/null
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rustc_private)]
+
+extern crate rustc_back;
+
+use std::fs::File;
+use std::io::{Read, Write};
+
+use rustc_back::tempdir::TempDir;
+
+#[cfg(unix)]
+fn switch_stdout_to(file: File) {
+ use std::os::unix::prelude::*;
+
+ extern {
+ fn dup2(old: i32, new: i32) -> i32;
+ }
+
+ unsafe {
+ assert_eq!(dup2(file.as_raw_fd(), 1), 1);
+ }
+}
+
+#[cfg(windows)]
+fn switch_stdout_to(file: File) {
+ use std::os::windows::prelude::*;
+
+ extern "system" {
+ fn SetStdHandle(nStdHandle: u32, handle: *mut u8) -> i32;
+ }
+
+ const STD_OUTPUT_HANDLE: u32 = (-11i32) as u32;
+
+ unsafe {
+ let rc = SetStdHandle(STD_OUTPUT_HANDLE,
+ file.into_raw_handle() as *mut _);
+ assert!(rc != 0);
+ }
+}
+
+fn main() {
+ let td = TempDir::new("foo").unwrap();
+ let path = td.path().join("bar");
+ let f = File::create(&path).unwrap();
+
+ println!("foo");
+ std::io::stdout().flush().unwrap();
+ switch_stdout_to(f);
+ println!("bar");
+ std::io::stdout().flush().unwrap();
+
+ let mut contents = String::new();
+ File::open(&path).unwrap().read_to_string(&mut contents).unwrap();
+ assert_eq!(contents, "bar\n");
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// exec-env:RUST_LOG=conditional-debug-macro-on=4
-
pub fn main() {
// exits early if println! evaluates its arguments, otherwise it
// will hit the panic.
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+macro_rules! m {
+ ($e:expr) => {
+ macro_rules! n { () => { $e } }
+ }
+}
+
+fn main() {
+ m!(foo!());
+}
[dependencies]
log = "0.3"
-env_logger = { version = "0.3.5", default-features = false }
+env_logger = { version = "0.4", default-features = false }
rustc-serialize = "0.3"
filetime = "0.1"