# "alternate" deployments, these are "nightlies" but have LLVM assertions
# turned on, they're deployed to a different location primarily for
# additional testing.
- - env: IMAGE=dist-x86_64-linux DEPLOY_ALT=1
+ - env: IMAGE=dist-x86_64-linux DEPLOY_ALT=1 CI_JOB_NAME=dist-x86_64-linux-alt
if: branch = try OR branch = auto
- env: >
MACOSX_DEPLOYMENT_TARGET=10.7
NO_LLVM_ASSERTIONS=1
NO_DEBUG_ASSERTIONS=1
+ CI_JOB_NAME=dist-x86_64-apple-alt
os: osx
osx_image: xcode9.3-moar
if: branch = auto
MACOSX_STD_DEPLOYMENT_TARGET=10.7
NO_LLVM_ASSERTIONS=1
NO_DEBUG_ASSERTIONS=1
+ CI_JOB_NAME=x86_64-apple
os: osx
osx_image: xcode9.3-moar
if: branch = auto
MACOSX_STD_DEPLOYMENT_TARGET=10.7
NO_LLVM_ASSERTIONS=1
NO_DEBUG_ASSERTIONS=1
+ CI_JOB_NAME=i686-apple
os: osx
osx_image: xcode9.3-moar
if: branch = auto
MACOSX_DEPLOYMENT_TARGET=10.7
NO_LLVM_ASSERTIONS=1
NO_DEBUG_ASSERTIONS=1
+ CI_JOB_NAME=dist-i686-apple
os: osx
osx_image: xcode9.3-moar
if: branch = auto
MACOSX_DEPLOYMENT_TARGET=10.7
NO_LLVM_ASSERTIONS=1
NO_DEBUG_ASSERTIONS=1
+ CI_JOB_NAME=dist-x86_64-apple
os: osx
osx_image: xcode9.3-moar
if: branch = auto
if: branch = auto
- env: IMAGE=x86_64-gnu-distcheck
if: branch = auto
- - env: IMAGE=x86_64-gnu-incremental
- if: branch = auto
- stage: publish toolstate
if: branch = master AND type = push
- MSYS_BITS: 64
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-profiler
SCRIPT: python x.py test
+ CI_JOB_NAME: x86_64-msvc
- MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc
SCRIPT: make appveyor-subset-1
+ CI_JOB_NAME: i686-msvc-1
- MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc
SCRIPT: make appveyor-subset-2
+ CI_JOB_NAME: i686-msvc-2
# MSVC aux tests
- MSYS_BITS: 64
RUST_CHECK_TARGET: check-aux EXCLUDE_CARGO=1
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc
+ CI_JOB_NAME: x86_64-msvc-aux
- MSYS_BITS: 64
SCRIPT: python x.py test src/tools/cargotest src/tools/cargo
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc
+ CI_JOB_NAME: x86_64-msvc-cargo
# MSVC tools tests
- MSYS_BITS: 64
SCRIPT: src/ci/docker/x86_64-gnu-tools/checktools.sh x.py /tmp/toolstates.json windows
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --save-toolstates=/tmp/toolstates.json --enable-test-miri
+ CI_JOB_NAME: x86_64-msvc-tools
# 32/64-bit MinGW builds.
#
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
+ CI_JOB_NAME: i686-mingw-1
- MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
SCRIPT: make appveyor-subset-2
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
+ CI_JOB_NAME: i686-mingw-2
- MSYS_BITS: 64
SCRIPT: python x.py test
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
+ CI_JOB_NAME: x86_64-mingw
# 32/64 bit MSVC and GNU deployment
- RUST_CONFIGURE_ARGS: >
--enable-profiler
SCRIPT: python x.py dist
DEPLOY: 1
+ CI_JOB_NAME: dist-x86_64-msvc
- RUST_CONFIGURE_ARGS: >
--build=i686-pc-windows-msvc
--target=i586-pc-windows-msvc
--enable-profiler
SCRIPT: python x.py dist
DEPLOY: 1
+ CI_JOB_NAME: dist-i686-msvc
- MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-full-tools
SCRIPT: python x.py dist
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
DEPLOY: 1
+ CI_JOB_NAME: dist-i686-mingw
- MSYS_BITS: 64
SCRIPT: python x.py dist
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-full-tools
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
DEPLOY: 1
+ CI_JOB_NAME: dist-x86_64-mingw
# "alternate" deployment, see .travis.yml for more info
- MSYS_BITS: 64
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-extended --enable-profiler
SCRIPT: python x.py dist
DEPLOY_ALT: 1
+ CI_JOB_NAME: dist-x86_64-msvc-alt
matrix:
fast_finish: true
# rustc to execute.
#lld = false
+# Whether to deny warnings in crates
+#deny-warnings = true
+
# =============================================================================
# Options for specific targets
#
name = "alloc"
version = "0.0.0"
dependencies = [
+ "compiler_builtins 0.0.0",
"core 0.0.0",
"rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"std_unicode 0.0.0",
"alloc_system 0.0.0",
"build_helper 0.1.0",
"cc 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.0.0",
"core 0.0.0",
"libc 0.0.0",
]
version = "0.0.0"
dependencies = [
"alloc 0.0.0",
+ "compiler_builtins 0.0.0",
"core 0.0.0",
"dlmalloc 0.0.0",
"libc 0.0.0",
]
+[[package]]
+name = "ammonia"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "html5ever 0.22.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tendril 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "ansi_term"
version = "0.11.0"
"winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "debug_unreachable"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "deglob"
version = "0.1.0"
version = "0.0.0"
dependencies = [
"alloc 0.0.0",
+ "compiler_builtins 0.0.0",
"core 0.0.0",
]
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "elasticlunr-rs"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "ena"
version = "0.9.2"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "futf"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "debug_unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "futures"
version = "0.1.20"
"winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "html5ever"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "markup5ever 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "humantime"
version = "1.1.1"
name = "libc"
version = "0.0.0"
dependencies = [
+ "compiler_builtins 0.0.0",
"core 0.0.0",
]
"pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "mac"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "maplit"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
+[[package]]
+name = "markup5ever"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_json 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
+ "string_cache 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tendril 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "matches"
version = "0.1.6"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
+ "ammonia 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"chrono 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"clap 2.31.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "elasticlunr-rs 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
"error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"handlebars 0.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
name = "panic_abort"
version = "0.0.0"
dependencies = [
+ "compiler_builtins 0.0.0",
"core 0.0.0",
"libc 0.0.0",
]
version = "0.0.0"
dependencies = [
"alloc 0.0.0",
+ "compiler_builtins 0.0.0",
"core 0.0.0",
"libc 0.0.0",
"unwind 0.0.0",
"ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "phf"
+version = "0.7.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "phf_codegen"
+version = "0.7.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "phf_generator"
+version = "0.7.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "phf_shared"
+version = "0.7.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "siphasher 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "pkg-config"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "precomputed-hash"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "pretty_assertions"
version = "0.5.1"
version = "0.0.0"
dependencies = [
"cc 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.0.0",
"core 0.0.0",
]
"nibble_vec 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "rand"
+version = "0.3.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "rand"
version = "0.4.2"
[[package]]
name = "rustc-ap-rustc_cratesio_shim"
-version = "73.0.0"
+version = "91.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "rustc-ap-rustc_data_structures"
-version = "73.0.0"
+version = "91.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-ap-rustc_errors"
-version = "73.0.0"
+version = "91.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"atty 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_data_structures 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-syntax_pos 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_data_structures 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-syntax_pos 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "rustc-ap-serialize"
-version = "73.0.0"
+version = "91.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "rustc-ap-syntax"
-version = "73.0.0"
+version = "91.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_cratesio_shim 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_data_structures 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-rustc_errors 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-syntax_pos 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_cratesio_shim 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_data_structures 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_errors 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-syntax_pos 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-ap-syntax_pos"
-version = "73.0.0"
+version = "91.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "rustc-ap-rustc_data_structures 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-serialize 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-rustc_data_structures 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-serialize 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"alloc_system 0.0.0",
"build_helper 0.1.0",
"cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.0.0",
"core 0.0.0",
]
"alloc_system 0.0.0",
"build_helper 0.1.0",
"cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.0.0",
"core 0.0.0",
]
"alloc_system 0.0.0",
"build_helper 0.1.0",
"cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.0.0",
"core 0.0.0",
]
"alloc_system 0.0.0",
"build_helper 0.1.0",
"cmake 0.1.29 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.0.0",
"core 0.0.0",
]
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "rustc-ap-syntax 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-ap-syntax 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "siphasher"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "smallvec"
version = "0.6.0"
name = "std_unicode"
version = "0.0.0"
dependencies = [
+ "compiler_builtins 0.0.0",
"core 0.0.0",
]
+[[package]]
+name = "string_cache"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "debug_unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)",
+ "string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "string_cache_codegen"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "string_cache_shared"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "strsim"
version = "0.7.0"
"winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "tendril"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "futf 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "mac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "utf-8 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "term"
version = "0.0.0"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "unreachable"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "unreachable"
version = "1.0.0"
name = "unwind"
version = "0.0.0"
dependencies = [
+ "compiler_builtins 0.0.0",
"core 0.0.0",
"libc 0.0.0",
]
"winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "utf-8"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
[[package]]
name = "utf8-ranges"
version = "1.0.0"
[metadata]
"checksum aho-corasick 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d6531d44de723825aa81398a6415283229725a00fa30713812ab9323faa82fc4"
+"checksum ammonia 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd4c682378117e4186a492b2252b9537990e1617f44aed9788b9a1149de45477"
"checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
"checksum ar 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "35c7a5669cb64f085739387e1308b74e6d44022464b7f1b63bbd4ceb6379ec31"
"checksum arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "a1e964f9e24d588183fcb43503abda40d288c8657dfc27311516ce2f05675aef"
"checksum crypto-hash 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "09de9ee0fc255ace04c7fa0763c9395a945c37c8292bb554f8d48361d1dcf1b4"
"checksum curl 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b70fd6394677d3c0e239ff4be6f2b3176e171ffd1c23ffdc541e78dea2b8bb5e"
"checksum curl-sys 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f46e49c7125131f5afaded06944d6888b55cbdf8eba05dae73c954019b907961"
+"checksum debug_unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9a032eac705ca39214d169f83e3d3da290af06d8d1d344d1baad2fd002dca4b3"
"checksum derive-new 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6fcb923bab47a948f1b01cec2f758fdebba95c9ebc255458654b2b88efe59d71"
"checksum diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3c2b69f912779fbb121ceb775d74d51e915af17aaebc38d28a592843a2dd0a3a"
"checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
"checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"
"checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"
+"checksum elasticlunr-rs 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "19ab5f8db0ffb76b5d87454566ceb502c3650e29057c053f93e884d3b884e344"
"checksum ena 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f8b449f3b18c89d2dbe40548d2ee4fa58ea0a08b761992da6ecb9788e4688834"
"checksum endian-type 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d"
"checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180"
"checksum fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213"
"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
+"checksum futf 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "51f93f3de6ba1794dcd5810b3546d004600a59a98266487c8407bc4b24e398f3"
"checksum futures 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5a3176836efa0b37f0e321b86672dfada1564aeb516fbed67b7c24050a0263"
"checksum getopts 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "b900c08c1939860ce8b54dc6a89e26e00c04c380fd0e09796799bd7f12861e05"
"checksum git2 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f41c0035c37ec11ed3f1e1946a76070b0c740393687e9a9c7612f6a709036b3"
"checksum handlebars 0.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "07af2ff31f66f39a5c8b8b8a5dc02734a453110146763e3a2323f4931a915a76"
"checksum hex 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "459d3cf58137bb02ad4adeef5036377ff59f066dbb82517b7192e3a5462a2abc"
"checksum home 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "8f94f6fbdc000a6eba0c8cf08632b2091bb59141d36ac321a2a96d6365e5e4dc"
+"checksum html5ever 0.22.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e579ac8647178ab915d400d7d22938bda5cd351c6c62e1c294d56884ccfc75fe"
"checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e"
"checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
"checksum if_chain 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "61bb90bdd39e3af69b0172dfc6130f6cd6332bf040fbb9bdd4401d37adbd48b8"
"checksum log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "89f010e843f2b1a31dbd316b3b8d443758bc634bed37aabade59c686d644e0a2"
"checksum log_settings 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3d382732ea0fbc09790c4899db3255bdea0fc78b54bf234bd18a63bb603915b6"
"checksum lzma-sys 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c1b93b78f89e8737dac81837fc8f5521ac162abcba902e1a3db949d55346d1da"
+"checksum mac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
+"checksum maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "08cbb6b4fef96b6d77bfc40ec491b1690c779e77b05cd9f07f787ed376fd4c43"
+"checksum markup5ever 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bfedc97d5a503e96816d10fedcd5b42f760b2e525ce2f7ec71f6a41780548475"
"checksum matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "100aabe6b8ff4e4a7e32c1c13523379802df0772b82466207ac25b013f193376"
"checksum mdbook 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "326d0861da5681a13c19a00952a56c254dd04f00eb944e506fdb36e93ae6f1ca"
"checksum memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "796fba70e76612589ed2ce7f45282f5af869e0fdd7cc6199fa1aa1f1d591ba9d"
"checksum pest 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0fce5d8b5cc33983fc74f78ad552b5522ab41442c4ca91606e4236eb4b5ceefc"
"checksum pest_derive 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "ab94faafeb93f4c5e3ce81ca0e5a779529a602ad5d09ae6d21996bfb8b6a52bf"
"checksum petgraph 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "8b30dc85588cd02b9b76f5e386535db546d21dc68506cff2abebee0b6445e8e4"
+"checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc"
+"checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f"
+"checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03"
+"checksum phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "07e24b0ca9643bdecd0632f2b3da6b1b89bbb0030e0b992afc1113b23a7bc2f2"
"checksum pkg-config 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)" = "3a8b4c6b8165cd1a1cd4b9b120978131389f64bdaf456435caa41e630edba903"
+"checksum precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
"checksum pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3a029430f0d744bc3d15dd474d591bed2402b645d024583082b9f63bb936dac6"
"checksum proc-macro2 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cd07deb3c6d1d9ff827999c7f9b04cdfd66b1b17ae508e14fe47b620f2282ae0"
"checksum proc-macro2 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "388d7ea47318c5ccdeb9ba6312cee7d3f65dd2804be8580a170fce410d50b786"
"checksum quote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7b0ff51282f28dc1b53fd154298feaa2e77c5ea0dba68e1fd8b03b72fbe13d2a"
"checksum racer 2.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "40d44bc30fc8d403b665286b2c9a83466ddbf69297668fb02b785c3e58eb8e0d"
"checksum radix_trie 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "03d0d770481e8af620ca61d3d304bf014f965d7f78e923dc58545e6a545070a9"
+"checksum rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1"
"checksum rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "eba5f8cb59cc50ed56be8880a5c7b496bfd9bd26394e176bc67884094145c2c5"
"checksum rayon 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80e811e76f1dbf68abf87a759083d34600017fc4e10b6bd5ad84a700f9dba4b1"
"checksum rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d24ad214285a7729b174ed6d3bcfcb80177807f959d95fafd5bfc5c4f201ac8"
"checksum rls-rustc 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "885f66b92757420572cbb02e033d4a9558c7413ca9b7ac206f28fd58ffdb44ea"
"checksum rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d7c7046dc6a92f2ae02ed302746db4382e75131b9ce20ce967259f6b5867a6a"
"checksum rls-vfs 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "be231e1e559c315bc60ced5ad2cc2d7a9c208ed7d4e2c126500149836fda19bb"
-"checksum rustc-ap-rustc_cratesio_shim 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "421262e22426c06306e46057a75048f883dbc43886f78dbe1e750397a9c9b8e6"
-"checksum rustc-ap-rustc_data_structures 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8460c1207f9abb48a9720aee8be418bcfac018b6eee7b740b98a410e7799d24a"
-"checksum rustc-ap-rustc_errors 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ad2077469162e52fcd84543334e18632088b9e342fe54e3b78c37d7077d09714"
-"checksum rustc-ap-serialize 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "69943901ae255dca5f63faeae2ff08b402d34a56d1eb50d34fbff6e83e6ace60"
-"checksum rustc-ap-syntax 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1a44363359a43df753e26a4d4fef72720af183de635ebae8699686cb5d5de813"
-"checksum rustc-ap-syntax_pos 73.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "413f464657e8d5f3864de308dba1867526f21a44809b6f338b34e8c0caf88fb0"
+"checksum rustc-ap-rustc_cratesio_shim 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0dd7571780b3232786f538b4e72f4a8d7fcffbb4a951d3861e18142d3cf2f0ac"
+"checksum rustc-ap-rustc_data_structures 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ae9ebbcbe26ea53eb0f3162c109892cd69ebb5efc986f3a21bce4891adf628f"
+"checksum rustc-ap-rustc_errors 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7c8385e5cf62344a4c6b2446723da0a82dad7ec97b2988b6494a197f231fc4b9"
+"checksum rustc-ap-serialize 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d08a7e3ce1d87fda88fdf51bdfec5886f42bfd93ce7fcf1d69fcd0a23d1ab4ea"
+"checksum rustc-ap-syntax 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "06b7a6da9b99e9a2e31f9325216dc5d477eb5d9bd88c7bb05b5e97e88d06d675"
+"checksum rustc-ap-syntax_pos 91.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "582d30a1308f6598b3636bc244efacd8551c825ed6be2aa594257fbf772d1161"
"checksum rustc-demangle 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "11fb43a206a04116ffd7cfcf9bcb941f8eb6cc7ff667272246b0a1c74259a3cb"
"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
"checksum same-file 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cfb6eded0b06a0b512c8ddbcf04089138c9b4362c2f696f3c3d76039d68f3637"
"checksum serde_json 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "5c508584d9913df116b91505eec55610a2f5b16e9ed793c46e4d0152872b3e74"
"checksum shell-escape 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "170a13e64f2a51b77a45702ba77287f5c6829375b04a69cf2222acd17d0cfab9"
"checksum shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"
+"checksum siphasher 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0df90a788073e8d0235a67e50441d47db7c8ad9debd91cbf43736a2a92d36537"
"checksum smallvec 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "44db0ecb22921ef790d17ae13a3f6d15784183ff5f2a01aa32098c7498d2b4b9"
"checksum socket2 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "71ebbe82fcdd697244ba7fe6e05e63b5c45910c3927e28469a04947494ff48d8"
"checksum stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15132e0e364248108c5e2c02e3ab539be8d6f5d52a01ca9bbf27ed657316f02b"
+"checksum string_cache 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "39cb4173bcbd1319da31faa5468a7e3870683d7a237150b0b0aaafd546f6ad12"
+"checksum string_cache_codegen 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "479cde50c3539481f33906a387f2bd17c8e87cb848c35b6021d41fb81ff9b4d7"
+"checksum string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc"
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"
"checksum syn 0.12.15 (registry+https://github.com/rust-lang/crates.io-index)" = "c97c05b8ebc34ddd6b967994d5c6e9852fa92f8b82b3858c39451f97346dcce5"
"checksum tar 0.4.14 (registry+https://github.com/rust-lang/crates.io-index)" = "1605d3388ceb50252952ffebab4b5dc43017ead7e4481b175961c283bb951195"
"checksum tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8"
"checksum tempfile 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "439d9a7c00f98b1b5ee730039bf5b1f9203d508690e3c76b509e7ad59f8f7c99"
+"checksum tendril 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9de21546595a0873061940d994bbbc5c35f024ae4fd61ec5c5b159115684f508"
"checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
"checksum term 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5e6b677dd1e8214ea1ef4297f85dbcbed8e8cdddb561040cc998ca2551c37561"
"checksum termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "adc4587ead41bf016f11af03e55a624c06568b5a19db4e90fde573d805074f83"
"checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb"
"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
+"checksum unreachable 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1f2ae5ddb18e1c92664717616dd9549dde73f539f01bd7b77c2edb2446bdff91"
"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
"checksum url 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f808aadd8cfec6ef90e4a14eb46f24511824d1ac596b9682703c87056c8678b7"
"checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea"
"checksum userenv-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "71d28ea36bbd9192d75bd9fa9b39f96ddb986eaee824adae5d53b6e51919b2f3"
+"checksum utf-8 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f1262dfab4c30d5cb7c07026be00ee343a6cf5027fdc0104a9160f354e5db75c"
"checksum utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "662fab6525a98beff2921d7f61a39e7d59e0b425ebc7d0d9e66d316e55124122"
"checksum vcpkg 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9e0a7d8bed3178a8fb112199d466eeca9ed09a14ba8ad67718179b4fd5487d0b"
"checksum vec_map 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "887b5b631c2ad01628bbbaa7dd4c869f80d3186688f8d0b6f58774fbe324988c"
cmd.arg("--color=always");
}
+ if env::var_os("RUSTC_DENY_WARNINGS").is_some() {
+ cmd.arg("-Dwarnings");
+ }
+
if verbose > 1 {
eprintln!("rustc command: {:?}", cmd);
eprintln!("sysroot: {:?}", sysroot);
if 'dev' in data:
build.set_dev_environment()
- # No help text depends on submodules. This check saves ~1 minute of git commands, even if
- # all the submodules are present and downloaded!
- if not help_triggered:
- build.update_submodules()
+ build.update_submodules()
# Fetch/build the bootstrap
build.build = args.build or build.build_triple()
cargo.env("RUSTC_VERBOSE", format!("{}", self.verbosity));
+ // in std, we want to avoid denying warnings for stage 0 as that makes cfg's painful.
+ if self.config.deny_warnings && !(mode == Mode::Libstd && stage == 0) {
+ cargo.env("RUSTC_DENY_WARNINGS", "1");
+ }
+
// Throughout the build Cargo can execute a number of build scripts
// compiling C/C++ code and we need to pass compilers, archivers, flags, etc
// obtained previously to those build scripts.
compiler: &Compiler,
target: Interned<String>,
cargo: &mut Command) {
- let mut features = build.std_features();
-
if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
}
- // When doing a local rebuild we tell cargo that we're stage1 rather than
- // stage0. This works fine if the local rust and being-built rust have the
- // same view of what the default allocator is, but fails otherwise. Since
- // we don't have a way to express an allocator preference yet, work
- // around the issue in the case of a local rebuild with jemalloc disabled.
- if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
- features.push_str(" force_alloc_system");
- }
+ if build.no_std(target) == Some(true) {
+ // for no-std targets we only compile a few no_std crates
+ cargo.arg("--features").arg("c mem")
+ .args(&["-p", "alloc"])
+ .args(&["-p", "compiler_builtins"])
+ .args(&["-p", "std_unicode"])
+ .arg("--manifest-path")
+ .arg(build.src.join("src/rustc/compiler_builtins_shim/Cargo.toml"));
+ } else {
+ let mut features = build.std_features();
+
+ // When doing a local rebuild we tell cargo that we're stage1 rather than
+ // stage0. This works fine if the local rust and being-built rust have the
+ // same view of what the default allocator is, but fails otherwise. Since
+ // we don't have a way to express an allocator preference yet, work
+ // around the issue in the case of a local rebuild with jemalloc disabled.
+ if compiler.stage == 0 && build.local_rebuild && !build.config.use_jemalloc {
+ features.push_str(" force_alloc_system");
+ }
- if compiler.stage != 0 && build.config.sanitizers {
- // This variable is used by the sanitizer runtime crates, e.g.
- // rustc_lsan, to build the sanitizer runtime from C code
- // When this variable is missing, those crates won't compile the C code,
- // so we don't set this variable during stage0 where llvm-config is
- // missing
- // We also only build the runtimes when --enable-sanitizers (or its
- // config.toml equivalent) is used
- let llvm_config = build.ensure(native::Llvm {
- target: build.config.build,
- emscripten: false,
- });
- cargo.env("LLVM_CONFIG", llvm_config);
- }
+ if compiler.stage != 0 && build.config.sanitizers {
+ // This variable is used by the sanitizer runtime crates, e.g.
+ // rustc_lsan, to build the sanitizer runtime from C code
+ // When this variable is missing, those crates won't compile the C code,
+ // so we don't set this variable during stage0 where llvm-config is
+ // missing
+ // We also only build the runtimes when --enable-sanitizers (or its
+ // config.toml equivalent) is used
+ let llvm_config = build.ensure(native::Llvm {
+ target: build.config.build,
+ emscripten: false,
+ });
+ cargo.env("LLVM_CONFIG", llvm_config);
+ }
- cargo.arg("--features").arg(features)
- .arg("--manifest-path")
- .arg(build.src.join("src/libstd/Cargo.toml"));
+ cargo.arg("--features").arg(features)
+ .arg("--manifest-path")
+ .arg(build.src.join("src/libstd/Cargo.toml"));
- if let Some(target) = build.config.target_config.get(&target) {
- if let Some(ref jemalloc) = target.jemalloc {
- cargo.env("JEMALLOC_OVERRIDE", jemalloc);
+ if let Some(target) = build.config.target_config.get(&target) {
+ if let Some(ref jemalloc) = target.jemalloc {
+ cargo.env("JEMALLOC_OVERRIDE", jemalloc);
+ }
}
- }
- if target.contains("musl") {
- if let Some(p) = build.musl_root(target) {
- cargo.env("MUSL_ROOT", p);
+ if target.contains("musl") {
+ if let Some(p) = build.musl_root(target) {
+ cargo.env("MUSL_ROOT", p);
+ }
}
}
}
pub incremental: bool,
pub dry_run: bool,
+ pub deny_warnings: bool,
+
// llvm codegen options
pub llvm_enabled: bool,
pub llvm_assertions: bool,
pub crt_static: Option<bool>,
pub musl_root: Option<PathBuf>,
pub qemu_rootfs: Option<PathBuf>,
+ pub no_std: bool,
}
/// Structure of the `config.toml` file that configuration is read from.
codegen_backends_dir: Option<String>,
wasm_syscall: Option<bool>,
lld: Option<bool>,
+ deny_warnings: Option<bool>,
}
/// TOML representation of how each build target is configured.
config.test_miri = false;
config.rust_codegen_backends = vec![INTERNER.intern_str("llvm")];
config.rust_codegen_backends_dir = "codegen-backends".to_owned();
+ config.deny_warnings = true;
// set by bootstrap.py
config.src = env::var_os("SRC").map(PathBuf::from).expect("'SRC' to be set");
config.incremental = flags.incremental;
config.dry_run = flags.dry_run;
config.keep_stage = flags.keep_stage;
+ if let Some(value) = flags.warnings {
+ config.deny_warnings = value;
+ }
if config.dry_run {
let dir = config.out.join("tmp-dry-run");
config.rustc_default_linker = rust.default_linker.clone();
config.musl_root = rust.musl_root.clone().map(PathBuf::from);
config.save_toolstates = rust.save_toolstates.clone().map(PathBuf::from);
+ set(&mut config.deny_warnings, rust.deny_warnings.or(flags.warnings));
if let Some(ref backends) = rust.codegen_backends {
config.rust_codegen_backends = backends.iter()
// default values for all options that we haven't otherwise stored yet.
set(&mut config.initial_rustc, build.rustc.map(PathBuf::from));
- set(&mut config.initial_rustc, build.cargo.map(PathBuf::from));
+ set(&mut config.initial_cargo, build.cargo.map(PathBuf::from));
let default = false;
config.llvm_assertions = llvm_assertions.unwrap_or(default);
o("full-bootstrap", "build.full-bootstrap", "build three compilers instead of two")
o("extended", "build.extended", "build an extended rust tool set")
-v("tools", "build.tools", "List of extended tools will be installed")
+v("tools", None, "List of extended tools will be installed")
v("build", "build.build", "GNUs ./configure syntax LLVM build triple")
v("host", None, "GNUs ./configure syntax LLVM host triples")
v("target", None, "GNUs ./configure syntax LLVM target triples")
set('target.{}.llvm-config'.format(build()), value + '/bin/llvm-config')
elif option.name == 'jemalloc-root':
set('target.{}.jemalloc'.format(build()), value + '/libjemalloc_pic.a')
+ elif option.name == 'tools':
+ set('build.tools', value.split(','))
elif option.name == 'host':
set('build.host', value.split(','))
elif option.name == 'target':
if build.hosts.iter().any(|t| t == target) {
builder.ensure(compile::Rustc { compiler, target });
} else {
- builder.ensure(compile::Test { compiler, target });
+ if build.no_std(target) == Some(true) {
+            // the `test` crate doesn't compile for no-std targets
+ builder.ensure(compile::Std { compiler, target });
+ } else {
+ builder.ensure(compile::Test { compiler, target });
+ }
}
let image = tmpdir(build).join(format!("{}-{}-image", name, target));
t!(symlink_dir_force(&builder.config, &out, &out_dir));
let mut cargo = builder.cargo(compiler, Mode::Librustc, target, "doc");
+ cargo.env("RUSTDOCFLAGS", "--document-private-items");
compile::rustc_cargo(build, &mut cargo);
// Only include compiler crates, no dependencies of those, such as `libc`.
pub exclude: Vec<PathBuf>,
pub rustc_error_format: Option<String>,
pub dry_run: bool,
+
+ // true => deny
+ pub warnings: Option<bool>,
}
pub enum Subcommand {
opts.optopt("", "src", "path to the root of the rust checkout", "DIR");
opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
opts.optflag("h", "help", "print this help message");
+ opts.optopt("", "warnings", "if value is deny, will deny warnings, otherwise use default",
+ "VALUE");
opts.optopt("", "error-format", "rustc error format", "FORMAT");
// fn usage()
};
- let mut stage = matches.opt_str("stage").map(|j| j.parse().unwrap());
-
- if matches.opt_present("incremental") && stage.is_none() {
- stage = Some(1);
- }
-
Flags {
verbose: matches.opt_count("verbose"),
- stage,
+ stage: matches.opt_str("stage").map(|j| j.parse().unwrap()),
dry_run: matches.opt_present("dry-run"),
on_fail: matches.opt_str("on-fail"),
rustc_error_format: matches.opt_str("error-format"),
incremental: matches.opt_present("incremental"),
exclude: split(matches.opt_strs("exclude"))
.into_iter().map(|p| p.into()).collect::<Vec<_>>(),
+ warnings: matches.opt_str("warnings").map(|v| v == "deny"),
}
}
}
let bindir_default = PathBuf::from("bin");
let libdir_default = PathBuf::from("lib");
let mandir_default = datadir_default.join("man");
- let prefix = build.config.prefix.as_ref().unwrap_or(&prefix_default);
+ let prefix = build.config.prefix.as_ref().map_or(prefix_default, |p| {
+ fs::canonicalize(p).expect(&format!("could not canonicalize {}", p.display()))
+ });
let sysconfdir = build.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default);
let datadir = build.config.datadir.as_ref().unwrap_or(&datadir_default);
let docdir = build.config.docdir.as_ref().unwrap_or(&docdir_default);
//! also check out the `src/bootstrap/README.md` file for more information.
#![deny(warnings)]
-#![feature(conservative_impl_trait, fs_read_write, core_intrinsics)]
-#![feature(slice_concat_ext)]
+#![feature(core_intrinsics)]
#[macro_use]
extern crate build_helper;
.map(|p| &**p)
}
+ /// Returns true if this is a no-std `target`, if defined
+ fn no_std(&self, target: Interned<String>) -> Option<bool> {
+ self.config.target_config.get(&target)
+ .map(|t| t.no_std)
+ }
+
/// Returns whether the target will be tested using the `remote-test-client`
/// and `remote-test-server` binaries.
fn remote_tested(&self, target: Interned<String>) -> bool {
fn read(&self, path: &Path) -> String {
if self.config.dry_run { return String::new(); }
- t!(fs::read_string(path))
+ t!(fs::read_to_string(path))
}
fn create_dir(&self, dir: &Path) {
panic!("the iOS target is only supported on macOS");
}
+ if target.contains("-none-") {
+ if build.no_std(*target).is_none() {
+ let target = build.config.target_config.entry(target.clone())
+ .or_insert(Default::default());
+
+ target.no_std = true;
+ }
+
+ if build.no_std(*target) == Some(false) {
+ panic!("All the *-none-* targets are no-std targets")
+ }
+ }
+
// Make sure musl-root is valid
if target.contains("musl") {
// If this is a native target (host is also musl) and no musl-root is given,
use std::env;
use std::path::PathBuf;
use std::process::{Command, exit};
-use std::slice::SliceConcatExt;
use Mode;
use Compiler;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![deny(warnings)]
use std::fs::File;
use std::path::{Path, PathBuf};
bzip2 \
patch \
libssl-dev \
- pkg-config
+ pkg-config \
+ gcc-arm-none-eabi \
+ libnewlib-arm-none-eabi
WORKDIR /build
ENV TARGETS=$TARGETS,aarch64-unknown-linux-musl
ENV TARGETS=$TARGETS,sparc64-unknown-linux-gnu
ENV TARGETS=$TARGETS,x86_64-unknown-redox
+ENV TARGETS=$TARGETS,thumbv6m-none-eabi
+ENV TARGETS=$TARGETS,thumbv7m-none-eabi
+ENV TARGETS=$TARGETS,thumbv7em-none-eabi
+ENV TARGETS=$TARGETS,thumbv7em-none-eabihf
# FIXME: remove armv5te vars after https://github.com/alexcrichton/cc-rs/issues/271
# get fixed and cc update
--env TRAVIS \
--env TRAVIS_BRANCH \
--env TOOLSTATE_REPO_ACCESS_TOKEN \
+ --env CI_JOB_NAME="${CI_JOB_NAME-$IMAGE}" \
--volume "$HOME/.cargo:/cargo" \
--volume "$HOME/rustsrc:$HOME/rustsrc" \
--init \
+++ /dev/null
-FROM ubuntu:16.04
-
-RUN apt-get update && apt-get install -y --no-install-recommends \
- g++ \
- make \
- file \
- curl \
- ca-certificates \
- python2.7 \
- git \
- cmake \
- sudo \
- gdb \
- xz-utils
-
-COPY scripts/sccache.sh /scripts/
-RUN sh /scripts/sccache.sh
-
-ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu
-ENV RUSTFLAGS -Zincremental=/tmp/rust-incr-cache
-ENV RUST_CHECK_TARGET check
-ENV CARGO_INCREMENTAL 0
set -e
+if [ -n "$CI_JOB_NAME" ]; then
+ echo "[CI_JOB_NAME=$CI_JOB_NAME]"
+fi
+
if [ "$NO_CHANGE_USER" = "" ]; then
if [ "$LOCAL_USER_ID" != "" ]; then
useradd --shell /bin/bash -u $LOCAL_USER_ID -o -c "" -m user
Many of these resources take the form of "books"; we collectively call these
"The Rust Bookshelf." Some are large, some are small.
-## Learn Rust
+# Learn Rust
If you'd like to learn Rust, this is the spot for you! All of these resources
assume that you have programmed before, but not in any specific language:
-### The Rust Programming Language
+## The Rust Programming Language
Affectionately nicknamed "the book," [The Rust Programming
Language](book/index.html) will give you an overview of the language from
first principles. You'll build a few projects along the way, and by the end,
you'll have a solid grasp of the language.
-### Rust By Example
+## Rust By Example
If reading multiple hundreds of pages about a language isn't your style, then
[Rust By Example](rust-by-example/index.html) has you covered. While the book talks about code with
a lot of words, RBE shows off a bunch of code, and keeps the talking to a
minimum. It also includes exercises!
-## Use Rust
+# Use Rust
Once you've gotten familiar with the language, these resources can help you
when you're actually using it day-to-day.
-### The Standard Library
+## The Standard Library
Rust's standard library has [extensive API documentation](std/index.html),
with explanations of how to use various things, as well as example code for
accomplishing various tasks.
-### The Cargo Book
+## The Cargo Book
[The Cargo Book](cargo/index.html) is a guide to Cargo, Rust's build tool and dependency manager.
-### The Rustdoc Book
+## The Rustdoc Book
[The Rustdoc Book](rustdoc/index.html) describes our documentation tool, `rustdoc`.
-### Extended Error Listing
+## Extended Error Listing
Many of Rust's errors come with error codes, and you can request extended
diagnostics from the compiler on those errors. You can also [read them
here](error-index.html), if you prefer to read them that way.
-## Master Rust
+# Master Rust
Once you're quite familiar with the language, you may find these advanced
resources useful.
-### The Reference
+## The Reference
[The Reference](reference/index.html) is not a formal spec, but is more detailed and
comprehensive than the book.
-### The Rustonomicon
+## The Rustonomicon
[The Rustonomicon](nomicon/index.html) is your guidebook to the dark arts of unsafe
Rust. It's also sometimes called "the 'nomicon."
-### The Unstable Book
+## The Unstable Book
[The Unstable Book](unstable-book/index.html) has documentation for unstable features.
[dependencies]
core = { path = "../libcore" }
std_unicode = { path = "../libstd_unicode" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
[dev-dependencies]
rand = "0.4"
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![deny(warnings)]
-
-#![cfg_attr(stage0, feature(i128_type))]
#![feature(rand)]
#![feature(repr_simd)]
#![feature(slice_sort_by_cached_key)]
#[unstable(feature = "pin", issue = "49150")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<PinBox<U>> for PinBox<T> {}
+
+#[unstable(feature = "pin", issue = "49150")]
+unsafe impl<T: ?Sized> Unpin for PinBox<T> {}
test(no_crate_inject, attr(allow(unused_variables), deny(warnings))))]
#![no_std]
#![needs_allocator]
-#![deny(warnings)]
#![deny(missing_debug_implementations)]
#![cfg_attr(test, allow(deprecated))] // rand
#![feature(fmt_internals)]
#![feature(from_ref)]
#![feature(fundamental)]
-#![feature(generic_param_attrs)]
-#![cfg_attr(stage0, feature(i128_type))]
#![feature(lang_items)]
#![feature(needs_allocator)]
#![feature(nonzero)]
#![feature(exact_chunks)]
#![feature(pointer_methods)]
#![feature(inclusive_range_fields)]
+#![cfg_attr(stage0, feature(generic_param_attrs))]
#![cfg_attr(not(test), feature(fn_traits, swap_with_slice, i128))]
#![cfg_attr(test, feature(test))]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![deny(warnings)]
-
#![feature(allocator_api)]
#![feature(alloc_system)]
#![feature(attr_literals)]
#![feature(box_syntax)]
-#![cfg_attr(stage0, feature(inclusive_range_syntax))]
#![feature(const_fn)]
#![feature(drain_filter)]
#![feature(exact_size_is_empty)]
}
}
-macro_rules! impl_spec_from_elem {
+impl<T: Clone + IsZero> SpecFromElem for T {
+ #[inline]
+ fn from_elem(elem: T, n: usize) -> Vec<T> {
+ if elem.is_zero() {
+ return Vec {
+ buf: RawVec::with_capacity_zeroed(n),
+ len: n,
+ }
+ }
+ let mut v = Vec::with_capacity(n);
+ v.extend_with(n, ExtendElement(elem));
+ v
+ }
+}
+
+unsafe trait IsZero {
+ /// Whether this value is zero
+ fn is_zero(&self) -> bool;
+}
+
+macro_rules! impl_is_zero {
($t: ty, $is_zero: expr) => {
- impl SpecFromElem for $t {
+ unsafe impl IsZero for $t {
#[inline]
- fn from_elem(elem: $t, n: usize) -> Vec<$t> {
- if $is_zero(elem) {
- return Vec {
- buf: RawVec::with_capacity_zeroed(n),
- len: n,
- }
- }
- let mut v = Vec::with_capacity(n);
- v.extend_with(n, ExtendElement(elem));
- v
+ fn is_zero(&self) -> bool {
+ $is_zero(*self)
}
}
- };
+ }
}
-impl_spec_from_elem!(i8, |x| x == 0);
-impl_spec_from_elem!(i16, |x| x == 0);
-impl_spec_from_elem!(i32, |x| x == 0);
-impl_spec_from_elem!(i64, |x| x == 0);
-impl_spec_from_elem!(i128, |x| x == 0);
-impl_spec_from_elem!(isize, |x| x == 0);
+impl_is_zero!(i8, |x| x == 0);
+impl_is_zero!(i16, |x| x == 0);
+impl_is_zero!(i32, |x| x == 0);
+impl_is_zero!(i64, |x| x == 0);
+impl_is_zero!(i128, |x| x == 0);
+impl_is_zero!(isize, |x| x == 0);
+
+impl_is_zero!(u16, |x| x == 0);
+impl_is_zero!(u32, |x| x == 0);
+impl_is_zero!(u64, |x| x == 0);
+impl_is_zero!(u128, |x| x == 0);
+impl_is_zero!(usize, |x| x == 0);
+
+impl_is_zero!(char, |x| x == '\0');
+
+impl_is_zero!(f32, |x: f32| x.to_bits() == 0);
+impl_is_zero!(f64, |x: f64| x.to_bits() == 0);
-impl_spec_from_elem!(u16, |x| x == 0);
-impl_spec_from_elem!(u32, |x| x == 0);
-impl_spec_from_elem!(u64, |x| x == 0);
-impl_spec_from_elem!(u128, |x| x == 0);
-impl_spec_from_elem!(usize, |x| x == 0);
+unsafe impl<T: ?Sized> IsZero for *const T {
+ #[inline]
+ fn is_zero(&self) -> bool {
+ (*self).is_null()
+ }
+}
+
+unsafe impl<T: ?Sized> IsZero for *mut T {
+ #[inline]
+ fn is_zero(&self) -> bool {
+ (*self).is_null()
+ }
+}
-impl_spec_from_elem!(f32, |x: f32| x.to_bits() == 0);
-impl_spec_from_elem!(f64, |x: f64| x.to_bits() == 0);
////////////////////////////////////////////////////////////////////////////////
// Common trait implementations for Vec
alloc_system = { path = "../liballoc_system" }
core = { path = "../libcore" }
libc = { path = "../rustc/libc_shim" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
[build-dependencies]
build_helper = { path = "../build_helper" }
reason = "this library is unlikely to be stabilized in its current \
form or name",
issue = "27783")]
-#![deny(warnings)]
#![feature(alloc_system)]
#![feature(libc)]
#![feature(linkage)]
alloc = { path = "../liballoc" }
core = { path = "../libcore" }
libc = { path = "../rustc/libc_shim" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
# See comments in the source for what this dependency is
[target.'cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))'.dependencies]
#![no_std]
#![allow(unused_attributes)]
-#![deny(warnings)]
#![unstable(feature = "alloc_system",
reason = "this library is unlikely to be stabilized in its current \
form or name",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
test(no_crate_inject, attr(deny(warnings))))]
-#![deny(warnings)]
#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
-#![feature(generic_param_attrs)]
+#![cfg_attr(stage0, feature(generic_param_attrs))]
#![cfg_attr(test, feature(test))]
#![allow(deprecated)]
/// documentation for more.
///
/// [`escape_default`]: fn.escape_default.html
-#[stable(feature = "core_ascii", since = "1.26.0")]
+#[stable(feature = "rust1", since = "1.0.0")]
pub struct EscapeDefault {
range: Range<usize>,
data: [u8; 4],
/// assert_eq!(b'9', escaped.next().unwrap());
/// assert_eq!(b'd', escaped.next().unwrap());
/// ```
-#[stable(feature = "core_ascii", since = "1.26.0")]
+#[stable(feature = "rust1", since = "1.0.0")]
pub fn escape_default(c: u8) -> EscapeDefault {
let (data, len) = match c {
b'\t' => ([b'\\', b't', 0, 0], 2),
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![deny(warnings)]
-
#![feature(flt2dec)]
#![feature(test)]
/// }
/// }
/// ```
-#[cfg_attr(not(stage0), lang = "ord")]
+#[lang = "ord"]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Ord: Eq + PartialOrd<Self> {
/// This method returns an `Ordering` between `self` and `other`.
/// assert_eq!(x < y, true);
/// assert_eq!(x.lt(&y), true);
/// ```
-#[cfg_attr(stage0, lang = "ord")]
-#[cfg_attr(not(stage0), lang = "partial_ord")]
+#[lang = "partial_ord"]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented = "can't compare `{Self}` with `{Rhs}`"]
pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
pub fn bswap<T>(x: T) -> T;
/// Reverses the bits in an integer type `T`.
- #[cfg(not(stage0))]
pub fn bitreverse<T>(x: T) -> T;
/// Performs checked integer addition.
/// Performs an exact division, resulting in undefined behavior where
/// `x % y != 0` or `y == 0` or `x == T::min_value() && y == -1`
- #[cfg(not(stage0))]
pub fn exact_div<T>(x: T, y: T) -> T;
/// Performs an unchecked division, resulting in undefined behavior
/// Probably will never become stable.
pub fn nontemporal_store<T>(ptr: *mut T, val: T);
}
-
-#[cfg(stage0)]
-pub unsafe fn exact_div<T>(a: T, b: T) -> T {
- unchecked_div(a, b)
-}
macro_rules! range_incl_trusted_len_impl {
($($t:ty)*) => ($(
- #[stable(feature = "inclusive_range", since = "1.26.0")]
+ #[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl TrustedLen for ops::RangeInclusive<$t> { }
)*)
}
#![no_core]
#![deny(missing_docs)]
#![deny(missing_debug_implementations)]
-#![deny(warnings)]
#![feature(allow_internal_unstable)]
#![feature(asm)]
#![feature(doc_spotlight)]
#![feature(fn_must_use)]
#![feature(fundamental)]
-#![cfg_attr(stage0, feature(i128_type))]
-#![cfg_attr(stage0, feature(inclusive_range_syntax))]
#![feature(intrinsics)]
#![feature(iterator_flatten)]
#![feature(iterator_repeat_with)]
#![feature(untagged_unions)]
#![feature(unwind_attributes)]
-#![cfg_attr(stage0, allow(unused_attributes))]
-#![cfg_attr(stage0, feature(never_type))]
-
#[prelude_import]
#[allow(unused)]
use prelude::v1::*;
});
}
-/// Ensure that a boolean expression is `true` at runtime.
-///
-/// This will invoke the [`panic!`] macro if the provided expression cannot be
-/// evaluated to `true` at runtime.
-///
-/// # Uses
-///
-/// Assertions are always checked in both debug and release builds, and cannot
-/// be disabled. See [`debug_assert!`] for assertions that are not enabled in
-/// release builds by default.
-///
-/// Unsafe code relies on `assert!` to enforce run-time invariants that, if
-/// violated could lead to unsafety.
-///
-/// Other use-cases of `assert!` include [testing] and enforcing run-time
-/// invariants in safe code (whose violation cannot result in unsafety).
-///
-/// # Custom Messages
-///
-/// This macro has a second form, where a custom panic message can
-/// be provided with or without arguments for formatting. See [`std::fmt`]
-/// for syntax for this form.
-///
-/// [`panic!`]: macro.panic.html
-/// [`debug_assert!`]: macro.debug_assert.html
-/// [testing]: ../book/second-edition/ch11-01-writing-tests.html#checking-results-with-the-assert-macro
-/// [`std::fmt`]: ../std/fmt/index.html
-///
-/// # Examples
-///
-/// ```
-/// // the panic message for these assertions is the stringified value of the
-/// // expression given.
-/// assert!(true);
-///
-/// fn some_computation() -> bool { true } // a very simple function
-///
-/// assert!(some_computation());
-///
-/// // assert with a custom message
-/// let x = true;
-/// assert!(x, "x wasn't true!");
-///
-/// let a = 3; let b = 27;
-/// assert!(a + b == 30, "a = {}, b = {}", a, b);
-/// ```
-#[macro_export]
-#[stable(feature = "rust1", since = "1.0.0")]
-#[cfg(stage0)]
-macro_rules! assert {
- ($cond:expr) => (
- if !$cond {
- panic!(concat!("assertion failed: ", stringify!($cond)))
- }
- );
- ($cond:expr,) => (
- assert!($cond)
- );
- ($cond:expr, $($arg:tt)+) => (
- if !$cond {
- panic!($($arg)+)
- }
- );
-}
-
/// Asserts that two expressions are equal to each other (using [`PartialEq`]).
///
/// On panic, this macro will print the values of the expressions with their
#[unstable(feature = "pin", issue = "49150")]
impl<'a, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Pin<'a, U>> for Pin<'a, T> {}
+
+#[unstable(feature = "pin", issue = "49150")]
+unsafe impl<'a, T: ?Sized> Unpin for Pin<'a, T> {}
}
}
+#[stable(feature = "panic_hook_display", since = "1.26.0")]
impl<'a> fmt::Display for PanicInfo<'a> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("panicked at ")?;
}
}
+#[stable(feature = "panic_hook_display", since = "1.26.0")]
impl<'a> fmt::Display for Location<'a> {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "{}:{}:{}", self.file, self.line, self.col)
#[allow(improper_ctypes)]
extern {
#[lang = "panic_fmt"]
- #[cfg_attr(stage0, unwind)]
- #[cfg_attr(not(stage0), unwind(allowed))]
+ #[unwind(allowed)]
fn panic_impl(fmt: fmt::Arguments, file: &'static str, line: u32, col: u32) -> !;
}
let (file, line, col) = *file_line_col;
$stable_nand:meta,
$s_int_type:expr, $int_ref:expr,
$extra_feature:expr,
+ $min_fn:ident, $max_fn:ident,
$int_type:ident $atomic_type:ident $atomic_init:ident) => {
/// An integer type which can be safely shared between threads.
///
unsafe { atomic_xor(self.v.get(), val, order) }
}
}
+
+ doc_comment! {
+ concat!("Fetches the value, and applies a function to it that returns an optional
+new value. Returns a `Result` (`Ok(_)` if the function returned `Some(_)`, else `Err(_)`) of the
+previous value.
+
+Note: This may call the function multiple times if the value has been changed from other threads in
+the meantime, as long as the function returns `Some(_)`, but the function will have been applied
+only once to the stored value.
+
+# Examples
+
+```rust
+#![feature(no_more_cas)]
+", $extra_feature, "use std::sync::atomic::{", stringify!($atomic_type), ", Ordering};
+
+let x = ", stringify!($atomic_type), "::new(7);
+assert_eq!(x.fetch_update(|_| None, Ordering::SeqCst, Ordering::SeqCst), Err(7));
+assert_eq!(x.fetch_update(|x| Some(x + 1), Ordering::SeqCst, Ordering::SeqCst), Ok(7));
+assert_eq!(x.fetch_update(|x| Some(x + 1), Ordering::SeqCst, Ordering::SeqCst), Ok(8));
+assert_eq!(x.load(Ordering::SeqCst), 9);
+```"),
+ #[inline]
+ #[unstable(feature = "no_more_cas",
+ reason = "no more CAS loops in user code",
+ issue = "48655")]
+ pub fn fetch_update<F>(&self,
+ mut f: F,
+ fetch_order: Ordering,
+ set_order: Ordering) -> Result<$int_type, $int_type>
+ where F: FnMut($int_type) -> Option<$int_type> {
+ let mut prev = self.load(fetch_order);
+ while let Some(next) = f(prev) {
+ match self.compare_exchange_weak(prev, next, set_order, fetch_order) {
+ x @ Ok(_) => return x,
+ Err(next_prev) => prev = next_prev
+ }
+ }
+ Err(prev)
+ }
+ }
+
+ doc_comment! {
+ concat!("Maximum with the current value.
+
+Finds the maximum of the current value and the argument `val`, and
+sets the new value to the result.
+
+Returns the previous value.
+
+# Examples
+
+```
+#![feature(atomic_min_max)]
+", $extra_feature, "use std::sync::atomic::{", stringify!($atomic_type), ", Ordering};
+
+let foo = ", stringify!($atomic_type), "::new(23);
+assert_eq!(foo.fetch_max(42, Ordering::SeqCst), 23);
+assert_eq!(foo.load(Ordering::SeqCst), 42);
+```
+
+If you want to obtain the maximum value in one step, you can use the following:
+
+```
+#![feature(atomic_min_max)]
+", $extra_feature, "use std::sync::atomic::{", stringify!($atomic_type), ", Ordering};
+
+let foo = ", stringify!($atomic_type), "::new(23);
+let bar = 42;
+let max_foo = foo.fetch_max(bar, Ordering::SeqCst).max(bar);
+assert!(max_foo == 42);
+```"),
+ #[inline]
+ #[unstable(feature = "atomic_min_max",
+ reason = "easier and faster min/max than writing manual CAS loop",
+ issue = "48655")]
+ pub fn fetch_max(&self, val: $int_type, order: Ordering) -> $int_type {
+ unsafe { $max_fn(self.v.get(), val, order) }
+ }
+ }
+
+ doc_comment! {
+ concat!("Minimum with the current value.
+
+Finds the minimum of the current value and the argument `val`, and
+sets the new value to the result.
+
+Returns the previous value.
+
+# Examples
+
+```
+#![feature(atomic_min_max)]
+", $extra_feature, "use std::sync::atomic::{", stringify!($atomic_type), ", Ordering};
+
+let foo = ", stringify!($atomic_type), "::new(23);
+assert_eq!(foo.fetch_min(42, Ordering::Relaxed), 23);
+assert_eq!(foo.load(Ordering::Relaxed), 23);
+assert_eq!(foo.fetch_min(22, Ordering::Relaxed), 23);
+assert_eq!(foo.load(Ordering::Relaxed), 22);
+```
+
+If you want to obtain the minimum value in one step, you can use the following:
+
+```
+#![feature(atomic_min_max)]
+", $extra_feature, "use std::sync::atomic::{", stringify!($atomic_type), ", Ordering};
+
+let foo = ", stringify!($atomic_type), "::new(23);
+let bar = 12;
+let min_foo = foo.fetch_min(bar, Ordering::SeqCst).min(bar);
+assert_eq!(min_foo, 12);
+```"),
+ #[inline]
+ #[unstable(feature = "atomic_min_max",
+ reason = "easier and faster min/max than writing manual CAS loop",
+ issue = "48655")]
+ pub fn fetch_min(&self, val: $int_type, order: Ordering) -> $int_type {
+ unsafe { $min_fn(self.v.get(), val, order) }
+ }
+ }
+
}
}
}
unstable(feature = "atomic_nand", issue = "13226"),
"i8", "../../../std/primitive.i8.html",
"#![feature(integer_atomics)]\n\n",
+ atomic_min, atomic_max,
i8 AtomicI8 ATOMIC_I8_INIT
}
#[cfg(target_has_atomic = "8")]
unstable(feature = "atomic_nand", issue = "13226"),
"u8", "../../../std/primitive.u8.html",
"#![feature(integer_atomics)]\n\n",
+ atomic_umin, atomic_umax,
u8 AtomicU8 ATOMIC_U8_INIT
}
#[cfg(target_has_atomic = "16")]
unstable(feature = "atomic_nand", issue = "13226"),
"i16", "../../../std/primitive.i16.html",
"#![feature(integer_atomics)]\n\n",
+ atomic_min, atomic_max,
i16 AtomicI16 ATOMIC_I16_INIT
}
#[cfg(target_has_atomic = "16")]
unstable(feature = "atomic_nand", issue = "13226"),
"u16", "../../../std/primitive.u16.html",
"#![feature(integer_atomics)]\n\n",
+ atomic_umin, atomic_umax,
u16 AtomicU16 ATOMIC_U16_INIT
}
#[cfg(target_has_atomic = "32")]
unstable(feature = "atomic_nand", issue = "13226"),
"i32", "../../../std/primitive.i32.html",
"#![feature(integer_atomics)]\n\n",
+ atomic_min, atomic_max,
i32 AtomicI32 ATOMIC_I32_INIT
}
#[cfg(target_has_atomic = "32")]
unstable(feature = "atomic_nand", issue = "13226"),
"u32", "../../../std/primitive.u32.html",
"#![feature(integer_atomics)]\n\n",
+ atomic_umin, atomic_umax,
u32 AtomicU32 ATOMIC_U32_INIT
}
#[cfg(target_has_atomic = "64")]
unstable(feature = "atomic_nand", issue = "13226"),
"i64", "../../../std/primitive.i64.html",
"#![feature(integer_atomics)]\n\n",
+ atomic_min, atomic_max,
i64 AtomicI64 ATOMIC_I64_INIT
}
#[cfg(target_has_atomic = "64")]
unstable(feature = "atomic_nand", issue = "13226"),
"u64", "../../../std/primitive.u64.html",
"#![feature(integer_atomics)]\n\n",
+ atomic_umin, atomic_umax,
u64 AtomicU64 ATOMIC_U64_INIT
}
#[cfg(target_has_atomic = "ptr")]
unstable(feature = "atomic_nand", issue = "13226"),
"isize", "../../../std/primitive.isize.html",
"",
+ atomic_min, atomic_max,
isize AtomicIsize ATOMIC_ISIZE_INIT
}
#[cfg(target_has_atomic = "ptr")]
unstable(feature = "atomic_nand", issue = "13226"),
"usize", "../../../std/primitive.usize.html",
"",
+ atomic_umin, atomic_umax,
usize AtomicUsize ATOMIC_USIZE_INIT
}
}
}
+/// returns the max value (signed comparison)
+#[inline]
+unsafe fn atomic_max<T>(dst: *mut T, val: T, order: Ordering) -> T {
+ match order {
+ Acquire => intrinsics::atomic_max_acq(dst, val),
+ Release => intrinsics::atomic_max_rel(dst, val),
+ AcqRel => intrinsics::atomic_max_acqrel(dst, val),
+ Relaxed => intrinsics::atomic_max_relaxed(dst, val),
+ SeqCst => intrinsics::atomic_max(dst, val),
+ __Nonexhaustive => panic!("invalid memory ordering"),
+ }
+}
+
+/// returns the min value (signed comparison)
+#[inline]
+unsafe fn atomic_min<T>(dst: *mut T, val: T, order: Ordering) -> T {
+ match order {
+ Acquire => intrinsics::atomic_min_acq(dst, val),
+ Release => intrinsics::atomic_min_rel(dst, val),
+ AcqRel => intrinsics::atomic_min_acqrel(dst, val),
+ Relaxed => intrinsics::atomic_min_relaxed(dst, val),
+ SeqCst => intrinsics::atomic_min(dst, val),
+ __Nonexhaustive => panic!("invalid memory ordering"),
+ }
+}
+
+/// returns the max value (unsigned comparison)
+#[inline]
+unsafe fn atomic_umax<T>(dst: *mut T, val: T, order: Ordering) -> T {
+ match order {
+ Acquire => intrinsics::atomic_umax_acq(dst, val),
+ Release => intrinsics::atomic_umax_rel(dst, val),
+ AcqRel => intrinsics::atomic_umax_acqrel(dst, val),
+ Relaxed => intrinsics::atomic_umax_relaxed(dst, val),
+ SeqCst => intrinsics::atomic_umax(dst, val),
+ __Nonexhaustive => panic!("invalid memory ordering"),
+ }
+}
+
+/// returns the min value (unsigned comparison)
+#[inline]
+unsafe fn atomic_umin<T>(dst: *mut T, val: T, order: Ordering) -> T {
+ match order {
+ Acquire => intrinsics::atomic_umin_acq(dst, val),
+ Release => intrinsics::atomic_umin_rel(dst, val),
+ AcqRel => intrinsics::atomic_umin_acqrel(dst, val),
+ Relaxed => intrinsics::atomic_umin_relaxed(dst, val),
+ SeqCst => intrinsics::atomic_umin(dst, val),
+ __Nonexhaustive => panic!("invalid memory ordering"),
+ }
+}
+
/// An atomic fence.
///
/// Depending on the specified order, a fence prevents the compiler and CPU from
#[test]
fn static_init() {
- assert!(!S_FALSE.load(SeqCst));
- assert!(S_TRUE.load(SeqCst));
- assert!(S_INT.load(SeqCst) == 0);
- assert!(S_UINT.load(SeqCst) == 0);
+ // Note that we're not really testing the mutability here but it's important
+ // on Android at the moment (#49775)
+ assert!(!S_FALSE.swap(true, SeqCst));
+ assert!(S_TRUE.swap(false, SeqCst));
+ assert!(S_INT.fetch_add(1, SeqCst) == 0);
+ assert!(S_UINT.fetch_add(1, SeqCst) == 0);
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![deny(warnings)]
-
#![feature(ascii_ctype)]
#![feature(box_syntax)]
#![feature(core_float)]
#![feature(fmt_internals)]
#![feature(hashmap_internals)]
#![feature(iterator_step_by)]
-#![cfg_attr(stage0, feature(i128_type))]
-#![cfg_attr(stage0, feature(inclusive_range_syntax))]
#![feature(iterator_flatten)]
-#![cfg_attr(stage0, feature(conservative_impl_trait))]
#![feature(iterator_repeat_with)]
#![feature(nonzero)]
#![feature(pattern)]
html_root_url = "https://doc.rust-lang.org/nightly/",
html_playground_url = "https://play.rust-lang.org/",
test(attr(deny(warnings))))]
-#![deny(warnings)]
pub use self::Piece::*;
pub use self::Position::*;
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
test(attr(allow(unused_variables), deny(warnings))))]
-#![deny(warnings)]
#![feature(str_escape)]
[dependencies]
core = { path = "../libcore" }
libc = { path = "../rustc/libc_shim" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")]
-#![deny(warnings)]
#![panic_runtime]
#![allow(unused_features)]
core = { path = "../libcore" }
libc = { path = "../rustc/libc_shim" }
unwind = { path = "../libunwind" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
// See docs in the `unwind` module.
#[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu"))]
#[lang = "eh_unwind_resume"]
-#[cfg_attr(stage0, unwind)]
-#[cfg_attr(not(stage0), unwind(allowed))]
+#[unwind(allowed)]
unsafe extern "C" fn rust_eh_unwind_resume(panic_ctx: *mut u8) -> ! {
uw::_Unwind_Resume(panic_ctx as *mut uw::_Unwind_Exception);
}
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")]
-#![deny(warnings)]
#![feature(alloc)]
#![feature(core_intrinsics)]
// Entry point for raising an exception, just delegates to the platform-specific
// implementation.
#[no_mangle]
-#[cfg_attr(stage0, unwind)]
-#[cfg_attr(not(stage0), unwind(allowed))]
+#[unwind(allowed)]
pub unsafe extern "C" fn __rust_start_panic(data: usize, vtable: usize) -> u32 {
imp::panic(mem::transmute(raw::TraitObject {
data: data as *mut (),
}
#[lang = "eh_unwind_resume"]
-#[cfg_attr(stage0, unwind)]
-#[cfg_attr(not(stage0), unwind(allowed))]
+#[unwind(allowed)]
unsafe extern "C" fn rust_eh_unwind_resume(panic_ctx: c::LPVOID) -> ! {
let params = [panic_ctx as c::ULONG_PTR];
c::RaiseException(RUST_PANIC,
pub use self::EXCEPTION_DISPOSITION::*;
extern "system" {
- #[cfg_attr(stage0, unwind)]
- #[cfg_attr(not(stage0), unwind(allowed))]
+ #[unwind(allowed)]
pub fn RaiseException(dwExceptionCode: DWORD,
dwExceptionFlags: DWORD,
nNumberOfArguments: DWORD,
lpArguments: *const ULONG_PTR);
- #[cfg_attr(stage0, unwind)]
- #[cfg_attr(not(stage0), unwind(allowed))]
+ #[unwind(allowed)]
pub fn RtlUnwindEx(TargetFrame: LPVOID,
TargetIp: LPVOID,
ExceptionRecord: *const EXCEPTION_RECORD,
ReturnValue: LPVOID,
OriginalContext: *const CONTEXT,
HistoryTable: *const UNWIND_HISTORY_TABLE);
- #[cfg_attr(stage0, unwind)]
- #[cfg_attr(not(stage0), unwind(allowed))]
+ #[unwind(allowed)]
pub fn _CxxThrowException(pExceptionObject: *mut c_void, pThrowInfo: *mut u8);
}
//! See [the book](../book/first-edition/procedural-macros.html) for more.
#![stable(feature = "proc_macro_lib", since = "1.15.0")]
-#![deny(warnings)]
#![deny(missing_docs)]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
test(no_crate_inject, attr(deny(warnings))),
test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]
-#![cfg_attr(stage0, feature(i128_type))]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(lang_items)]
use syntax::parse::{self, token};
use syntax::symbol::Symbol;
use syntax::tokenstream;
-use syntax_pos::DUMMY_SP;
+use syntax::parse::lexer::comments;
use syntax_pos::{FileMap, Pos, SyntaxContext, FileName};
use syntax_pos::hygiene::Mark;
/// The API of this type is intentionally bare-bones, but it'll be expanded over
/// time!
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
-#[derive(Clone, Debug)]
+#[derive(Clone)]
pub struct TokenStream(tokenstream::TokenStream);
/// Error returned from `TokenStream::from_str`.
_inner: (),
}
+impl TokenStream {
+ /// Returns an empty `TokenStream`.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn empty() -> TokenStream {
+ TokenStream(tokenstream::TokenStream::empty())
+ }
+
+ /// Checks if this `TokenStream` is empty.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn is_empty(&self) -> bool {
+ self.0.is_empty()
+ }
+}
+
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl FromStr for TokenStream {
type Err = LexError;
}
}
-/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
-/// For example, `quote!(a + b)` will produce a expression, that, when evaluated, constructs
-/// the `TokenStream` `[Word("a"), Op('+', Alone), Word("b")]`.
-///
-/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
-/// To quote `$` itself, use `$$`.
-#[unstable(feature = "proc_macro", issue = "38356")]
-#[macro_export]
-macro_rules! quote { () => {} }
-
-#[unstable(feature = "proc_macro_internals", issue = "27812")]
-#[doc(hidden)]
-mod quote;
+#[stable(feature = "proc_macro_lib", since = "1.15.0")]
+impl fmt::Debug for TokenStream {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.write_str("TokenStream ")?;
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
#[unstable(feature = "proc_macro", issue = "38356")]
impl From<TokenTree> for TokenStream {
}
#[unstable(feature = "proc_macro", issue = "38356")]
-impl From<TokenNode> for TokenStream {
- fn from(kind: TokenNode) -> TokenStream {
- TokenTree::from(kind).into()
- }
-}
-
-#[unstable(feature = "proc_macro", issue = "38356")]
-impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
- fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
+impl iter::FromIterator<TokenTree> for TokenStream {
+ fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
let mut builder = tokenstream::TokenStreamBuilder::new();
- for stream in streams {
- builder.push(stream.into().0);
+ for tree in trees {
+ builder.push(tree.to_internal());
}
TokenStream(builder.build())
}
}
+/// Implementation details for the `TokenTree` type, such as iterators.
#[unstable(feature = "proc_macro", issue = "38356")]
-impl IntoIterator for TokenStream {
- type Item = TokenTree;
- type IntoIter = TokenTreeIter;
+pub mod token_stream {
+ use syntax::tokenstream;
+ use syntax_pos::DUMMY_SP;
- fn into_iter(self) -> TokenTreeIter {
- TokenTreeIter { cursor: self.0.trees(), stack: Vec::new() }
+ use {TokenTree, TokenStream, Delimiter};
+
+ /// An iterator over `TokenTree`s.
+ #[derive(Clone)]
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub struct IntoIter {
+ cursor: tokenstream::Cursor,
+ stack: Vec<TokenTree>,
}
-}
-impl TokenStream {
- /// Returns an empty `TokenStream`.
#[unstable(feature = "proc_macro", issue = "38356")]
- pub fn empty() -> TokenStream {
- TokenStream(tokenstream::TokenStream::empty())
+ impl Iterator for IntoIter {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ loop {
+ let tree = self.stack.pop().or_else(|| {
+ let next = self.cursor.next_as_stream()?;
+ Some(TokenTree::from_internal(next, &mut self.stack))
+ })?;
+ if tree.span().0 == DUMMY_SP {
+ if let TokenTree::Group(ref group) = tree {
+ if group.delimiter() == Delimiter::None {
+ self.cursor.insert(group.stream.clone().0);
+ continue
+ }
+ }
+ }
+ return Some(tree);
+ }
+ }
}
- /// Checks if this `TokenStream` is empty.
#[unstable(feature = "proc_macro", issue = "38356")]
- pub fn is_empty(&self) -> bool {
- self.0.is_empty()
+ impl IntoIterator for TokenStream {
+ type Item = TokenTree;
+ type IntoIter = IntoIter;
+
+ fn into_iter(self) -> IntoIter {
+ IntoIter { cursor: self.0.trees(), stack: Vec::new() }
+ }
}
}
-/// A region of source code, along with macro expansion information.
+/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
+/// For example, `quote!(a + b)` will produce an expression that, when evaluated, constructs
+/// the `TokenStream` `[Word("a"), Op('+', Alone), Word("b")]`.
+///
+/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
+/// To quote `$` itself, use `$$`.
#[unstable(feature = "proc_macro", issue = "38356")]
-#[derive(Copy, Clone, Debug, PartialEq, Eq)]
-pub struct Span(syntax_pos::Span);
+#[macro_export]
+macro_rules! quote { () => {} }
-impl Span {
- /// A span that resolves at the macro definition site.
- #[unstable(feature = "proc_macro", issue = "38356")]
- pub fn def_site() -> Span {
- ::__internal::with_sess(|(_, mark)| {
- let call_site = mark.expn_info().unwrap().call_site;
- Span(call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark)))
- })
- }
-}
+#[unstable(feature = "proc_macro_internals", issue = "27812")]
+#[doc(hidden)]
+mod quote;
/// Quote a `Span` into a `TokenStream`.
/// This is needed to implement a custom quoter.
quote::Quote::quote(span)
}
+/// A region of source code, along with macro expansion information.
+#[unstable(feature = "proc_macro", issue = "38356")]
+#[derive(Copy, Clone)]
+pub struct Span(syntax_pos::Span);
+
macro_rules! diagnostic_method {
($name:ident, $level:expr) => (
/// Create a new `Diagnostic` with the given `message` at the span
}
impl Span {
+ /// A span that resolves at the macro definition site.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn def_site() -> Span {
+ ::__internal::with_sess(|(_, mark)| {
+ let call_site = mark.expn_info().unwrap().call_site;
+ Span(call_site.with_ctxt(SyntaxContext::empty().apply_mark(mark)))
+ })
+ }
+
/// The span of the invocation of the current procedural macro.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn call_site() -> Span {
other.resolved_at(*self)
}
+ /// Compares to spans to see if they're equal.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn eq(&self, other: &Span) -> bool {
+ self.0 == other.0
+ }
+
diagnostic_method!(error, Level::Error);
diagnostic_method!(warning, Level::Warning);
diagnostic_method!(note, Level::Note);
diagnostic_method!(help, Level::Help);
}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl fmt::Debug for Span {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{:?} bytes({}..{})",
+ self.0.ctxt(),
+ self.0.lo().0,
+ self.0.hi().0)
+ }
+}
+
/// A line-column pair representing the start or end of a `Span`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
#[unstable(feature = "proc_macro", issue = "38356")]
-#[derive(Clone, Debug)]
-pub struct TokenTree {
- /// The `TokenTree`'s span
- pub span: Span,
- /// Description of the `TokenTree`
- pub kind: TokenNode,
+#[derive(Clone)]
+pub enum TokenTree {
+ /// A delimited tokenstream
+ Group(Group),
+ /// A unicode identifier
+ Term(Term),
+ /// A punctuation character (`+`, `,`, `$`, etc.).
+ Op(Op),
+ /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
+ Literal(Literal),
+}
+
+impl TokenTree {
+ /// Returns the span of this token, accessing the `span` method of each of
+ /// the internal tokens.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn span(&self) -> Span {
+ match *self {
+ TokenTree::Group(ref t) => t.span(),
+ TokenTree::Term(ref t) => t.span(),
+ TokenTree::Op(ref t) => t.span(),
+ TokenTree::Literal(ref t) => t.span(),
+ }
+ }
+
+ /// Configures the span for *only this token*.
+ ///
+ /// Note that if this token is a `Group` then this method will not configure
+ /// the span of each of the internal tokens, this will simply delegate to
+ /// the `set_span` method of each variant.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn set_span(&mut self, span: Span) {
+ match *self {
+ TokenTree::Group(ref mut t) => t.set_span(span),
+ TokenTree::Term(ref mut t) => t.set_span(span),
+ TokenTree::Op(ref mut t) => t.set_span(span),
+ TokenTree::Literal(ref mut t) => t.set_span(span),
+ }
+ }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl fmt::Debug for TokenTree {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Each of these has the name in the struct type in the derived debug,
+ // so don't bother with an extra layer of indirection
+ match *self {
+ TokenTree::Group(ref tt) => tt.fmt(f),
+ TokenTree::Term(ref tt) => tt.fmt(f),
+ TokenTree::Op(ref tt) => tt.fmt(f),
+ TokenTree::Literal(ref tt) => tt.fmt(f),
+ }
+ }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl From<Group> for TokenTree {
+ fn from(g: Group) -> TokenTree {
+ TokenTree::Group(g)
+ }
}
#[unstable(feature = "proc_macro", issue = "38356")]
-impl From<TokenNode> for TokenTree {
- fn from(kind: TokenNode) -> TokenTree {
- TokenTree { span: Span::def_site(), kind: kind }
+impl From<Term> for TokenTree {
+ fn from(g: Term) -> TokenTree {
+ TokenTree::Term(g)
+ }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl From<Op> for TokenTree {
+ fn from(g: Op) -> TokenTree {
+ TokenTree::Op(g)
+ }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl From<Literal> for TokenTree {
+ fn from(g: Literal) -> TokenTree {
+ TokenTree::Literal(g)
}
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl fmt::Display for TokenTree {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- TokenStream::from(self.clone()).fmt(f)
+ match *self {
+ TokenTree::Group(ref t) => t.fmt(f),
+ TokenTree::Term(ref t) => t.fmt(f),
+ TokenTree::Op(ref t) => t.fmt(f),
+ TokenTree::Literal(ref t) => t.fmt(f),
+ }
}
}
-/// Description of a `TokenTree`
+/// A delimited token stream
+///
+/// A `Group` internally contains a `TokenStream` which is delimited by a
+/// `Delimiter`. Groups represent multiple tokens internally and have a `Span`
+/// for the entire stream.
#[derive(Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
-pub enum TokenNode {
- /// A delimited tokenstream.
- Group(Delimiter, TokenStream),
- /// A unicode identifier.
- Term(Term),
- /// A punctuation character (`+`, `,`, `$`, etc.).
- Op(char, Spacing),
- /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
- Literal(Literal),
+pub struct Group {
+ delimiter: Delimiter,
+ stream: TokenStream,
+ span: Span,
}
/// Describes how a sequence of token trees is delimited.
None,
}
-/// An interned string.
-#[derive(Copy, Clone, Debug)]
-#[unstable(feature = "proc_macro", issue = "38356")]
-pub struct Term(Symbol);
+impl Group {
+    /// Creates a new `Group` with the given delimiter and token stream.
+ ///
+ /// This constructor will set the span for this group to
+ /// `Span::call_site()`. To change the span you can use the `set_span`
+ /// method below.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
+ Group {
+ delimiter: delimiter,
+ stream: stream,
+ span: Span::call_site(),
+ }
+ }
-impl Term {
- /// Intern a string into a `Term`.
+ /// Returns the delimiter of this `Group`
#[unstable(feature = "proc_macro", issue = "38356")]
- pub fn intern(string: &str) -> Term {
- Term(Symbol::intern(string))
+ pub fn delimiter(&self) -> Delimiter {
+ self.delimiter
}
- /// Get a reference to the interned string.
+ /// Returns the `TokenStream` of tokens that are delimited in this `Group`.
+ ///
+ /// Note that the returned token stream does not include the delimiter
+ /// returned above.
#[unstable(feature = "proc_macro", issue = "38356")]
- pub fn as_str(&self) -> &str {
- unsafe { &*(&*self.0.as_str() as *const str) }
+ pub fn stream(&self) -> TokenStream {
+ self.stream.clone()
+ }
+
+ /// Returns the span for the delimiters of this token stream, spanning the
+ /// entire `Group`.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ /// Configures the span for this `Group`'s delimiters, but not its internal
+ /// tokens.
+ ///
+ /// This method will **not** set the span of all the internal tokens spanned
+ /// by this group, but rather it will only set the span of the delimiter
+ /// tokens at the level of the `Group`.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
}
}
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl fmt::Display for Group {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ TokenStream::from(TokenTree::from(self.clone())).fmt(f)
+ }
+}
+
+/// An `Op` is an operator like `+` or `-`, and only represents one character.
+///
+/// Operators like `+=` are represented as two instances of `Op` with different
+/// forms of `Spacing` returned.
+#[unstable(feature = "proc_macro", issue = "38356")]
+#[derive(Copy, Clone, Debug)]
+pub struct Op {
+ op: char,
+ spacing: Spacing,
+ span: Span,
+}
+
/// Whether an `Op` is either followed immediately by another `Op` or followed by whitespace.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
Joint,
}
-/// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
-#[derive(Clone, Debug)]
+impl Op {
+ /// Creates a new `Op` from the given character and spacing.
+ ///
+ /// The returned `Op` will have the default span of `Span::call_site()`
+ /// which can be further configured with the `set_span` method below.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn new(op: char, spacing: Spacing) -> Op {
+ Op {
+ op: op,
+ spacing: spacing,
+ span: Span::call_site(),
+ }
+ }
+
+ /// Returns the character this operation represents, for example `'+'`
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn op(&self) -> char {
+ self.op
+ }
+
+ /// Returns the spacing of this operator, indicating whether it's a joint
+ /// operator with more operators coming next in the token stream or an
+ /// `Alone` meaning that the operator has ended.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn spacing(&self) -> Spacing {
+ self.spacing
+ }
+
+ /// Returns the span for this operator character
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ /// Configure the span for this operator's character
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+}
+
#[unstable(feature = "proc_macro", issue = "38356")]
-pub struct Literal(token::Token);
+impl fmt::Display for Op {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ TokenStream::from(TokenTree::from(self.clone())).fmt(f)
+ }
+}
+/// An interned string.
+#[derive(Copy, Clone, Debug)]
#[unstable(feature = "proc_macro", issue = "38356")]
-impl fmt::Display for Literal {
+pub struct Term {
+ sym: Symbol,
+ span: Span,
+}
+
+impl Term {
+ /// Creates a new `Term` with the given `string` as well as the specified
+ /// `span`.
+ ///
+ /// Note that `span`, currently in rustc, configures the hygiene information
+ /// for this identifier. As of this time `Span::call_site()` explicitly
+ /// opts-in to **non-hygienic** information (aka copy/pasted code) while
+ /// spans like `Span::def_site()` will opt-in to hygienic information,
+ /// meaning that code at the call site of the macro can't access this
+ /// identifier.
+ ///
+ /// Due to the current importance of hygiene this constructor, unlike other
+ /// tokens, requires a `Span` to be specified at construction.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn new(string: &str, span: Span) -> Term {
+ Term {
+ sym: Symbol::intern(string),
+ span,
+ }
+ }
+
+ /// Get a reference to the interned string.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn as_str(&self) -> &str {
+ unsafe { &*(&*self.sym.as_str() as *const str) }
+ }
+
+ /// Returns the span of this `Term`, encompassing the entire string returned
+ /// by `as_str`.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ /// Configures the span of this `Term`, possibly changing hygiene
+ /// information.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl fmt::Display for Term {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- TokenTree { kind: TokenNode::Literal(self.clone()), span: Span(DUMMY_SP) }.fmt(f)
+ self.as_str().fmt(f)
}
}
-macro_rules! int_literals {
- ($($int_kind:ident),*) => {$(
- /// Integer literal.
+/// A literal character (`'a'`), string (`"hello"`), a number (`2.3`), etc.
+#[derive(Clone, Debug)]
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub struct Literal {
+ lit: token::Lit,
+ suffix: Option<ast::Name>,
+ span: Span,
+}
+
+macro_rules! suffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new suffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1u32` where the integer
+ /// value specified is the first part of the token and the integral is
+ /// also suffixed at the end.
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
#[unstable(feature = "proc_macro", issue = "38356")]
- pub fn $int_kind(n: $int_kind) -> Literal {
- Literal::typed_integer(n as i128, stringify!($int_kind))
+ pub fn $name(n: $kind) -> Literal {
+ Literal {
+ lit: token::Lit::Integer(Symbol::intern(&n.to_string())),
+ suffix: Some(Symbol::intern(stringify!($kind))),
+ span: Span::call_site(),
+ }
+ }
+ )*)
+}
+
+macro_rules! unsuffixed_int_literals {
+ ($($name:ident => $kind:ident,)*) => ($(
+ /// Creates a new unsuffixed integer literal with the specified value.
+ ///
+ /// This function will create an integer like `1` where the integer
+ /// value specified is the first part of the token. No suffix is
+ /// specified on this token, meaning that invocations like
+ /// `Literal::i8_unsuffixed(1)` are equivalent to
+ /// `Literal::u32_unsuffixed(1)`.
+ ///
+ /// Literals created through this method have the `Span::call_site()`
+ /// span by default, which can be configured with the `set_span` method
+ /// below.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn $name(n: $kind) -> Literal {
+ Literal {
+ lit: token::Lit::Integer(Symbol::intern(&n.to_string())),
+ suffix: None,
+ span: Span::call_site(),
+ }
}
- )*}
+ )*)
}
impl Literal {
- /// Integer literal
- #[unstable(feature = "proc_macro", issue = "38356")]
- pub fn integer(n: i128) -> Literal {
- Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), None))
+ suffixed_int_literals! {
+ u8_suffixed => u8,
+ u16_suffixed => u16,
+ u32_suffixed => u32,
+ u64_suffixed => u64,
+ u128_suffixed => u128,
+ usize_suffixed => usize,
+ i8_suffixed => i8,
+ i16_suffixed => i16,
+ i32_suffixed => i32,
+ i64_suffixed => i64,
+ i128_suffixed => i128,
+ isize_suffixed => isize,
+ }
+
+ unsuffixed_int_literals! {
+ u8_unsuffixed => u8,
+ u16_unsuffixed => u16,
+ u32_unsuffixed => u32,
+ u64_unsuffixed => u64,
+ u128_unsuffixed => u128,
+ usize_unsuffixed => usize,
+ i8_unsuffixed => i8,
+ i16_unsuffixed => i16,
+ i32_unsuffixed => i32,
+ i64_unsuffixed => i64,
+ i128_unsuffixed => i128,
+ isize_unsuffixed => isize,
}
- int_literals!(u8, i8, u16, i16, u32, i32, u64, i64, usize, isize);
- fn typed_integer(n: i128, kind: &'static str) -> Literal {
- Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())),
- Some(Symbol::intern(kind))))
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn f32_unsuffixed(n: f32) -> Literal {
+ if !n.is_finite() {
+ panic!("Invalid float literal {}", n);
+ }
+ Literal {
+ lit: token::Lit::Float(Symbol::intern(&n.to_string())),
+ suffix: None,
+ span: Span::call_site(),
+ }
}
- /// Floating point literal.
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f32` where the value
+ /// specified is the preceding part of the token and `f32` is the suffix of
+ /// the token. This token will always be inferred to be an `f32` in the
+ /// compiler.
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
#[unstable(feature = "proc_macro", issue = "38356")]
- pub fn float(n: f64) -> Literal {
+ pub fn f32_suffixed(n: f32) -> Literal {
if !n.is_finite() {
panic!("Invalid float literal {}", n);
}
- Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())), None))
+ Literal {
+ lit: token::Lit::Float(Symbol::intern(&n.to_string())),
+ suffix: Some(Symbol::intern("f32")),
+ span: Span::call_site(),
+ }
}
- /// Floating point literal.
+ /// Creates a new unsuffixed floating-point literal.
+ ///
+ /// This constructor is similar to those like `Literal::i8_unsuffixed` where
+ /// the float's value is emitted directly into the token but no suffix is
+ /// used, so it may be inferred to be a `f64` later in the compiler.
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
#[unstable(feature = "proc_macro", issue = "38356")]
- pub fn f32(n: f32) -> Literal {
+ pub fn f64_unsuffixed(n: f64) -> Literal {
if !n.is_finite() {
- panic!("Invalid f32 literal {}", n);
+ panic!("Invalid float literal {}", n);
+ }
+ Literal {
+ lit: token::Lit::Float(Symbol::intern(&n.to_string())),
+ suffix: None,
+ span: Span::call_site(),
}
- Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
- Some(Symbol::intern("f32"))))
}
- /// Floating point literal.
+ /// Creates a new suffixed floating-point literal.
+ ///
+ /// This constructor will create a literal like `1.0f64` where the value
+ /// specified is the preceding part of the token and `f64` is the suffix of
+ /// the token. This token will always be inferred to be an `f64` in the
+ /// compiler.
+ ///
+ /// # Panics
+ ///
+ /// This function requires that the specified float is finite, for
+ /// example if it is infinity or NaN this function will panic.
#[unstable(feature = "proc_macro", issue = "38356")]
- pub fn f64(n: f64) -> Literal {
+ pub fn f64_suffixed(n: f64) -> Literal {
if !n.is_finite() {
- panic!("Invalid f64 literal {}", n);
+ panic!("Invalid float literal {}", n);
+ }
+ Literal {
+ lit: token::Lit::Float(Symbol::intern(&n.to_string())),
+ suffix: Some(Symbol::intern("f64")),
+ span: Span::call_site(),
}
- Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())),
- Some(Symbol::intern("f64"))))
}
/// String literal.
for ch in string.chars() {
escaped.extend(ch.escape_debug());
}
- Literal(token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None))
+ Literal {
+ lit: token::Lit::Str_(Symbol::intern(&escaped)),
+ suffix: None,
+ span: Span::call_site(),
+ }
}
/// Character literal.
pub fn character(ch: char) -> Literal {
let mut escaped = String::new();
escaped.extend(ch.escape_unicode());
- Literal(token::Literal(token::Lit::Char(Symbol::intern(&escaped)), None))
+ Literal {
+ lit: token::Lit::Char(Symbol::intern(&escaped)),
+ suffix: None,
+ span: Span::call_site(),
+ }
}
/// Byte string literal.
pub fn byte_string(bytes: &[u8]) -> Literal {
let string = bytes.iter().cloned().flat_map(ascii::escape_default)
.map(Into::<char>::into).collect::<String>();
- Literal(token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None))
+ Literal {
+ lit: token::Lit::ByteStr(Symbol::intern(&string)),
+ suffix: None,
+ span: Span::call_site(),
+ }
}
-}
-/// An iterator over `TokenTree`s.
-#[derive(Clone)]
-#[unstable(feature = "proc_macro", issue = "38356")]
-pub struct TokenTreeIter {
- cursor: tokenstream::Cursor,
- stack: Vec<TokenTree>,
+ /// Returns the span encompassing this literal.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn span(&self) -> Span {
+ self.span
+ }
+
+ /// Configures the span associated with this literal.
+ #[unstable(feature = "proc_macro", issue = "38356")]
+ pub fn set_span(&mut self, span: Span) {
+ self.span = span;
+ }
}
#[unstable(feature = "proc_macro", issue = "38356")]
-impl Iterator for TokenTreeIter {
- type Item = TokenTree;
-
- fn next(&mut self) -> Option<TokenTree> {
- loop {
- let tree = self.stack.pop().or_else(|| {
- let next = self.cursor.next_as_stream()?;
- Some(TokenTree::from_internal(next, &mut self.stack))
- })?;
- if tree.span.0 == DUMMY_SP {
- if let TokenNode::Group(Delimiter::None, stream) = tree.kind {
- self.cursor.insert(stream.0);
- continue
- }
- }
- return Some(tree);
- }
+impl fmt::Display for Literal {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ TokenStream::from(TokenTree::from(self.clone())).fmt(f)
}
}
tokenstream::TokenTree::Token(span, token) => (span, token),
tokenstream::TokenTree::Delimited(span, delimed) => {
let delimiter = Delimiter::from_internal(delimed.delim);
- return TokenTree {
- span: Span(span),
- kind: TokenNode::Group(delimiter, TokenStream(delimed.tts.into())),
- };
+ let mut g = Group::new(delimiter, TokenStream(delimed.tts.into()));
+ g.set_span(Span(span));
+ return g.into()
}
};
let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone };
macro_rules! tt {
- ($e:expr) => (TokenTree { span: Span(span), kind: $e })
+ ($e:expr) => ({
+ let mut x = TokenTree::from($e);
+ x.set_span(Span(span));
+ x
+ })
}
macro_rules! op {
- ($a:expr) => (TokenNode::Op($a, op_kind));
+ ($a:expr) => (tt!(Op::new($a, op_kind)));
($a:expr, $b:expr) => ({
- stack.push(tt!(TokenNode::Op($b, op_kind).into()));
- TokenNode::Op($a, Spacing::Joint)
+ stack.push(tt!(Op::new($b, op_kind)));
+ tt!(Op::new($a, Spacing::Joint))
});
($a:expr, $b:expr, $c:expr) => ({
- stack.push(tt!(TokenNode::Op($c, op_kind)));
- stack.push(tt!(TokenNode::Op($b, Spacing::Joint)));
- TokenNode::Op($a, Spacing::Joint)
+ stack.push(tt!(Op::new($c, op_kind)));
+ stack.push(tt!(Op::new($b, Spacing::Joint)));
+ tt!(Op::new($a, Spacing::Joint))
})
}
- let kind = match token {
+ match token {
Eq => op!('='),
Lt => op!('<'),
Le => op!('<', '='),
Dollar => op!('$'),
Question => op!('?'),
- Ident(ident, false) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
- Ident(ident, true) => TokenNode::Term(Term(Symbol::intern(&format!("r#{}", ident)))),
- Literal(..) => TokenNode::Literal(self::Literal(token)),
+ Ident(ident, false) | Lifetime(ident) => {
+ tt!(Term::new(&ident.name.as_str(), Span(span)))
+ }
+ Ident(ident, true) => {
+ tt!(Term::new(&format!("r#{}", ident), Span(span)))
+ }
+ Literal(lit, suffix) => tt!(self::Literal { lit, suffix, span: Span(span) }),
DocComment(c) => {
+ let style = comments::doc_comment_style(&c.as_str());
+ let stripped = comments::strip_doc_comment_decoration(&c.as_str());
let stream = vec![
- tt!(TokenNode::Term(Term::intern("doc"))),
- tt!(op!('=')),
- tt!(TokenNode::Literal(self::Literal(Literal(Lit::Str_(c), None)))),
+ tt!(Term::new("doc", Span(span))),
+ tt!(Op::new('=', Spacing::Alone)),
+ tt!(self::Literal::string(&stripped)),
].into_iter().collect();
- stack.push(tt!(TokenNode::Group(Delimiter::Bracket, stream)));
- op!('#')
+ stack.push(tt!(Group::new(Delimiter::Bracket, stream)));
+ if style == ast::AttrStyle::Inner {
+ stack.push(tt!(Op::new('!', Spacing::Alone)));
+ }
+ tt!(Op::new('#', Spacing::Alone))
}
Interpolated(_) => {
__internal::with_sess(|(sess, _)| {
let tts = token.interpolated_to_tokenstream(sess, span);
- TokenNode::Group(Delimiter::None, TokenStream(tts))
+ tt!(Group::new(Delimiter::None, TokenStream(tts)))
})
}
DotEq => op!('.', '='),
OpenDelim(..) | CloseDelim(..) => unreachable!(),
Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
- };
-
- TokenTree { span: Span(span), kind: kind }
+ }
}
fn to_internal(self) -> tokenstream::TokenStream {
use syntax::parse::token::*;
use syntax::tokenstream::{TokenTree, Delimited};
- let (op, kind) = match self.kind {
- TokenNode::Op(op, kind) => (op, kind),
- TokenNode::Group(delimiter, tokens) => {
- return TokenTree::Delimited(self.span.0, Delimited {
- delim: delimiter.to_internal(),
- tts: tokens.0.into(),
+ let (op, kind, span) = match self {
+ self::TokenTree::Op(tt) => (tt.op(), tt.spacing(), tt.span()),
+ self::TokenTree::Group(tt) => {
+ return TokenTree::Delimited(tt.span.0, Delimited {
+ delim: tt.delimiter.to_internal(),
+ tts: tt.stream.0.into(),
}).into();
},
- TokenNode::Term(symbol) => {
- let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt() };
- let sym_str = symbol.0.as_str();
- let token =
- if sym_str.starts_with("'") { Lifetime(ident) }
- else if sym_str.starts_with("r#") {
- let name = Symbol::intern(&sym_str[2..]);
- let ident = ast::Ident { name, ctxt: self.span.0.ctxt() };
- Ident(ident, true)
- } else { Ident(ident, false) };
- return TokenTree::Token(self.span.0, token).into();
+ self::TokenTree::Term(tt) => {
+ let ident = ast::Ident::new(tt.sym, tt.span.0);
+ let sym_str = tt.sym.as_str();
+ let token = if sym_str.starts_with("'") {
+ Lifetime(ident)
+ } else if sym_str.starts_with("r#") {
+ let name = Symbol::intern(&sym_str[2..]);
+ let ident = ast::Ident::new(name, ident.span);
+ Ident(ident, true)
+ } else {
+ Ident(ident, false)
+ };
+ return TokenTree::Token(tt.span.0, token).into();
}
- TokenNode::Literal(self::Literal(Literal(Lit::Integer(ref a), b)))
+ self::TokenTree::Literal(self::Literal {
+ lit: Lit::Integer(ref a),
+ suffix,
+ span,
+ })
if a.as_str().starts_with("-") =>
{
let minus = BinOp(BinOpToken::Minus);
let integer = Symbol::intern(&a.as_str()[1..]);
- let integer = Literal(Lit::Integer(integer), b);
- let a = TokenTree::Token(self.span.0, minus);
- let b = TokenTree::Token(self.span.0, integer);
+ let integer = Literal(Lit::Integer(integer), suffix);
+ let a = TokenTree::Token(span.0, minus);
+ let b = TokenTree::Token(span.0, integer);
return vec![a, b].into_iter().collect()
}
- TokenNode::Literal(self::Literal(Literal(Lit::Float(ref a), b)))
+ self::TokenTree::Literal(self::Literal {
+ lit: Lit::Float(ref a),
+ suffix,
+ span,
+ })
if a.as_str().starts_with("-") =>
{
let minus = BinOp(BinOpToken::Minus);
let float = Symbol::intern(&a.as_str()[1..]);
- let float = Literal(Lit::Float(float), b);
- let a = TokenTree::Token(self.span.0, minus);
- let b = TokenTree::Token(self.span.0, float);
+ let float = Literal(Lit::Float(float), suffix);
+ let a = TokenTree::Token(span.0, minus);
+ let b = TokenTree::Token(span.0, float);
return vec![a, b].into_iter().collect()
}
- TokenNode::Literal(token) => {
- return TokenTree::Token(self.span.0, token.0).into()
+ self::TokenTree::Literal(tt) => {
+ let token = Literal(tt.lit, tt.suffix);
+ return TokenTree::Token(tt.span.0, token).into()
}
};
_ => panic!("unsupported character {}", op),
};
- let tree = TokenTree::Token(self.span.0, token);
+ let tree = TokenTree::Token(span.0, token);
match kind {
Spacing::Alone => tree.into(),
Spacing::Joint => tree.joint(),
//! This quasiquoter uses macros 2.0 hygiene to reliably access
//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
-use {Delimiter, Literal, Spacing, Span, Term, TokenNode, TokenStream, TokenTree};
+use {Delimiter, Literal, Spacing, Span, Term, Op, Group, TokenStream, TokenTree};
use syntax::ext::base::{ExtCtxt, ProcMacro};
use syntax::parse::token;
pub struct Quoter;
pub fn unquote<T: Into<TokenStream> + Clone>(tokens: &T) -> TokenStream {
- T::into(tokens.clone())
+ tokens.clone().into()
}
pub trait Quote {
fn quote(self) -> TokenStream;
}
+macro_rules! tt2ts {
+ ($e:expr) => (TokenStream::from(TokenTree::from($e)))
+}
+
macro_rules! quote_tok {
- (,) => { TokenNode::Op(',', Spacing::Alone) };
- (.) => { TokenNode::Op('.', Spacing::Alone) };
- (:) => { TokenNode::Op(':', Spacing::Alone) };
+ (,) => { tt2ts!(Op::new(',', Spacing::Alone)) };
+ (.) => { tt2ts!(Op::new('.', Spacing::Alone)) };
+ (:) => { tt2ts!(Op::new(':', Spacing::Alone)) };
+ (|) => { tt2ts!(Op::new('|', Spacing::Alone)) };
(::) => {
[
- TokenNode::Op(':', Spacing::Joint),
- TokenNode::Op(':', Spacing::Alone)
- ].iter().cloned().collect::<TokenStream>()
+ TokenTree::from(Op::new(':', Spacing::Joint)),
+ TokenTree::from(Op::new(':', Spacing::Alone)),
+ ].iter()
+ .cloned()
+ .map(|mut x| {
+ x.set_span(Span::def_site());
+ x
+ })
+ .collect::<TokenStream>()
};
- (!) => { TokenNode::Op('!', Spacing::Alone) };
- (<) => { TokenNode::Op('<', Spacing::Alone) };
- (>) => { TokenNode::Op('>', Spacing::Alone) };
- (_) => { TokenNode::Op('_', Spacing::Alone) };
- (0) => { TokenNode::Literal(::Literal::integer(0)) };
- (&) => { TokenNode::Op('&', Spacing::Alone) };
- ($i:ident) => { TokenNode::Term(Term::intern(stringify!($i))) };
+ (!) => { tt2ts!(Op::new('!', Spacing::Alone)) };
+ (<) => { tt2ts!(Op::new('<', Spacing::Alone)) };
+ (>) => { tt2ts!(Op::new('>', Spacing::Alone)) };
+ (_) => { tt2ts!(Op::new('_', Spacing::Alone)) };
+ (0) => { tt2ts!(Literal::i8_unsuffixed(0)) };
+ (&) => { tt2ts!(Op::new('&', Spacing::Alone)) };
+ ($i:ident) => { tt2ts!(Term::new(stringify!($i), Span::def_site())) };
}
macro_rules! quote_tree {
((unquote $($t:tt)*)) => { $($t)* };
((quote $($t:tt)*)) => { ($($t)*).quote() };
- (($($t:tt)*)) => { TokenNode::Group(Delimiter::Parenthesis, quote!($($t)*)) };
- ([$($t:tt)*]) => { TokenNode::Group(Delimiter::Bracket, quote!($($t)*)) };
- ({$($t:tt)*}) => { TokenNode::Group(Delimiter::Brace, quote!($($t)*)) };
+ (($($t:tt)*)) => { tt2ts!(Group::new(Delimiter::Parenthesis, quote!($($t)*))) };
+ ([$($t:tt)*]) => { tt2ts!(Group::new(Delimiter::Bracket, quote!($($t)*))) };
+ ({$($t:tt)*}) => { tt2ts!(Group::new(Delimiter::Brace, quote!($($t)*))) };
($t:tt) => { quote_tok!($t) };
}
macro_rules! quote {
() => { TokenStream::empty() };
($($t:tt)*) => {
- [
- $(TokenStream::from(quote_tree!($t)),)*
- ].iter().cloned().collect::<TokenStream>()
+ [$(quote_tree!($t),)*].iter()
+ .cloned()
+ .flat_map(|x| x.into_iter())
+ .collect::<TokenStream>()
};
}
let tokens = self.into_iter().filter_map(|tree| {
if after_dollar {
after_dollar = false;
- match tree.kind {
- TokenNode::Term(_) => {
+ match tree {
+ TokenTree::Term(_) => {
+ let tree = TokenStream::from(tree);
return Some(quote!(::__internal::unquote(&(unquote tree)),));
}
- TokenNode::Op('$', _) => {}
+ TokenTree::Op(ref tt) if tt.op() == '$' => {}
_ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
}
- } else if let TokenNode::Op('$', _) = tree.kind {
- after_dollar = true;
- return None;
+ } else if let TokenTree::Op(tt) = tree {
+ if tt.op() == '$' {
+ after_dollar = true;
+ return None;
+ }
}
Some(quote!(::TokenStream::from((quote tree)),))
- }).collect::<TokenStream>();
+ }).flat_map(|t| t.into_iter()).collect::<TokenStream>();
if after_dollar {
panic!("unexpected trailing `$` in `quote!`");
}
- quote!([(unquote tokens)].iter().cloned().collect::<::TokenStream>())
+ quote!(
+ [(unquote tokens)].iter()
+ .cloned()
+ .flat_map(|x| x.into_iter())
+ .collect::<::TokenStream>()
+ )
}
}
impl Quote for TokenTree {
fn quote(self) -> TokenStream {
- quote!(::TokenTree { span: (quote self.span), kind: (quote self.kind) })
+ match self {
+ TokenTree::Op(tt) => quote!(::TokenTree::Op( (quote tt) )),
+ TokenTree::Group(tt) => quote!(::TokenTree::Group( (quote tt) )),
+ TokenTree::Term(tt) => quote!(::TokenTree::Term( (quote tt) )),
+ TokenTree::Literal(tt) => quote!(::TokenTree::Literal( (quote tt) )),
+ }
}
}
-impl Quote for TokenNode {
+impl Quote for char {
fn quote(self) -> TokenStream {
- macro_rules! gen_match {
- ($($i:ident($($arg:ident),+)),*) => {
- match self {
- $(TokenNode::$i($($arg),+) => quote! {
- ::TokenNode::$i($((quote $arg)),+)
- },)*
- }
- }
- }
+ TokenTree::from(Literal::character(self)).into()
+ }
+}
- gen_match! { Op(op, kind), Group(delim, tokens), Term(term), Literal(lit) }
+impl<'a> Quote for &'a str {
+ fn quote(self) -> TokenStream {
+ TokenTree::from(Literal::string(self)).into()
}
}
-impl Quote for char {
+impl Quote for usize {
fn quote(self) -> TokenStream {
- TokenNode::Literal(Literal::character(self)).into()
+ TokenTree::from(Literal::usize_unsuffixed(self)).into()
}
}
-impl<'a> Quote for &'a str {
+impl Quote for Group {
fn quote(self) -> TokenStream {
- TokenNode::Literal(Literal::string(self)).into()
+ quote!(::Group::new((quote self.delimiter()), (quote self.stream())))
}
}
-impl Quote for usize {
+impl Quote for Op {
fn quote(self) -> TokenStream {
- TokenNode::Literal(Literal::integer(self as i128)).into()
+ quote!(::Op::new((quote self.op()), (quote self.spacing())))
}
}
impl Quote for Term {
fn quote(self) -> TokenStream {
- quote!(::Term::intern((quote self.as_str())))
+ quote!(::Term::new((quote self.as_str()), (quote self.span())))
}
}
impl LiteralKind {
pub fn with_contents_and_suffix(self, contents: Term, suffix: Option<Term>)
-> Literal {
- let contents = contents.0;
- let suffix = suffix.map(|t| t.0);
+ let sym = contents.sym;
+ let suffix = suffix.map(|t| t.sym);
match self {
$(LiteralKind::$i => {
- Literal(token::Literal(token::Lit::$i(contents), suffix))
+ Literal {
+ lit: token::Lit::$i(sym),
+ suffix,
+ span: contents.span,
+ }
})*
$(LiteralKind::$raw(n) => {
- Literal(token::Literal(token::Lit::$raw(contents, n), suffix))
+ Literal {
+ lit: token::Lit::$raw(sym, n),
+ suffix,
+ span: contents.span,
+ }
})*
}
}
impl Literal {
fn kind_contents_and_suffix(self) -> (LiteralKind, Term, Option<Term>) {
- let (lit, suffix) = match self.0 {
- token::Literal(lit, suffix) => (lit, suffix),
- _ => panic!("unsupported literal {:?}", self.0),
- };
-
- let (kind, contents) = match lit {
+ let (kind, contents) = match self.lit {
$(token::Lit::$i(contents) => (LiteralKind::$i, contents),)*
$(token::Lit::$raw(contents, n) => (LiteralKind::$raw(n), contents),)*
};
- (kind, Term(contents), suffix.map(Term))
+ let suffix = self.suffix.map(|sym| Term::new(&sym.as_str(), self.span()));
+ (kind, Term::new(&contents.as_str(), self.span()), suffix)
}
}
[dependencies]
core = { path = "../libcore" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
[build-dependencies]
cc = "1.0.1"
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![deny(warnings)]
-
#![feature(test)]
extern crate test;
[] ImplParent(DefId),
[] TraitOfItem(DefId),
[] IsReachableNonGeneric(DefId),
+ [] IsUnreachableLocalDefinition(DefId),
[] IsMirAvailable(DefId),
[] ItemAttrs(DefId),
[] TransFnAttrs(DefId),
[input] CrateDisambiguator(CrateNum),
[input] CrateHash(CrateNum),
[input] OriginalCrateName(CrateNum),
+ [input] ExtraFileName(CrateNum),
[] ImplementationsOfTrait { krate: CrateNum, trait_id: DefId },
[] AllTraitImplementations(CrateNum),
[] InstanceDefSizeEstimate { instance_def: InstanceDef<'tcx> },
- [] GetSymbolExportLevel(DefId),
-
[] WasmCustomSections(CrateNum),
[input] Features,
[] ProgramClausesFor(DefId),
[] WasmImportModuleMap(CrateNum),
[] ForeignModules(CrateNum),
+
+ [] UpstreamMonomorphizations(CrateNum),
+ [] UpstreamMonomorphizationsFor(DefId),
);
trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug {
```
"##,
+E0910: r##"
+This error indicates that a `#[non_exhaustive]` attribute was incorrectly placed
+on something other than a struct or enum.
+
+Examples of erroneous code:
+
+```compile_fail,E0910
+# #![feature(non_exhaustive)]
+
+#[non_exhaustive]
+trait Foo { }
+```
+"##,
+
+E0911: r##"
+This error indicates that a `#[non_exhaustive]` attribute had a value. The
+`#[non_exhaustive]` should be empty.
+
+Examples of erroneous code:
+
+```compile_fail,E0911
+# #![feature(non_exhaustive)]
+
+#[non_exhaustive(anything)]
+struct Foo;
+```
+"##,
}
for attr in &item.attrs {
if attr.check_name("inline") {
self.check_inline(attr, &item.span, target)
+ } else if attr.check_name("non_exhaustive") {
+ self.check_non_exhaustive(attr, item, target)
} else if attr.check_name("wasm_import_module") {
has_wasm_import_module = true;
if attr.value_str().is_none() {
}
}
+ /// Check if the `#[non_exhaustive]` attribute on an `item` is valid.
+ fn check_non_exhaustive(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) {
+ match target {
+ Target::Struct | Target::Enum => { /* Valid */ },
+ _ => {
+ struct_span_err!(self.tcx.sess,
+ attr.span,
+ E0910,
+ "attribute can only be applied to a struct or enum")
+ .span_label(item.span, "not a struct or enum")
+ .emit();
+ return;
+ }
+ }
+
+ if attr.meta_item_list().is_some() || attr.value_str().is_some() {
+ struct_span_err!(self.tcx.sess,
+ attr.span,
+ E0911,
+ "attribute should be empty")
+ .span_label(item.span, "not empty")
+ .emit();
+ }
+ }
+
/// Check if the `#[repr]` attributes on `item` are valid.
fn check_repr(&self, item: &hir::Item, target: Target) {
// Extract the names of all repr hints, e.g., [foo, bar, align] for:
fn lower_ident(&mut self, ident: Ident) -> Name {
let ident = ident.modern();
- if ident.ctxt == SyntaxContext::empty() {
+ if ident.span.ctxt() == SyntaxContext::empty() {
return ident.name;
}
*self.name_map
fn lower_label(&mut self, label: Option<Label>) -> Option<hir::Label> {
label.map(|label| hir::Label {
name: label.ident.name,
- span: label.span,
+ span: label.ident.span,
})
}
fn lower_variant(&mut self, v: &Variant) -> hir::Variant {
Spanned {
node: hir::Variant_ {
- name: v.node.name.name,
+ name: v.node.ident.name,
attrs: self.lower_attrs(&v.node.attrs),
data: self.lower_variant_data(&v.node.data),
disr_expr: v.node
}
hir::PathSegment::new(
- self.lower_ident(segment.identifier),
+ self.lower_ident(segment.ident),
parameters,
infer_types,
)
decl.inputs
.iter()
.map(|arg| match arg.pat.node {
- PatKind::Ident(_, ident, None) => respan(ident.span, ident.node.name),
+ PatKind::Ident(_, ident, None) => respan(ident.span, ident.name),
_ => respan(arg.pat.span, keywords::Invalid.name()),
})
.collect()
default: tp.default
.as_ref()
.map(|x| self.lower_ty(x, ImplTraitContext::Disallowed)),
- span: tp.span,
+ span: tp.ident.span,
pure_wrt_drop: attr::contains_name(&tp.attrs, "may_dangle"),
synthetic: tp.attrs
.iter()
}
fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime {
+ let span = l.ident.span;
match self.lower_ident(l.ident) {
- x if x == "'static" => self.new_named_lifetime(l.id, l.span, hir::LifetimeName::Static),
+ x if x == "'static" => self.new_named_lifetime(l.id, span, hir::LifetimeName::Static),
x if x == "'_" => match self.anonymous_lifetime_mode {
AnonymousLifetimeMode::CreateParameter => {
- let fresh_name = self.collect_fresh_in_band_lifetime(l.span);
- self.new_named_lifetime(l.id, l.span, fresh_name)
+ let fresh_name = self.collect_fresh_in_band_lifetime(span);
+ self.new_named_lifetime(l.id, span, fresh_name)
}
AnonymousLifetimeMode::PassThrough => {
- self.new_named_lifetime(l.id, l.span, hir::LifetimeName::Underscore)
+ self.new_named_lifetime(l.id, span, hir::LifetimeName::Underscore)
}
},
name => {
- self.maybe_collect_in_band_lifetime(l.span, name);
- self.new_named_lifetime(l.id, l.span, hir::LifetimeName::Name(name))
+ self.maybe_collect_in_band_lifetime(span, name);
+ self.new_named_lifetime(l.id, span, hir::LifetimeName::Name(name))
}
}
}
name: self.lower_ident(match f.ident {
Some(ident) => ident,
// FIXME(jseyfried) positional field hygiene
- None => Ident {
- name: Symbol::intern(&index.to_string()),
- ctxt: f.span.ctxt(),
- },
+ None => Ident::new(Symbol::intern(&index.to_string()), f.span),
}),
vis: self.lower_visibility(&f.vis, None),
ty: self.lower_ty(&f.ty, ImplTraitContext::Disallowed),
fn lower_field(&mut self, f: &Field) -> hir::Field {
hir::Field {
- name: respan(f.ident.span, self.lower_ident(f.ident.node)),
+ name: respan(f.ident.span, self.lower_ident(f.ident)),
expr: P(self.lower_expr(&f.expr)),
span: f.span,
is_shorthand: f.is_shorthand,
// Correctly resolve `self` imports
if path.segments.len() > 1
- && path.segments.last().unwrap().identifier.name == keywords::SelfValue.name()
+ && path.segments.last().unwrap().ident.name == keywords::SelfValue.name()
{
let _ = path.segments.pop();
if rename.is_none() {
- *name = path.segments.last().unwrap().identifier.name;
+ *name = path.segments.last().unwrap().ident.name;
}
}
hir::ForeignItemStatic(this.lower_ty(t, ImplTraitContext::Disallowed), m)
}
ForeignItemKind::Ty => hir::ForeignItemType,
+ ForeignItemKind::Macro(_) => panic!("shouldn't exist here"),
},
vis: this.lower_visibility(&i.vis, None),
span: i.span,
fn lower_pat(&mut self, p: &Pat) -> P<hir::Pat> {
let node = match p.node {
PatKind::Wild => hir::PatKind::Wild,
- PatKind::Ident(ref binding_mode, pth1, ref sub) => {
+ PatKind::Ident(ref binding_mode, ident, ref sub) => {
match self.resolver.get_resolution(p.id).map(|d| d.base_def()) {
// `None` can occur in body-less function signatures
def @ None | def @ Some(Def::Local(_)) => {
hir::PatKind::Binding(
self.lower_binding_mode(binding_mode),
canonical_id,
- respan(pth1.span, pth1.node.name),
+ respan(ident.span, ident.name),
sub.as_ref().map(|x| self.lower_pat(x)),
)
}
Some(def) => hir::PatKind::Path(hir::QPath::Resolved(
None,
P(hir::Path {
- span: pth1.span,
+ span: ident.span,
def,
- segments: hir_vec![hir::PathSegment::from_name(pth1.node.name)],
+ segments: hir_vec![hir::PathSegment::from_name(ident.name)],
}),
)),
}
ImplTraitContext::Disallowed,
);
let args = args.iter().map(|x| self.lower_expr(x)).collect();
- hir::ExprMethodCall(hir_seg, seg.span, args)
+ hir::ExprMethodCall(hir_seg, seg.ident.span, args)
}
ExprKind::Binary(binop, ref lhs, ref rhs) => {
let binop = self.lower_binop(binop);
),
ExprKind::Field(ref el, ident) => hir::ExprField(
P(self.lower_expr(el)),
- respan(ident.span, self.lower_ident(ident.node)),
+ respan(ident.span, self.lower_ident(ident)),
),
ExprKind::TupField(ref el, ident) => hir::ExprTupField(P(self.lower_expr(el)), ident),
ExprKind::Index(ref el, ref er) => {
let attr = {
// allow(unreachable_code)
let allow = {
- let allow_ident = self.str_to_ident("allow");
- let uc_ident = self.str_to_ident("unreachable_code");
- let uc_meta_item = attr::mk_spanned_word_item(e.span, uc_ident);
- let uc_nested = NestedMetaItemKind::MetaItem(uc_meta_item);
- let uc_spanned = respan(e.span, uc_nested);
- attr::mk_spanned_list_item(e.span, allow_ident, vec![uc_spanned])
+ let allow_ident = Ident::from_str("allow").with_span_pos(e.span);
+ let uc_ident = Ident::from_str("unreachable_code").with_span_pos(e.span);
+ let uc_nested = attr::mk_nested_word_item(uc_ident);
+ attr::mk_list_item(e.span, allow_ident, vec![uc_nested])
};
attr::mk_spanned_attr_outer(e.span, attr::mk_attr_id(), allow)
};
for v in &enum_definition.variants {
let variant_def_index =
this.create_def(v.node.data.id(),
- DefPathData::EnumVariant(v.node.name.name.as_str()),
+ DefPathData::EnumVariant(v.node.ident.name.as_str()),
REGULAR_SPACE,
v.span);
this.with_parent(variant_def_index, |this| {
}
fn visit_foreign_item(&mut self, foreign_item: &'a ForeignItem) {
+ if let ForeignItemKind::Macro(_) = foreign_item.node {
+ return self.visit_macro_invoc(foreign_item.id, false);
+ }
+
let def = self.create_def(foreign_item.id,
DefPathData::ValueNs(foreign_item.ident.name.as_str()),
REGULAR_SPACE,
lifetime_def.lifetime.id,
DefPathData::LifetimeDef(lifetime_def.lifetime.ident.name.as_str()),
REGULAR_SPACE,
- lifetime_def.lifetime.span
+ lifetime_def.lifetime.ident.span
);
}
GenericParam::Type(ref ty_param) => {
ty_param.id,
DefPathData::TypeParam(ty_param.ident.name.as_str()),
REGULAR_SPACE,
- ty_param.span
+ ty_param.ident.span
);
}
}
use util::nodemap::{DefIdMap, FxHashMap};
use arena::TypedArena;
-use std::cell::RefCell;
use std::io;
use ty::TyCtxt;
+use rustc_data_structures::sync::Lock;
+
pub mod blocks;
mod collector;
mod def_collector;
definitions: &'hir Definitions,
/// Bodies inlined from other crates are cached here.
- inlined_bodies: RefCell<DefIdMap<&'hir Body>>,
+ inlined_bodies: Lock<DefIdMap<&'hir Body>>,
/// The reverse mapping of `node_to_hir_id`.
hir_to_node_id: FxHashMap<HirId, NodeId>,
}
pub fn intern_inlined_body(&self, def_id: DefId, body: Body) -> &'hir Body {
+ let mut inlined_bodies = self.inlined_bodies.borrow_mut();
+ if let Some(&b) = inlined_bodies.get(&def_id) {
+ debug_assert_eq!(&body, b);
+ return b;
+ }
let body = self.forest.inlined_bodies.alloc(body);
- self.inlined_bodies.borrow_mut().insert(def_id, body);
+ inlined_bodies.insert(def_id, body);
body
}
map,
hir_to_node_id,
definitions,
- inlined_bodies: RefCell::new(DefIdMap()),
+ inlined_bodies: Lock::new(DefIdMap()),
};
hir_id_validator::check_crate(&map);
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
let ast::Ident {
- ref name,
- ctxt: _ // Ignore this
+ name,
+ span,
} = *self;
name.hash_stable(hcx, hasher);
+ span.hash_stable(hcx, hasher);
}
}
impl_stable_hash_for!(enum ::syntax::ast::Unsafety { Unsafe, Normal });
impl_stable_hash_for!(enum ::syntax::ast::Constness { Const, NotConst });
impl_stable_hash_for!(enum ::syntax::ast::Defaultness { Default, Final });
-impl_stable_hash_for!(struct ::syntax::ast::Lifetime { id, span, ident });
+impl_stable_hash_for!(struct ::syntax::ast::Lifetime { id, ident });
impl_stable_hash_for!(enum ::syntax::ast::StrStyle { Cooked, Raw(pounds) });
impl_stable_hash_for!(enum ::syntax::ast::AttrStyle { Outer, Inner });
style.hash_stable(hcx, hasher);
path.segments.len().hash_stable(hcx, hasher);
for segment in &path.segments {
- segment.identifier.name.hash_stable(hcx, hasher);
+ segment.ident.name.hash_stable(hcx, hasher);
}
for tt in tokens.trees() {
tt.hash_stable(hcx, hasher);
});
impl_stable_hash_for!(struct ::syntax::ast::MetaItem {
- name,
+ ident,
node,
span
});
}
}
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ty::subst::Kind<'gcx> {
+impl<'a, 'gcx, T> ToStableHashKey<StableHashingContext<'a>> for &'gcx ty::Slice<T>
+ where T: HashStable<StableHashingContext<'a>>
+{
+ type KeyType = Fingerprint;
+
+ #[inline]
+ fn to_stable_hash_key(&self, hcx: &StableHashingContext<'a>) -> Fingerprint {
+ let mut hasher = StableHasher::new();
+ let mut hcx: StableHashingContext<'a> = hcx.clone();
+ self.hash_stable(&mut hcx, &mut hasher);
+ hasher.finish()
+ }
+}
+
+impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for ty::subst::Kind<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
+ mem::discriminant(self).hash_stable(hcx, hasher);
match self {
ty::subst::UnpackedKind::Lifetime(lt) => lt.hash_stable(hcx, hasher),
ty::subst::UnpackedKind::Type(ty) => ty.hash_stable(hcx, hasher),
let tcx = tcx.expect("can't hash AllocIds during hir lowering");
if let Some(alloc) = tcx.interpret_interner.get_alloc(*self) {
AllocDiscriminant::Alloc.hash_stable(hcx, hasher);
- if !hcx.alloc_id_recursion_tracker.insert(*self) {
+ if hcx.alloc_id_recursion_tracker.insert(*self) {
tcx
.interpret_interner
.get_corresponding_static_def_id(*self)
}
}
+impl_stable_hash_for!(
+ impl<'tcx> for struct traits::ProgramClause<'tcx> {
+ goal, hypotheses
+ }
+);
+
impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for traits::Clause<'tcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
mem::discriminant(self).hash_stable(hcx, hasher);
match self {
- Implies(hypotheses, goal) => {
- hypotheses.hash_stable(hcx, hasher);
- goal.hash_stable(hcx, hasher);
- }
- DomainGoal(domain_goal) => domain_goal.hash_stable(hcx, hasher),
+ Implies(clause) => clause.hash_stable(hcx, hasher),
ForAll(clause) => clause.hash_stable(hcx, hasher),
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use syntax::ast;
+use syntax::symbol::InternedString;
use syntax_pos::Span;
use ty::{self, Ty};
MiscVariable(Span),
NormalizeProjectionType(Span),
TypeInference(Span),
- TypeParameterDefinition(Span, ast::Name),
+ TypeParameterDefinition(Span, InternedString),
/// one of the upvars or closure kind parameters in a `ClosureSubsts`
/// (before it has been determined)
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(box_patterns)]
#![feature(box_syntax)]
-#![cfg_attr(stage0, feature(conservative_impl_trait))]
#![feature(const_fn)]
-#![cfg_attr(stage0, feature(copy_closures, clone_closures))]
#![feature(core_intrinsics)]
#![feature(drain_filter)]
#![feature(dyn_trait)]
#![feature(entry_or_default)]
#![feature(from_ref)]
#![feature(fs_read_write)]
-#![cfg_attr(stage0, feature(i128_type, i128))]
-#![cfg_attr(stage0, feature(inclusive_range_syntax))]
#![cfg_attr(windows, feature(libc))]
-#![cfg_attr(stage0, feature(match_default_bindings))]
#![feature(macro_lifetime_matcher)]
#![feature(macro_vis_matcher)]
#![feature(exhaustive_patterns)]
#![feature(slice_patterns)]
#![feature(specialization)]
#![feature(unboxed_closures)]
-#![cfg_attr(stage0, feature(underscore_lifetimes))]
-#![cfg_attr(stage0, feature(universal_impl_trait))]
#![feature(trace_macros)]
#![feature(trusted_len)]
#![feature(catch_expr)]
ast_visit::walk_ty(self, t);
}
- fn visit_ident(&mut self, sp: Span, id: ast::Ident) {
- run_lints!(self, check_ident, early_passes, sp, id);
+ fn visit_ident(&mut self, ident: ast::Ident) {
+ run_lints!(self, check_ident, early_passes, ident);
}
fn visit_mod(&mut self, m: &'a ast::Mod, s: Span, _a: &[ast::Attribute], n: ast::NodeId) {
continue
}
};
- let name = word.name();
+ let name = word.ident.name;
match store.check_lint_name(&name.as_str()) {
CheckLintNameResult::Ok(ids) => {
let src = LintSource::Node(name, li.span);
pub use self::Level::*;
pub use self::LintSource::*;
-use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::sync::{self, Lrc};
use errors::{DiagnosticBuilder, DiagnosticId};
use hir::def_id::{CrateNum, LOCAL_CRATE};
}
pub trait EarlyLintPass: LintPass {
- fn check_ident(&mut self, _: &EarlyContext, _: Span, _: ast::Ident) { }
+ fn check_ident(&mut self, _: &EarlyContext, _: ast::Ident) { }
fn check_crate(&mut self, _: &EarlyContext, _: &ast::Crate) { }
fn check_crate_post(&mut self, _: &EarlyContext, _: &ast::Crate) { }
fn check_mod(&mut self, _: &EarlyContext, _: &ast::Mod, _: Span, _: ast::NodeId) { }
}
/// A lint pass boxed up as a trait object.
-pub type EarlyLintPassObject = Box<dyn EarlyLintPass + 'static>;
-pub type LateLintPassObject = Box<dyn for<'a, 'tcx> LateLintPass<'a, 'tcx> + 'static>;
+pub type EarlyLintPassObject = Box<dyn EarlyLintPass + sync::Send + sync::Sync + 'static>;
+pub type LateLintPassObject = Box<dyn for<'a, 'tcx> LateLintPass<'a, 'tcx> + sync::Send
+ + sync::Sync + 'static>;
/// Identifies a lint known to the compiler.
#[derive(Clone, Copy, Debug)]
// except according to those terms.
use hir::def_id::{DefId, LOCAL_CRATE};
+use ich::StableHashingContext;
+use rustc_data_structures::stable_hasher::{StableHasher, HashStable,
+ StableHasherResult};
use std::cmp;
+use std::mem;
use ty;
+use ty::subst::Substs;
/// The SymbolExportLevel of a symbols specifies from which kinds of crates
/// the symbol will be exported. `C` symbols will be exported from any
}
#[derive(Eq, PartialEq, Debug, Copy, Clone, RustcEncodable, RustcDecodable)]
-pub enum ExportedSymbol {
+pub enum ExportedSymbol<'tcx> {
NonGeneric(DefId),
+ Generic(DefId, &'tcx Substs<'tcx>),
NoDefId(ty::SymbolName),
}
-impl ExportedSymbol {
- pub fn symbol_name(&self, tcx: ty::TyCtxt) -> ty::SymbolName {
+impl<'tcx> ExportedSymbol<'tcx> {
+ pub fn symbol_name(&self,
+ tcx: ty::TyCtxt<'_, 'tcx, '_>)
+ -> ty::SymbolName {
match *self {
ExportedSymbol::NonGeneric(def_id) => {
tcx.symbol_name(ty::Instance::mono(tcx, def_id))
}
+ ExportedSymbol::Generic(def_id, substs) => {
+ tcx.symbol_name(ty::Instance::new(def_id, substs))
+ }
ExportedSymbol::NoDefId(symbol_name) => {
symbol_name
}
}
}
- pub fn compare_stable(&self, tcx: ty::TyCtxt, other: &ExportedSymbol) -> cmp::Ordering {
+ pub fn compare_stable(&self,
+ tcx: ty::TyCtxt<'_, 'tcx, '_>,
+ other: &ExportedSymbol<'tcx>)
+ -> cmp::Ordering {
match *self {
- ExportedSymbol::NonGeneric(self_def_id) => {
- match *other {
- ExportedSymbol::NonGeneric(other_def_id) => {
- tcx.def_path_hash(self_def_id).cmp(&tcx.def_path_hash(other_def_id))
- }
- ExportedSymbol::NoDefId(_) => {
- cmp::Ordering::Less
- }
+ ExportedSymbol::NonGeneric(self_def_id) => match *other {
+ ExportedSymbol::NonGeneric(other_def_id) => {
+ tcx.def_path_hash(self_def_id).cmp(&tcx.def_path_hash(other_def_id))
+ }
+ ExportedSymbol::Generic(..) |
+ ExportedSymbol::NoDefId(_) => {
+ cmp::Ordering::Less
+ }
+ }
+ ExportedSymbol::Generic(..) => match *other {
+ ExportedSymbol::NonGeneric(_) => {
+ cmp::Ordering::Greater
+ }
+ ExportedSymbol::Generic(..) => {
+ self.symbol_name(tcx).cmp(&other.symbol_name(tcx))
+ }
+ ExportedSymbol::NoDefId(_) => {
+ cmp::Ordering::Less
}
}
- ExportedSymbol::NoDefId(self_symbol_name) => {
- match *other {
- ExportedSymbol::NonGeneric(_) => {
- cmp::Ordering::Greater
- }
- ExportedSymbol::NoDefId(ref other_symbol_name) => {
- self_symbol_name.cmp(other_symbol_name)
- }
+ ExportedSymbol::NoDefId(self_symbol_name) => match *other {
+ ExportedSymbol::NonGeneric(_) |
+ ExportedSymbol::Generic(..) => {
+ cmp::Ordering::Greater
+ }
+ ExportedSymbol::NoDefId(ref other_symbol_name) => {
+ self_symbol_name.cmp(other_symbol_name)
}
}
}
}
}
-impl_stable_hash_for!(enum self::ExportedSymbol {
- NonGeneric(def_id),
- NoDefId(symbol_name)
-});
-
pub fn metadata_symbol_name(tcx: ty::TyCtxt) -> String {
format!("rust_metadata_{}_{}",
tcx.original_crate_name(LOCAL_CRATE),
tcx.crate_disambiguator(LOCAL_CRATE).to_fingerprint().to_hex())
}
+
+impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for ExportedSymbol<'gcx> {
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'a>,
+ hasher: &mut StableHasher<W>) {
+ mem::discriminant(self).hash_stable(hcx, hasher);
+ match *self {
+ ExportedSymbol::NonGeneric(def_id) => {
+ def_id.hash_stable(hcx, hasher);
+ }
+ ExportedSymbol::Generic(def_id, substs) => {
+ def_id.hash_stable(hcx, hasher);
+ substs.hash_stable(hcx, hasher);
+ }
+ ExportedSymbol::NoDefId(symbol_name) => {
+ symbol_name.hash_stable(hcx, hasher);
+ }
+ }
+ }
+}
self.resolve_type_vars_or_error(expr.hir_id, self.tables.expr_ty_adjusted_opt(expr))
}
+ /// Returns the type of value that this pattern matches against.
+ /// Some non-obvious cases:
+ ///
+ /// - a `ref x` binding matches against a value of type `T` and gives
+ /// `x` the type `&T`; we return `T`.
+ /// - a pattern with implicit derefs (thanks to default binding
+ /// modes #42640) may look like `Some(x)` but in fact have
+ /// implicit deref patterns attached (e.g., it is really
+ /// `&Some(x)`). In that case, we return the "outermost" type
+ /// (e.g., `&Option<T>`).
fn pat_ty(&self, pat: &hir::Pat) -> McResult<Ty<'tcx>> {
+ // Check for implicit `&` types wrapping the pattern; note
+ // that these are never attached to binding patterns, so
+ // actually this is somewhat "disjoint" from the code below
+ // that aims to account for `ref x`.
+ if let Some(vec) = self.tables.pat_adjustments().get(pat.hir_id) {
+ if let Some(first_ty) = vec.first() {
+ debug!("pat_ty(pat={:?}) found adjusted ty `{:?}`", pat, first_ty);
+ return Ok(first_ty);
+ }
+ }
+
+ self.pat_ty_unadjusted(pat)
+ }
+
+
+ /// Like `pat_ty`, but ignores implicit `&` patterns.
+ fn pat_ty_unadjusted(&self, pat: &hir::Pat) -> McResult<Ty<'tcx>> {
let base_ty = self.node_ty(pat.hir_id)?;
+ debug!("pat_ty(pat={:?}) base_ty={:?}", pat, base_ty);
+
// This code detects whether we are looking at a `ref x`,
// and if so, figures out what the type *being borrowed* is.
let ret_ty = match pat.node {
}
_ => base_ty,
};
- debug!("pat_ty(pat={:?}) base_ty={:?} ret_ty={:?}",
- pat, base_ty, ret_ty);
+ debug!("pat_ty(pat={:?}) ret_ty={:?}", pat, ret_ty);
+
Ok(ret_ty)
}
self.tcx.adt_def(enum_def).variant_with_id(def_id).fields.len())
}
Def::StructCtor(_, CtorKind::Fn) => {
- match self.pat_ty(&pat)?.sty {
+ match self.pat_ty_unadjusted(&pat)?.sty {
ty::TyAdt(adt_def, _) => {
(cmt, adt_def.non_enum_variant().fields.len())
}
PatKind::Tuple(ref subpats, ddpos) => {
// (p1, ..., pN)
- let expected_len = match self.pat_ty(&pat)?.sty {
+ let expected_len = match self.pat_ty_unadjusted(&pat)?.sty {
ty::TyTuple(ref tys) => tys.len(),
ref ty => span_bug!(pat.span, "tuple pattern unexpected type {:?}", ty),
};
/// the result of `g()` occurs after the yield (and therefore
/// doesn't). If we want to infer that, we can look at the
/// postorder traversal:
- /// ```
- /// `foo` `f` Call#1 `y` Yield `bar` `g` Call#3 Call#2 Call#0
+ /// ```plain,ignore
+ /// `foo` `f` Call#1 `y` Yield `bar` `g` Call#3 Call#2 Call#0
/// ```
///
/// In which we can easily see that `Call#1` occurs before the yield,
})
}
+/// Returns whether the specified `lang_item` doesn't actually need to be
+/// present for this compilation.
+///
+/// Not all lang items are always required for each compilation, particularly in
+/// the case of panic=abort. In these situations some lang items are injected by
+/// crates and don't actually need to be defined in libstd.
+pub fn whitelisted(tcx: TyCtxt, lang_item: lang_items::LangItem) -> bool {
+ // If we're not compiling with unwinding, we won't actually need these
+ // symbols. Other panic runtimes ensure that the relevant symbols are
+ // available to link things together, but they're never exercised.
+ if tcx.sess.panic_strategy() != PanicStrategy::Unwind {
+ return lang_item == lang_items::EhPersonalityLangItem ||
+ lang_item == lang_items::EhUnwindResumeLangItem
+ }
+
+ false
+}
+
fn verify<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
items: &lang_items::LanguageItems) {
// We only need to check for the presence of weak lang items if we're
}
}
- // If we're not compiling with unwinding, we won't actually need these
- // symbols. Other panic runtimes ensure that the relevant symbols are
- // available to link things together, but they're never exercised.
- let mut whitelisted = HashSet::new();
- if tcx.sess.panic_strategy() != PanicStrategy::Unwind {
- whitelisted.insert(lang_items::EhPersonalityLangItem);
- whitelisted.insert(lang_items::EhUnwindResumeLangItem);
- }
-
$(
if missing.contains(&lang_items::$item) &&
- !whitelisted.contains(&lang_items::$item) &&
+ !whitelisted(tcx, lang_items::$item) &&
items.$name().is_none() {
tcx.sess.err(&format!("language item required, but not found: `{}`",
stringify!($name)));
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::cell::{Ref, RefCell};
use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::sync::{RwLock, ReadGuard};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
StableHasherResult};
use ich::StableHashingContext;
#[derive(Clone, Debug)]
pub struct Cache {
- predecessors: RefCell<Option<IndexVec<BasicBlock, Vec<BasicBlock>>>>
+ predecessors: RwLock<Option<IndexVec<BasicBlock, Vec<BasicBlock>>>>
}
impl Cache {
pub fn new() -> Self {
Cache {
- predecessors: RefCell::new(None)
+ predecessors: RwLock::new(None)
}
}
*self.predecessors.borrow_mut() = None;
}
- pub fn predecessors(&self, mir: &Mir) -> Ref<IndexVec<BasicBlock, Vec<BasicBlock>>> {
+ pub fn predecessors(&self, mir: &Mir) -> ReadGuard<IndexVec<BasicBlock, Vec<BasicBlock>>> {
if self.predecessors.borrow().is_none() {
*self.predecessors.borrow_mut() = Some(calculate_predecessors(mir));
}
- Ref::map(self.predecessors.borrow(), |p| p.as_ref().unwrap())
+ ReadGuard::map(self.predecessors.borrow(), |p| p.as_ref().unwrap())
}
}
use std::slice;
use hir::{self, InlineAsm};
use std::borrow::{Cow};
-use std::cell::Ref;
+use rustc_data_structures::sync::ReadGuard;
use std::fmt::{self, Debug, Formatter, Write};
use std::{iter, mem, u32};
use std::ops::{Index, IndexMut};
}
#[inline]
- pub fn predecessors(&self) -> Ref<IndexVec<BasicBlock, Vec<BasicBlock>>> {
+ pub fn predecessors(&self) -> ReadGuard<IndexVec<BasicBlock, Vec<BasicBlock>>> {
self.cache.predecessors(self)
}
#[inline]
- pub fn predecessors_for(&self, bb: BasicBlock) -> Ref<Vec<BasicBlock>> {
- Ref::map(self.predecessors(), |p| &p[bb])
+ pub fn predecessors_for(&self, bb: BasicBlock) -> ReadGuard<Vec<BasicBlock>> {
+ ReadGuard::map(self.predecessors(), |p| &p[bb])
}
#[inline]
"embed LLVM bitcode in object files"),
strip_debuginfo_if_disabled: Option<bool> = (None, parse_opt_bool, [TRACKED],
"tell the linker to strip debuginfo when building without debuginfo enabled."),
+ share_generics: Option<bool> = (None, parse_opt_bool, [TRACKED],
+ "make the current crate share its generic instantiations"),
}
pub fn default_lib_output() -> CrateType {
} else if meta_item.is_meta_item_list() {
let msg = format!(
"invalid predicate in --cfg command line argument: `{}`",
- meta_item.name()
+ meta_item.ident
);
early_error(ErrorOutputType::default(), &msg)
}
- (meta_item.name(), meta_item.value_str())
+ (meta_item.ident.name, meta_item.value_str())
})
.collect::<ast::CrateConfig>()
}
}
for param in generics.types.iter() {
- let name = param.name.as_str().to_string();
+ let name = param.name.to_string();
let ty = trait_ref.substs.type_for_def(param);
let ty_str = ty.to_string();
flags.push((name.clone(),
TypeOutlives(ty::TypeOutlivesPredicate<'tcx>),
}
+pub type PolyDomainGoal<'tcx> = ty::Binder<DomainGoal<'tcx>>;
+
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum QuantifierKind {
Universal,
}
}
-impl<'tcx> From<DomainGoal<'tcx>> for Clause<'tcx> {
- fn from(domain_goal: DomainGoal<'tcx>) -> Self {
- Clause::DomainGoal(domain_goal)
+impl<'tcx> From<PolyDomainGoal<'tcx>> for Goal<'tcx> {
+ fn from(domain_goal: PolyDomainGoal<'tcx>) -> Self {
+ match domain_goal.no_late_bound_regions() {
+ Some(p) => p.into(),
+ None => Goal::Quantified(
+ QuantifierKind::Universal,
+ Box::new(domain_goal.map_bound(|p| p.into()))
+ ),
+ }
}
}
/// Harrop Formulas".
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub enum Clause<'tcx> {
- // FIXME: again, use interned refs instead of `Box`
- Implies(Vec<Goal<'tcx>>, DomainGoal<'tcx>),
- DomainGoal(DomainGoal<'tcx>),
- ForAll(Box<ty::Binder<Clause<'tcx>>>),
+ Implies(ProgramClause<'tcx>),
+ ForAll(ty::Binder<ProgramClause<'tcx>>),
+}
+
+/// A "program clause" has the form `D :- G1, ..., Gn`. It is saying
+/// that the domain goal `D` is true if `G1...Gn` are provable. This
+/// is equivalent to the implication `G1..Gn => D`; we usually write
+/// it with the reverse implication operator `:-` to emphasize the way
+/// that programs are actually solved (via backchaining, which starts
+/// with the goal to solve and proceeds from there).
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub struct ProgramClause<'tcx> {
+ /// This goal will be considered true...
+ pub goal: DomainGoal<'tcx>,
+
+ /// ...if we can prove these hypotheses (there may be no hypotheses at all):
+ pub hypotheses: Vec<Goal<'tcx>>,
}
pub type Selection<'tcx> = Vtable<'tcx, PredicateObligation<'tcx>>;
for command in self.subcommands.iter().chain(Some(self)).rev() {
if let Some(ref condition) = command.condition {
if !attr::eval_condition(condition, &tcx.sess.parse_sess, &mut |c| {
- options.contains(&(c.name().as_str().to_string(),
+ options.contains(&(c.ident.name.as_str().to_string(),
match c.value_str().map(|s| s.as_str().to_string()) {
Some(s) => Some(s),
None => None
let trait_str = tcx.item_path_str(trait_ref.def_id);
let generics = tcx.generics_of(trait_ref.def_id);
let generic_map = generics.types.iter().map(|param| {
- (param.name.as_str().to_string(),
+ (param.name.to_string(),
trait_ref.substs.type_for_def(param).to_string())
}).collect::<FxHashMap<String, String>>();
/// When checking `foo`, we have to prove `T: Trait`. This basically
/// translates into this:
///
+ /// ```plain,ignore
/// (T: Trait + Sized →_\impl T: Trait), T: Trait ⊢ T: Trait
+ /// ```
///
/// When we try to prove it, we first go the first option, which
/// recurses. This shows us that the impl is "useless" - it won't
if self.can_use_global_caches(param_env) {
let mut cache = self.tcx().evaluation_cache.hashmap.borrow_mut();
if let Some(trait_ref) = self.tcx().lift_to_global(&trait_ref) {
+ debug!(
+ "insert_evaluation_cache(trait_ref={:?}, result={:?}) global",
+ trait_ref,
+ result,
+ );
cache.insert(trait_ref, WithDepNode::new(dep_node, result));
return;
}
}
+ debug!(
+ "insert_evaluation_cache(trait_ref={:?}, result={:?}) local",
+ trait_ref,
+ result,
+ );
self.infcx.evaluation_cache.hashmap
.borrow_mut()
.insert(trait_ref, WithDepNode::new(dep_node, result));
if self.intercrate_ambiguity_causes.is_some() {
debug!("evaluate_stack: intercrate_ambiguity_causes is some");
// Heuristics: show the diagnostics when there are no candidates in crate.
- let candidate_set = self.assemble_candidates(stack)?;
- if !candidate_set.ambiguous && candidate_set.vec.iter().all(|c| {
- !self.evaluate_candidate(stack, &c).may_apply()
- }) {
- let trait_ref = stack.obligation.predicate.skip_binder().trait_ref;
- let self_ty = trait_ref.self_ty();
- let trait_desc = trait_ref.to_string();
- let self_desc = if self_ty.has_concrete_skeleton() {
- Some(self_ty.to_string())
- } else {
- None
- };
- let cause = if let Conflict::Upstream = conflict {
- IntercrateAmbiguityCause::UpstreamCrateUpdate { trait_desc, self_desc }
- } else {
- IntercrateAmbiguityCause::DownstreamCrate { trait_desc, self_desc }
- };
- debug!("evaluate_stack: pushing cause = {:?}", cause);
- self.intercrate_ambiguity_causes.as_mut().unwrap().push(cause);
+ if let Ok(candidate_set) = self.assemble_candidates(stack) {
+ if !candidate_set.ambiguous && candidate_set.vec.iter().all(|c| {
+ !self.evaluate_candidate(stack, &c).may_apply()
+ }) {
+ let trait_ref = stack.obligation.predicate.skip_binder().trait_ref;
+ let self_ty = trait_ref.self_ty();
+ let trait_desc = trait_ref.to_string();
+ let self_desc = if self_ty.has_concrete_skeleton() {
+ Some(self_ty.to_string())
+ } else {
+ None
+ };
+ let cause = if let Conflict::Upstream = conflict {
+ IntercrateAmbiguityCause::UpstreamCrateUpdate {
+ trait_desc,
+ self_desc,
+ }
+ } else {
+ IntercrateAmbiguityCause::DownstreamCrate { trait_desc, self_desc }
+ };
+ debug!("evaluate_stack: pushing cause = {:?}", cause);
+ self.intercrate_ambiguity_causes.as_mut().unwrap().push(cause);
+ }
}
}
return Ok(None);
let mut cache = tcx.selection_cache.hashmap.borrow_mut();
if let Some(trait_ref) = tcx.lift_to_global(&trait_ref) {
if let Some(candidate) = tcx.lift_to_global(&candidate) {
+ debug!(
+ "insert_candidate_cache(trait_ref={:?}, candidate={:?}) global",
+ trait_ref,
+ candidate,
+ );
cache.insert(trait_ref, WithDepNode::new(dep_node, candidate));
return;
}
}
}
+ debug!(
+ "insert_candidate_cache(trait_ref={:?}, candidate={:?}) local",
+ trait_ref,
+ candidate,
+ );
self.infcx.selection_cache.hashmap
.borrow_mut()
.insert(trait_ref, WithDepNode::new(dep_node, candidate));
}
}
+impl<'tcx> fmt::Display for traits::ProgramClause<'tcx> {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let traits::ProgramClause { goal, hypotheses } = self;
+ write!(fmt, "{}", goal)?;
+ if !hypotheses.is_empty() {
+ write!(fmt, " :- ")?;
+ for (index, condition) in hypotheses.iter().enumerate() {
+ if index > 0 {
+ write!(fmt, ", ")?;
+ }
+ write!(fmt, "{}", condition)?;
+ }
+ }
+ write!(fmt, ".")
+ }
+}
+
impl<'tcx> fmt::Display for traits::Clause<'tcx> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
use traits::Clause::*;
match self {
- Implies(hypotheses, goal) => {
- write!(fmt, "{}", goal)?;
- if !hypotheses.is_empty() {
- write!(fmt, " :- ")?;
- for (index, condition) in hypotheses.iter().enumerate() {
- if index > 0 {
- write!(fmt, ", ")?;
- }
- write!(fmt, "{}", condition)?;
- }
- }
- write!(fmt, ".")
- }
- DomainGoal(domain_goal) => write!(fmt, "{}.", domain_goal),
+ Implies(clause) => write!(fmt, "{}", clause),
ForAll(clause) => {
// FIXME: appropriate binder names
write!(fmt, "forall<> {{ {} }}", clause.skip_binder())
}
}
+BraceStructTypeFoldableImpl! {
+ impl<'tcx> TypeFoldable<'tcx> for traits::ProgramClause<'tcx> {
+ goal,
+ hypotheses
+ }
+}
+
EnumTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for traits::Clause<'tcx> {
- (traits::Clause::Implies)(hypotheses, goal),
- (traits::Clause::DomainGoal)(domain_goal),
+ (traits::Clause::Implies)(clause),
(traits::Clause::ForAll)(clause),
}
}
use dep_graph::{DepNode, DepConstructor};
use errors::DiagnosticBuilder;
use session::Session;
-use session::config::{BorrowckMode, OutputFilenames};
+use session::config::{BorrowckMode, OutputFilenames, OptLevel};
+use session::config::CrateType::*;
use middle;
use hir::{TraitCandidate, HirId, ItemLocalId};
use hir::def::{Def, Export};
StableVec};
use arena::{TypedArena, DroplessArena};
use rustc_data_structures::indexed_vec::IndexVec;
-use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::sync::{Lrc, Lock};
use std::any::Any;
use std::borrow::Borrow;
use std::cell::{Cell, RefCell};
use std::sync::mpsc;
use std::sync::Arc;
use syntax::abi;
-use syntax::ast::{self, Name, NodeId};
+use syntax::ast::{self, NodeId};
use syntax::attr;
use syntax::codemap::MultiSpan;
use syntax::feature_gate;
-use syntax::symbol::{Symbol, keywords};
+use syntax::symbol::{Symbol, keywords, InternedString};
use syntax_pos::Span;
use hir;
/// Specifically use a speedy hash algorithm for these hash sets,
/// they're accessed quite often.
- type_: RefCell<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
- type_list: RefCell<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
- substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
- canonical_var_infos: RefCell<FxHashSet<Interned<'tcx, Slice<CanonicalVarInfo>>>>,
- region: RefCell<FxHashSet<Interned<'tcx, RegionKind>>>,
- existential_predicates: RefCell<FxHashSet<Interned<'tcx, Slice<ExistentialPredicate<'tcx>>>>>,
- predicates: RefCell<FxHashSet<Interned<'tcx, Slice<Predicate<'tcx>>>>>,
- const_: RefCell<FxHashSet<Interned<'tcx, Const<'tcx>>>>,
+ type_: Lock<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
+ type_list: Lock<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
+ substs: Lock<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
+ canonical_var_infos: Lock<FxHashSet<Interned<'tcx, Slice<CanonicalVarInfo>>>>,
+ region: Lock<FxHashSet<Interned<'tcx, RegionKind>>>,
+ existential_predicates: Lock<FxHashSet<Interned<'tcx, Slice<ExistentialPredicate<'tcx>>>>>,
+ predicates: Lock<FxHashSet<Interned<'tcx, Slice<Predicate<'tcx>>>>>,
+ const_: Lock<FxHashSet<Interned<'tcx, Const<'tcx>>>>,
}
impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
CtxtInterners {
- arena,
- type_: RefCell::new(FxHashSet()),
- type_list: RefCell::new(FxHashSet()),
- substs: RefCell::new(FxHashSet()),
- region: RefCell::new(FxHashSet()),
- existential_predicates: RefCell::new(FxHashSet()),
- canonical_var_infos: RefCell::new(FxHashSet()),
- predicates: RefCell::new(FxHashSet()),
- const_: RefCell::new(FxHashSet()),
+ arena: arena,
+ type_: Lock::new(FxHashSet()),
+ type_list: Lock::new(FxHashSet()),
+ substs: Lock::new(FxHashSet()),
+ canonical_var_infos: Lock::new(FxHashSet()),
+ region: Lock::new(FxHashSet()),
+ existential_predicates: Lock::new(FxHashSet()),
+ predicates: Lock::new(FxHashSet()),
+ const_: Lock::new(FxHashSet()),
}
}
/// by `proc-macro` crates.
pub derive_macros: RefCell<NodeMap<Symbol>>,
- stability_interner: RefCell<FxHashSet<&'tcx attr::Stability>>,
+ stability_interner: Lock<FxHashSet<&'tcx attr::Stability>>,
pub interpret_interner: InterpretInterner<'tcx>,
- layout_interner: RefCell<FxHashSet<&'tcx LayoutDetails>>,
+ layout_interner: Lock<FxHashSet<&'tcx LayoutDetails>>,
/// A vector of every trait accessible in the whole crate
/// (i.e. including those from subcrates). This is used only for
/// This is intended to only get used during the trans phase of the compiler
/// when satisfying the query for a particular codegen unit. Internally in
/// the query it'll send data along this channel to get processed later.
- pub tx_to_llvm_workers: mpsc::Sender<Box<dyn Any + Send>>,
+ pub tx_to_llvm_workers: Lock<mpsc::Sender<Box<dyn Any + Send>>>,
output_filenames: Arc<OutputFilenames>,
}
/// Everything needed to efficiently work with interned allocations
#[derive(Debug, Default)]
pub struct InterpretInterner<'tcx> {
- inner: RefCell<InterpretInternerInner<'tcx>>,
+ inner: Lock<InterpretInternerInner<'tcx>>,
}
#[derive(Debug, Default)]
Lrc::new(StableVec::new(v)));
}
- tls::enter_global(GlobalCtxt {
+ let gcx = &GlobalCtxt {
sess: s,
cstore,
global_arenas: &arenas.global,
evaluation_cache: traits::EvaluationCache::new(),
crate_name: Symbol::intern(crate_name),
data_layout,
- layout_interner: RefCell::new(FxHashSet()),
+ layout_interner: Lock::new(FxHashSet()),
layout_depth: Cell::new(0),
derive_macros: RefCell::new(NodeMap()),
- stability_interner: RefCell::new(FxHashSet()),
+ stability_interner: Lock::new(FxHashSet()),
interpret_interner: Default::default(),
all_traits: RefCell::new(None),
- tx_to_llvm_workers: tx,
+ tx_to_llvm_workers: Lock::new(tx),
output_filenames: Arc::new(output_filenames.clone()),
- }, f)
+ };
+
+ tls::enter_global(gcx, f)
}
pub fn consider_optimizing<T: Fn() -> String>(&self, msg: T) -> bool {
self.sess.opts.debugging_opts.mir_emit_validate > 0 ||
self.use_mir()
}
+
+ #[inline]
+ pub fn share_generics(self) -> bool {
+ match self.sess.opts.debugging_opts.share_generics {
+ Some(setting) => setting,
+ None => {
+ self.sess.opts.incremental.is_some() ||
+ match self.sess.opts.optimize {
+ OptLevel::No |
+ OptLevel::Less |
+ OptLevel::Size |
+ OptLevel::SizeMin => true,
+ OptLevel::Default |
+ OptLevel::Aggressive => false,
+ }
+ }
+ }
+ }
+
+ #[inline]
+ pub fn local_crate_exports_generics(self) -> bool {
+ debug_assert!(self.share_generics());
+
+ self.sess.crate_types.borrow().iter().any(|crate_type| {
+ match crate_type {
+ CrateTypeExecutable |
+ CrateTypeStaticlib |
+ CrateTypeProcMacro |
+ CrateTypeCdylib => false,
+ CrateTypeRlib |
+ CrateTypeDylib => true,
+ }
+ })
+ }
}
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
/// Call the closure with a local `TyCtxt` using the given arena.
- pub fn enter_local<F, R>(&self, arena: &'tcx DroplessArena, f: F) -> R
- where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
+ pub fn enter_local<F, R>(
+ &self,
+ arena: &'tcx DroplessArena,
+ f: F
+ ) -> R
+ where
+ F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
{
let interners = CtxtInterners::new(arena);
- tls::enter(self, &interners, f)
+ let tcx = TyCtxt {
+ gcx: self,
+ interners: &interners,
+ };
+ ty::tls::with_related_context(tcx.global_tcx(), |icx| {
+ let new_icx = ty::tls::ImplicitCtxt {
+ tcx,
+ query: icx.query.clone(),
+ };
+ ty::tls::enter_context(&new_icx, |new_icx| {
+ f(new_icx.tcx)
+ })
+ })
}
}
}
pub mod tls {
- use super::{CtxtInterners, GlobalCtxt, TyCtxt};
+ use super::{GlobalCtxt, TyCtxt};
use std::cell::Cell;
use std::fmt;
+ use std::mem;
use syntax_pos;
+ use ty::maps;
+ use errors::{Diagnostic, TRACK_DIAGNOSTICS};
+ use rustc_data_structures::OnDrop;
+ use rustc_data_structures::sync::Lrc;
- /// Marker types used for the scoped TLS slot.
- /// The type context cannot be used directly because the scoped TLS
- /// in libstd doesn't allow types generic over lifetimes.
- enum ThreadLocalGlobalCtxt {}
- enum ThreadLocalInterners {}
+ /// This is the implicit state of rustc. It contains the current
+ /// TyCtxt and query. It is updated when creating a local interner or
+ /// executing a new query. Whenever there's a TyCtxt value available
+ /// you should also have access to an ImplicitCtxt through the functions
+ /// in this module.
+ #[derive(Clone)]
+ pub struct ImplicitCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+ /// The current TyCtxt. Initially created by `enter_global` and updated
+ /// by `enter_local` with a new local interner
+ pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
- thread_local! {
- static TLS_TCX: Cell<Option<(*const ThreadLocalGlobalCtxt,
- *const ThreadLocalInterners)>> = Cell::new(None)
+ /// The current query job, if any. This is updated by start_job in
+ /// ty::maps::plumbing when executing a query
+ pub query: Option<Lrc<maps::QueryJob<'gcx>>>,
}
+ // A thread local value which stores a pointer to the current ImplicitCtxt
+ thread_local!(static TLV: Cell<usize> = Cell::new(0));
+
+ fn set_tlv<F: FnOnce() -> R, R>(value: usize, f: F) -> R {
+ let old = get_tlv();
+ let _reset = OnDrop(move || TLV.with(|tlv| tlv.set(old)));
+ TLV.with(|tlv| tlv.set(value));
+ f()
+ }
+
+ fn get_tlv() -> usize {
+ TLV.with(|tlv| tlv.get())
+ }
+
+ /// This is a callback from libsyntax as it cannot access the implicit state
+ /// in librustc otherwise
fn span_debug(span: syntax_pos::Span, f: &mut fmt::Formatter) -> fmt::Result {
with(|tcx| {
write!(f, "{}", tcx.sess.codemap().span_to_string(span))
})
}
- pub fn enter_global<'gcx, F, R>(gcx: GlobalCtxt<'gcx>, f: F) -> R
- where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
+ /// This is a callback from libsyntax as it cannot access the implicit state
+ /// in librustc otherwise. It is used when diagnostic messages are
+ /// emitted and stores them in the current query, if there is one.
+ fn track_diagnostic(diagnostic: &Diagnostic) {
+ with_context(|context| {
+ if let Some(ref query) = context.query {
+ query.diagnostics.lock().push(diagnostic.clone());
+ }
+ })
+ }
+
+ /// Sets up the callbacks from libsyntax on the current thread
+ pub fn with_thread_locals<F, R>(f: F) -> R
+ where F: FnOnce() -> R
{
syntax_pos::SPAN_DEBUG.with(|span_dbg| {
let original_span_debug = span_dbg.get();
span_dbg.set(span_debug);
- let result = enter(&gcx, &gcx.global_interners, f);
- span_dbg.set(original_span_debug);
- result
+
+ let _on_drop = OnDrop(move || {
+ span_dbg.set(original_span_debug);
+ });
+
+ TRACK_DIAGNOSTICS.with(|current| {
+ let original = current.get();
+ current.set(track_diagnostic);
+
+ let _on_drop = OnDrop(move || {
+ current.set(original);
+ });
+
+ f()
+ })
})
}
- pub fn enter<'a, 'gcx: 'tcx, 'tcx, F, R>(gcx: &'a GlobalCtxt<'gcx>,
- interners: &'a CtxtInterners<'tcx>,
- f: F) -> R
- where F: FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
+ /// Sets `context` as the new current ImplicitCtxt for the duration of the function `f`
+ pub fn enter_context<'a, 'gcx: 'tcx, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'gcx, 'tcx>,
+ f: F) -> R
+ where F: FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
{
- let gcx_ptr = gcx as *const _ as *const ThreadLocalGlobalCtxt;
- let interners_ptr = interners as *const _ as *const ThreadLocalInterners;
- TLS_TCX.with(|tls| {
- let prev = tls.get();
- tls.set(Some((gcx_ptr, interners_ptr)));
- let ret = f(TyCtxt {
- gcx,
- interners,
- });
- tls.set(prev);
- ret
+ set_tlv(context as *const _ as usize, || {
+ f(&context)
})
}
- pub fn with<F, R>(f: F) -> R
- where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
+ /// Enters GlobalCtxt by setting up libsyntax callbacks and
+ /// creating an initial TyCtxt and ImplicitCtxt.
+ /// This happens once per rustc session and TyCtxts only exist
+ /// inside the `f` function.
+ pub fn enter_global<'gcx, F, R>(gcx: &GlobalCtxt<'gcx>, f: F) -> R
+ where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R
{
- TLS_TCX.with(|tcx| {
- let (gcx, interners) = tcx.get().unwrap();
- let gcx = unsafe { &*(gcx as *const GlobalCtxt) };
- let interners = unsafe { &*(interners as *const CtxtInterners) };
- f(TyCtxt {
+ with_thread_locals(|| {
+ let tcx = TyCtxt {
gcx,
- interners,
+ interners: &gcx.global_interners,
+ };
+ let icx = ImplicitCtxt {
+ tcx,
+ query: None,
+ };
+ enter_context(&icx, |_| {
+ f(tcx)
})
})
}
- pub fn with_opt<F, R>(f: F) -> R
- where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
+ /// Allows access to the current ImplicitCtxt in a closure if one is available
+ pub fn with_context_opt<F, R>(f: F) -> R
+ where F: for<'a, 'gcx, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'gcx, 'tcx>>) -> R
{
- if TLS_TCX.with(|tcx| tcx.get().is_some()) {
- with(|v| f(Some(v)))
- } else {
+ let context = get_tlv();
+ if context == 0 {
f(None)
+ } else {
+ unsafe { f(Some(&*(context as *const ImplicitCtxt))) }
}
}
+
+ /// Allows access to the current ImplicitCtxt.
+ /// Panics if there is no ImplicitCtxt available
+ pub fn with_context<F, R>(f: F) -> R
+ where F: for<'a, 'gcx, 'tcx> FnOnce(&ImplicitCtxt<'a, 'gcx, 'tcx>) -> R
+ {
+ with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls")))
+ }
+
+ /// Allows access to the current ImplicitCtxt whose tcx field has the same global
+ /// interner as the tcx argument passed in. This means the closure is given an ImplicitCtxt
+ /// with the same 'gcx lifetime as the TyCtxt passed in.
+ /// This will panic if you pass it a TyCtxt which has a different global interner from
+ /// the current ImplicitCtxt's tcx field.
+ pub fn with_related_context<'a, 'gcx, 'tcx1, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx1>, f: F) -> R
+ where F: for<'b, 'tcx2> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx2>) -> R
+ {
+ with_context(|context| {
+ unsafe {
+ let gcx = tcx.gcx as *const _ as usize;
+ assert!(context.tcx.gcx as *const _ as usize == gcx);
+ let context: &ImplicitCtxt = mem::transmute(context);
+ f(context)
+ }
+ })
+ }
+
+ /// Allows access to the current ImplicitCtxt whose tcx field has the same global
+ /// interner and local interner as the tcx argument passed in. This means the closure
+ /// is given an ImplicitCtxt with the same 'tcx and 'gcx lifetimes as the TyCtxt passed in.
+ /// This will panic if you pass it a TyCtxt which has a different global interner or
+ /// a different local interner from the current ImplicitCtxt's tcx field.
+ pub fn with_fully_related_context<'a, 'gcx, 'tcx, F, R>(tcx: TyCtxt<'a, 'gcx, 'tcx>, f: F) -> R
+ where F: for<'b> FnOnce(&ImplicitCtxt<'b, 'gcx, 'tcx>) -> R
+ {
+ with_context(|context| {
+ unsafe {
+ let gcx = tcx.gcx as *const _ as usize;
+ let interners = tcx.interners as *const _ as usize;
+ assert!(context.tcx.gcx as *const _ as usize == gcx);
+ assert!(context.tcx.interners as *const _ as usize == interners);
+ let context: &ImplicitCtxt = mem::transmute(context);
+ f(context)
+ }
+ })
+ }
+
+ /// Allows access to the TyCtxt in the current ImplicitCtxt.
+ /// Panics if there is no ImplicitCtxt available
+ pub fn with<F, R>(f: F) -> R
+ where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
+ {
+ with_context(|context| f(context.tcx))
+ }
+
+ /// Allows access to the TyCtxt in the current ImplicitCtxt.
+ /// The closure is passed None if there is no ImplicitCtxt available
+ pub fn with_opt<F, R>(f: F) -> R
+ where F: for<'a, 'gcx, 'tcx> FnOnce(Option<TyCtxt<'a, 'gcx, 'tcx>>) -> R
+ {
+ with_context_opt(|opt_context| f(opt_context.map(|context| context.tcx)))
+ }
}
macro_rules! sty_debug_print {
pub fn mk_param(self,
index: u32,
- name: Name) -> Ty<'tcx> {
+ name: InternedString) -> Ty<'tcx> {
self.mk_ty(TyParam(ParamTy { idx: index, name: name }))
}
pub fn mk_self_type(self) -> Ty<'tcx> {
- self.mk_param(0, keywords::SelfType.name())
+ self.mk_param(0, keywords::SelfType.name().as_str())
}
pub fn mk_param_from_def(self, def: &ty::TypeParameterDef) -> Ty<'tcx> {
}
}
+impl<'tcx> QueryDescription<'tcx> for queries::upstream_monomorphizations<'tcx> {
+ fn describe(_: TyCtxt, k: CrateNum) -> String {
+ format!("collecting available upstream monomorphizations `{:?}`", k)
+ }
+}
+
impl<'tcx> QueryDescription<'tcx> for queries::crate_inherent_impls<'tcx> {
fn describe(_: TyCtxt, k: CrateNum) -> String {
format!("all inherent impls defined in crate `{:?}`", k)
}
}
+impl<'tcx> QueryDescription<'tcx> for queries::extra_filename<'tcx> {
+ fn describe(_tcx: TyCtxt, _: CrateNum) -> String {
+ format!("looking up the extra filename for a crate")
+ }
+}
+
impl<'tcx> QueryDescription<'tcx> for queries::implementations_of_trait<'tcx> {
fn describe(_tcx: TyCtxt, _: (CrateNum, DefId)) -> String {
format!("looking up implementations of a trait in a crate")
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc_data_structures::sync::{Lock, Lrc};
+use syntax_pos::Span;
+use ty::tls;
+use ty::maps::Query;
+use ty::maps::plumbing::CycleError;
+use ty::context::TyCtxt;
+use errors::Diagnostic;
+
+/// Indicates the state of a query for a given key in a query map
+pub(super) enum QueryResult<'tcx, T> {
+ /// An already executing query. The query job can be used to await for its completion
+ Started(Lrc<QueryJob<'tcx>>),
+
+ /// The query is complete and produced `T`
+ Complete(T),
+
+ /// The query panicked. Queries trying to wait on this will raise a fatal error / silently panic
+ Poisoned,
+}
+
+/// A span and a query key
+#[derive(Clone, Debug)]
+pub struct QueryInfo<'tcx> {
+ pub span: Span,
+ pub query: Query<'tcx>,
+}
+
+/// An object representing an active query job.
+pub struct QueryJob<'tcx> {
+ pub info: QueryInfo<'tcx>,
+
+ /// The parent query job which created this job and is implicitly waiting on it.
+ pub parent: Option<Lrc<QueryJob<'tcx>>>,
+
+ /// Diagnostic messages which are emitted while the query executes
+ pub diagnostics: Lock<Vec<Diagnostic>>,
+}
+
+impl<'tcx> QueryJob<'tcx> {
+ /// Creates a new query job
+ pub fn new(info: QueryInfo<'tcx>, parent: Option<Lrc<QueryJob<'tcx>>>) -> Self {
+ QueryJob {
+ diagnostics: Lock::new(Vec::new()),
+ info,
+ parent,
+ }
+ }
+
+ /// Awaits for the query job to complete.
+ ///
+ /// For single threaded rustc there are no concurrent jobs running, so if we are waiting for any
+ /// query that means that there is a query cycle, thus this always results in a cycle error.
+ pub(super) fn await<'lcx>(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, 'lcx>,
+ span: Span,
+ ) -> Result<(), CycleError<'tcx>> {
+ // Get the current executing query (waiter) and find the waitee amongst its parents
+ let mut current_job = tls::with_related_context(tcx, |icx| icx.query.clone());
+ let mut cycle = Vec::new();
+
+ while let Some(job) = current_job {
+ cycle.insert(0, job.info.clone());
+
+ if &*job as *const _ == self as *const _ {
+ break;
+ }
+
+ current_job = job.parent.clone();
+ }
+
+ Err(CycleError { span, cycle })
+ }
+
+ /// Signals to waiters that the query is complete.
+ ///
+ /// This does nothing for single threaded rustc,
+ /// as there are no concurrent jobs which could be waiting on us
+ pub fn signal_complete(&self) {}
+}
use hir::def::{Def, Export};
use hir::{self, TraitCandidate, ItemLocalId, TransFnAttrs};
use hir::svh::Svh;
-use infer::canonical::{Canonical, QueryResult};
+use infer::canonical::{self, Canonical};
use lint;
use middle::borrowck::BorrowCheckResult;
use middle::cstore::{ExternCrate, LinkagePreference, NativeLibrary,
use self::plumbing::*;
pub use self::plumbing::force_from_dep_node;
+mod job;
+pub use self::job::{QueryJob, QueryInfo};
+use self::job::QueryResult;
+
mod keys;
pub use self::keys::Key;
//
// Does not include external symbols that don't have a corresponding DefId,
// like the compiler-generated `main` function and so on.
- [] fn reachable_non_generics: ReachableNonGenerics(CrateNum) -> Lrc<DefIdSet>,
+ [] fn reachable_non_generics: ReachableNonGenerics(CrateNum)
+ -> Lrc<DefIdMap<SymbolExportLevel>>,
[] fn is_reachable_non_generic: IsReachableNonGeneric(DefId) -> bool,
+ [] fn is_unreachable_local_definition: IsUnreachableLocalDefinition(DefId) -> bool,
+ [] fn upstream_monomorphizations: UpstreamMonomorphizations(CrateNum)
+ -> Lrc<DefIdMap<Lrc<FxHashMap<&'tcx Substs<'tcx>, CrateNum>>>>,
+ [] fn upstream_monomorphizations_for: UpstreamMonomorphizationsFor(DefId)
+ -> Option<Lrc<FxHashMap<&'tcx Substs<'tcx>, CrateNum>>>,
[] fn native_libraries: NativeLibraries(CrateNum) -> Lrc<Vec<NativeLibrary>>,
[] fn crate_disambiguator: CrateDisambiguator(CrateNum) -> CrateDisambiguator,
[] fn crate_hash: CrateHash(CrateNum) -> Svh,
[] fn original_crate_name: OriginalCrateName(CrateNum) -> Symbol,
+ [] fn extra_filename: ExtraFileName(CrateNum) -> String,
[] fn implementations_of_trait: implementations_of_trait_node((CrateNum, DefId))
-> Lrc<Vec<DefId>>,
[] fn all_crate_nums: all_crate_nums_node(CrateNum) -> Lrc<Vec<CrateNum>>,
[] fn exported_symbols: ExportedSymbols(CrateNum)
- -> Arc<Vec<(ExportedSymbol, SymbolExportLevel)>>,
+ -> Arc<Vec<(ExportedSymbol<'tcx>, SymbolExportLevel)>>,
[] fn collect_and_partition_translation_items:
collect_and_partition_translation_items_node(CrateNum)
-> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>),
- [] fn symbol_export_level: GetSymbolExportLevel(DefId) -> SymbolExportLevel,
[] fn is_translated_item: IsTranslatedItem(DefId) -> bool,
[] fn codegen_unit: CodegenUnit(InternedString) -> Arc<CodegenUnit<'tcx>>,
[] fn compile_codegen_unit: CompileCodegenUnit(InternedString) -> Stats,
[] fn normalize_projection_ty: NormalizeProjectionTy(
CanonicalProjectionGoal<'tcx>
) -> Result<
- Lrc<Canonical<'tcx, QueryResult<'tcx, NormalizationResult<'tcx>>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, NormalizationResult<'tcx>>>>,
NoSolution,
>,
[] fn dropck_outlives: DropckOutlives(
CanonicalTyGoal<'tcx>
) -> Result<
- Lrc<Canonical<'tcx, QueryResult<'tcx, DropckOutlivesResult<'tcx>>>>,
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, DropckOutlivesResult<'tcx>>>>,
NoSolution,
>,
use syntax_pos::{BytePos, Span, DUMMY_SP, FileMap};
use syntax_pos::hygiene::{Mark, SyntaxContext, ExpnInfo};
use ty;
+use ty::maps::job::QueryResult;
use ty::codec::{self as ty_codec, TyDecoder, TyEncoder};
use ty::context::TyCtxt;
for (key, entry) in const_eval::get_cache_internal(tcx).map.iter() {
use ty::maps::config::QueryDescription;
if const_eval::cache_on_disk(key.clone()) {
+ let entry = match *entry {
+ QueryResult::Complete(ref v) => v,
+ _ => panic!("incomplete query"),
+ };
if let Ok(ref value) = entry.value {
let dep_node = SerializedDepNodeIndex::new(entry.index.index());
{
for (key, entry) in Q::get_cache_internal(tcx).map.iter() {
if Q::cache_on_disk(key.clone()) {
+ let entry = match *entry {
+ QueryResult::Complete(ref v) => v,
+ _ => panic!("incomplete query"),
+ };
let dep_node = SerializedDepNodeIndex::new(entry.index.index());
// Record position of the cache entry
use dep_graph::{DepNodeIndex, DepNode, DepKind, DepNodeColor};
use errors::DiagnosticBuilder;
+use errors::Level;
+use ty::tls;
use ty::{TyCtxt};
-use ty::maps::Query; // NB: actually generated by the macros in this file
use ty::maps::config::QueryDescription;
+use ty::maps::job::{QueryResult, QueryInfo};
use ty::item_path;
use rustc_data_structures::fx::{FxHashMap};
-use std::cell::{Ref, RefMut};
+use rustc_data_structures::sync::LockGuard;
use std::marker::PhantomData;
-use std::mem;
use syntax_pos::Span;
pub(super) struct QueryMap<'tcx, D: QueryDescription<'tcx>> {
phantom: PhantomData<(D, &'tcx ())>,
- pub(super) map: FxHashMap<D::Key, QueryValue<D::Value>>,
+ pub(super) map: FxHashMap<D::Key, QueryResult<'tcx, QueryValue<D::Value>>>,
}
pub(super) struct QueryValue<T> {
pub(super) trait GetCacheInternal<'tcx>: QueryDescription<'tcx> + Sized {
fn get_cache_internal<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
- -> Ref<'a, QueryMap<'tcx, Self>>;
+ -> LockGuard<'a, QueryMap<'tcx, Self>>;
}
-pub(super) struct CycleError<'a, 'tcx: 'a> {
- span: Span,
- cycle: RefMut<'a, [(Span, Query<'tcx>)]>,
+#[derive(Clone)]
+pub(super) struct CycleError<'tcx> {
+ pub(super) span: Span,
+ pub(super) cycle: Vec<QueryInfo<'tcx>>,
+}
+
+/// The result of `try_get_lock`
+pub(super) enum TryGetLock<'a, 'tcx: 'a, T, D: QueryDescription<'tcx> + 'a> {
+ /// The query is not yet started. Contains a guard to the map eventually used to start it.
+ NotYetStarted(LockGuard<'a, QueryMap<'tcx, D>>),
+
+ /// The query was already completed.
+ /// Returns the result of the query and its dep node index
+ /// if it succeeded or a cycle error if it failed
+ JobCompleted(Result<(T, DepNodeIndex), CycleError<'tcx>>),
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- pub(super) fn report_cycle(self, CycleError { span, cycle }: CycleError)
+ pub(super) fn report_cycle(self, CycleError { span, cycle: stack }: CycleError)
-> DiagnosticBuilder<'a>
{
- // Subtle: release the refcell lock before invoking `describe()`
- // below by dropping `cycle`.
- let stack = cycle.to_vec();
- mem::drop(cycle);
-
assert!(!stack.is_empty());
// Disable naming impls with types in this path, since that
"cyclic dependency detected");
err.span_label(span, "cyclic reference");
- err.span_note(self.sess.codemap().def_span(stack[0].0),
- &format!("the cycle begins when {}...", stack[0].1.describe(self)));
+ err.span_note(self.sess.codemap().def_span(stack[0].span),
+ &format!("the cycle begins when {}...", stack[0].query.describe(self)));
- for &(span, ref query) in &stack[1..] {
+ for &QueryInfo { span, ref query, .. } in &stack[1..] {
err.span_note(self.sess.codemap().def_span(span),
&format!("...which then requires {}...", query.describe(self)));
}
err.note(&format!("...which then again requires {}, completing the cycle.",
- stack[0].1.describe(self)));
+ stack[0].query.describe(self)));
return err
})
}
- pub(super) fn cycle_check<F, R>(self, span: Span, query: Query<'gcx>, compute: F)
- -> Result<R, CycleError<'a, 'gcx>>
- where F: FnOnce() -> R
- {
- {
- let mut stack = self.maps.query_stack.borrow_mut();
- if let Some((i, _)) = stack.iter().enumerate().rev()
- .find(|&(_, &(_, ref q))| *q == query) {
- return Err(CycleError {
- span,
- cycle: RefMut::map(stack, |stack| &mut stack[i..])
- });
+ pub fn try_print_query_stack() {
+ eprintln!("query stack during panic:");
+
+ tls::with_context_opt(|icx| {
+ if let Some(icx) = icx {
+ let mut current_query = icx.query.clone();
+ let mut i = 0;
+
+ while let Some(query) = current_query {
+ let mut db = DiagnosticBuilder::new(icx.tcx.sess.diagnostic(),
+ Level::FailureNote,
+ &format!("#{} [{}] {}",
+ i,
+ query.info.query.name(),
+ query.info.query.describe(icx.tcx)));
+ db.set_span(icx.tcx.sess.codemap().def_span(query.info.span));
+ icx.tcx.sess.diagnostic().force_print_db(db);
+
+ current_query = query.parent.clone();
+ i += 1;
+ }
}
- stack.push((span, query));
- }
-
- let result = compute();
-
- self.maps.query_stack.borrow_mut().pop();
+ });
- Ok(result)
+ eprintln!("end of query stack");
}
/// Try to read a node index for the node dep_node.
[$($modifiers:tt)*] fn $name:ident: $node:ident($K:ty) -> $V:ty,)*) => {
use dep_graph::DepNodeIndex;
- use std::cell::RefCell;
+ use std::mem;
+ use errors::Diagnostic;
+ use errors::FatalError;
+ use rustc_data_structures::sync::{Lock, LockGuard};
+ use rustc_data_structures::OnDrop;
define_map_struct! {
tcx: $tcx,
-> Self {
Maps {
providers,
- query_stack: RefCell::new(vec![]),
- $($name: RefCell::new(QueryMap::new())),*
+ $($name: Lock::new(QueryMap::new())),*
}
}
}
}
impl<$tcx> Query<$tcx> {
+ pub fn name(&self) -> &'static str {
+ match *self {
+ $(Query::$name(_) => stringify!($name),)*
+ }
+ }
+
pub fn describe(&self, tcx: TyCtxt) -> String {
let (r, name) = match *self {
$(Query::$name(key) => {
impl<$tcx> GetCacheInternal<$tcx> for queries::$name<$tcx> {
fn get_cache_internal<'a>(tcx: TyCtxt<'a, $tcx, $tcx>)
- -> ::std::cell::Ref<'a, QueryMap<$tcx, Self>> {
+ -> LockGuard<'a, QueryMap<$tcx, Self>> {
tcx.maps.$name.borrow()
}
}
DepNode::new(tcx, $node(*key))
}
+ /// Either get the lock of the query map, allowing us to
+ /// start executing the query, or it returns with the result of the query.
+ /// If the query already executed and panicked, this will fatal error / silently panic
+ fn try_get_lock(
+ tcx: TyCtxt<'a, $tcx, 'lcx>,
+ mut span: Span,
+ key: &$K
+ ) -> TryGetLock<'a, $tcx, $V, Self>
+ {
+ loop {
+ let lock = tcx.maps.$name.borrow_mut();
+ let job = if let Some(value) = lock.map.get(key) {
+ match *value {
+ QueryResult::Started(ref job) => Some(job.clone()),
+ QueryResult::Complete(ref value) => {
+ profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
+ let result = Ok(((&value.value).clone(), value.index));
+ return TryGetLock::JobCompleted(result);
+ },
+ QueryResult::Poisoned => FatalError.raise(),
+ }
+ } else {
+ None
+ };
+ let job = if let Some(job) = job {
+ job
+ } else {
+ return TryGetLock::NotYetStarted(lock);
+ };
+ mem::drop(lock);
+
+ // This just matches the behavior of `try_get_with` so the span when
+ // we await matches the span we would use when executing.
+ // See the FIXME there.
+ if span == DUMMY_SP && stringify!($name) != "def_span" {
+ span = key.default_span(tcx);
+ }
+
+ if let Err(cycle) = job.await(tcx, span) {
+ return TryGetLock::JobCompleted(Err(cycle));
+ }
+ }
+ }
+
fn try_get_with(tcx: TyCtxt<'a, $tcx, 'lcx>,
mut span: Span,
key: $K)
- -> Result<$V, CycleError<'a, $tcx>>
+ -> Result<$V, CycleError<$tcx>>
{
debug!("ty::queries::{}::try_get_with(key={:?}, span={:?})",
stringify!($name),
)
);
- if let Some(value) = tcx.maps.$name.borrow().map.get(&key) {
- profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
- tcx.dep_graph.read_index(value.index);
- return Ok((&value.value).clone());
+ /// Get the lock used to start the query or
+ /// return the result of the completed query
+ macro_rules! get_lock_or_return {
+ () => {{
+ match Self::try_get_lock(tcx, span, &key) {
+ TryGetLock::NotYetStarted(lock) => lock,
+ TryGetLock::JobCompleted(result) => {
+ return result.map(|(v, index)| {
+ tcx.dep_graph.read_index(index);
+ v
+ })
+ }
+ }
+ }}
}
+ let mut lock = get_lock_or_return!();
+
// FIXME(eddyb) Get more valid Span's on queries.
// def_span guard is necessary to prevent a recursive loop,
// default_span calls def_span query internally.
if span == DUMMY_SP && stringify!($name) != "def_span" {
- span = key.default_span(tcx)
+ // This might deadlock if we hold the map lock since we might be
+ // waiting for the def_span query and switch to some other fiber
+ // So we drop the lock here and reacquire it
+ mem::drop(lock);
+ span = key.default_span(tcx);
+ lock = get_lock_or_return!();
}
// Fast path for when incr. comp. is off. `to_dep_node` is
// expensive for some DepKinds.
if !tcx.dep_graph.is_fully_enabled() {
let null_dep_node = DepNode::new_no_params(::dep_graph::DepKind::Null);
- return Self::force(tcx, key, span, null_dep_node)
+ return Self::force_with_lock(tcx, key, span, lock, null_dep_node)
.map(|(v, _)| v);
}
if dep_node.kind.is_anon() {
profq_msg!(tcx, ProfileQueriesMsg::ProviderBegin);
- let res = tcx.cycle_check(span, Query::$name(key), || {
- tcx.sess.diagnostic().track_diagnostics(|| {
- tcx.dep_graph.with_anon_task(dep_node.kind, || {
- Self::compute_result(tcx.global_tcx(), key)
- })
+ let res = Self::start_job(tcx, span, key, lock, |tcx| {
+ tcx.dep_graph.with_anon_task(dep_node.kind, || {
+ Self::compute_result(tcx.global_tcx(), key)
})
})?;
profq_msg!(tcx, ProfileQueriesMsg::ProviderEnd);
- let ((result, dep_node_index), diagnostics) = res;
+ let (((result, dep_node_index), diagnostics), job) = res;
tcx.dep_graph.read_index(dep_node_index);
tcx.on_disk_query_result_cache
.store_diagnostics_for_anon_node(dep_node_index, diagnostics);
- let value = QueryValue::new(result, dep_node_index);
+ let value = QueryValue::new(Clone::clone(&result), dep_node_index);
- return Ok((&tcx.maps
- .$name
- .borrow_mut()
- .map
- .entry(key)
- .or_insert(value)
- .value).clone());
+ tcx.maps
+ .$name
+ .borrow_mut()
+ .map
+ .insert(key, QueryResult::Complete(value));
+
+ job.signal_complete();
+
+ return Ok(result);
}
if !dep_node.kind.is_input() {
+ // try_mark_green_and_read may force queries. So we must drop our lock here
+ mem::drop(lock);
if let Some(dep_node_index) = tcx.try_mark_green_and_read(&dep_node) {
profq_msg!(tcx, ProfileQueriesMsg::CacheHit);
return Self::load_from_disk_and_cache_in_memory(tcx,
dep_node_index,
&dep_node)
}
+ lock = get_lock_or_return!();
}
- match Self::force(tcx, key, span, dep_node) {
+ match Self::force_with_lock(tcx, key, span, lock, dep_node) {
Ok((result, dep_node_index)) => {
tcx.dep_graph.read_index(dep_node_index);
Ok(result)
}
}
+ /// Creates a job for the query and updates the query map indicating that it started.
+ /// Then it changes ImplicitCtxt to point to the new query job while it executes.
+ /// If the query panics, this updates the query map to indicate so.
+ fn start_job<F, R>(tcx: TyCtxt<'_, $tcx, 'lcx>,
+ span: Span,
+ key: $K,
+ mut map: LockGuard<'_, QueryMap<$tcx, Self>>,
+ compute: F)
+ -> Result<((R, Vec<Diagnostic>), Lrc<QueryJob<$tcx>>), CycleError<$tcx>>
+ where F: for<'b> FnOnce(TyCtxt<'b, $tcx, 'lcx>) -> R
+ {
+ let query = Query::$name(Clone::clone(&key));
+
+ let entry = QueryInfo {
+ span,
+ query,
+ };
+
+ // The TyCtxt stored in TLS has the same global interner lifetime
+ // as `tcx`, so we use `with_related_context` to relate the 'gcx lifetimes
+ // when accessing the ImplicitCtxt
+ let (r, job) = ty::tls::with_related_context(tcx, move |icx| {
+ let job = Lrc::new(QueryJob::new(entry, icx.query.clone()));
+
+ // Store the job in the query map and drop the lock to allow
+ // others to wait on it
+ map.map.entry(key).or_insert(QueryResult::Started(job.clone()));
+ mem::drop(map);
+
+ let r = {
+ let on_drop = OnDrop(|| {
+ // Poison the query so jobs waiting on it panic
+ tcx.maps
+ .$name
+ .borrow_mut()
+ .map
+ .insert(key, QueryResult::Poisoned);
+ // Also signal the completion of the job, so waiters
+ // will continue execution
+ job.signal_complete();
+ });
+
+ // Update the ImplicitCtxt to point to our new query job
+ let icx = ty::tls::ImplicitCtxt {
+ tcx,
+ query: Some(job.clone()),
+ };
+
+ // Use the ImplicitCtxt while we execute the query
+ let r = ty::tls::enter_context(&icx, |icx| {
+ compute(icx.tcx)
+ });
+
+ mem::forget(on_drop);
+
+ r
+ };
+
+ (r, job)
+ });
+
+ // Extract the diagnostics from the job
+ let diagnostics: Vec<_> = mem::replace(&mut *job.diagnostics.lock(), Vec::new());
+
+ Ok(((r, diagnostics), job))
+ }
+
fn compute_result(tcx: TyCtxt<'a, $tcx, 'lcx>, key: $K) -> $V {
let provider = tcx.maps.providers[key.map_crate()].$name;
provider(tcx.global_tcx(), key)
span: Span,
dep_node_index: DepNodeIndex,
dep_node: &DepNode)
- -> Result<$V, CycleError<'a, $tcx>>
+ -> Result<$V, CycleError<$tcx>>
{
+ // Note this function can be called concurrently from the same query
+ // We must ensure that this is handled correctly
+
debug_assert!(tcx.dep_graph.is_green(dep_node));
// First we try to load the result from the on-disk cache
None
};
- let result = if let Some(result) = result {
- result
+ let (result, job) = if let Some(result) = result {
+ (result, None)
} else {
// We could not load a result from the on-disk cache, so
// recompute.
- let (result, _ ) = tcx.cycle_check(span, Query::$name(key), || {
- // The diagnostics for this query have already been
- // promoted to the current session during
- // try_mark_green(), so we can ignore them here.
- tcx.sess.diagnostic().track_diagnostics(|| {
- // The dep-graph for this computation is already in
- // place
- tcx.dep_graph.with_ignore(|| {
- Self::compute_result(tcx, key)
- })
+
+ // The diagnostics for this query have already been
+ // promoted to the current session during
+ // try_mark_green(), so we can ignore them here.
+ let ((result, _), job) = Self::start_job(tcx,
+ span,
+ key,
+ tcx.maps.$name.borrow_mut(),
+ |tcx| {
+ // The dep-graph for this computation is already in
+ // place
+ tcx.dep_graph.with_ignore(|| {
+ Self::compute_result(tcx, key)
})
})?;
- result
+ (result, Some(job))
};
// If -Zincremental-verify-ich is specified, re-hash results from
tcx.dep_graph.mark_loaded_from_cache(dep_node_index, true);
}
- let value = QueryValue::new(result, dep_node_index);
+ let value = QueryValue::new(Clone::clone(&result), dep_node_index);
+
+ tcx.maps
+ .$name
+ .borrow_mut()
+ .map
+ .insert(key, QueryResult::Complete(value));
- Ok((&tcx.maps
- .$name
- .borrow_mut()
- .map
- .entry(key)
- .or_insert(value)
- .value).clone())
+ job.map(|j| j.signal_complete());
+
+ Ok(result)
}
+ #[allow(dead_code)]
fn force(tcx: TyCtxt<'a, $tcx, 'lcx>,
key: $K,
span: Span,
dep_node: DepNode)
- -> Result<($V, DepNodeIndex), CycleError<'a, $tcx>> {
- debug_assert!(!tcx.dep_graph.dep_node_exists(&dep_node));
+ -> Result<($V, DepNodeIndex), CycleError<$tcx>> {
+ // We may be concurrently trying both execute and force a query
+ // Ensure that only one of them runs the query
+ let lock = match Self::try_get_lock(tcx, span, &key) {
+ TryGetLock::NotYetStarted(lock) => lock,
+ TryGetLock::JobCompleted(result) => return result,
+ };
+ Self::force_with_lock(tcx,
+ key,
+ span,
+ lock,
+ dep_node)
+ }
+
+ fn force_with_lock(tcx: TyCtxt<'a, $tcx, 'lcx>,
+ key: $K,
+ span: Span,
+ map: LockGuard<'_, QueryMap<$tcx, Self>>,
+ dep_node: DepNode)
+ -> Result<($V, DepNodeIndex), CycleError<$tcx>> {
+ // If the following assertion triggers, it can have two reasons:
+ // 1. Something is wrong with DepNode creation, either here or
+ // in DepGraph::try_mark_green()
+ // 2. Two distinct query keys get mapped to the same DepNode
+ // (see for example #48923)
+ assert!(!tcx.dep_graph.dep_node_exists(&dep_node),
+ "Forcing query with already existing DepNode.\n\
+ - query-key: {:?}\n\
+ - dep-node: {:?}",
+ key, dep_node);
profq_msg!(tcx, ProfileQueriesMsg::ProviderBegin);
- let res = tcx.cycle_check(span, Query::$name(key), || {
- tcx.sess.diagnostic().track_diagnostics(|| {
- if dep_node.kind.is_eval_always() {
- tcx.dep_graph.with_eval_always_task(dep_node,
- tcx,
- key,
- Self::compute_result)
- } else {
- tcx.dep_graph.with_task(dep_node,
- tcx,
- key,
- Self::compute_result)
- }
- })
+ let res = Self::start_job(tcx,
+ span,
+ key,
+ map,
+ |tcx| {
+ if dep_node.kind.is_eval_always() {
+ tcx.dep_graph.with_eval_always_task(dep_node,
+ tcx,
+ key,
+ Self::compute_result)
+ } else {
+ tcx.dep_graph.with_task(dep_node,
+ tcx,
+ key,
+ Self::compute_result)
+ }
})?;
profq_msg!(tcx, ProfileQueriesMsg::ProviderEnd);
- let ((result, dep_node_index), diagnostics) = res;
+ let (((result, dep_node_index), diagnostics), job) = res;
if tcx.sess.opts.debugging_opts.query_dep_graph {
tcx.dep_graph.mark_loaded_from_cache(dep_node_index, false);
.store_diagnostics(dep_node_index, diagnostics);
}
- let value = QueryValue::new(result, dep_node_index);
+ let value = QueryValue::new(Clone::clone(&result), dep_node_index);
+
+ tcx.maps
+ .$name
+ .borrow_mut()
+ .map
+ .insert(key, QueryResult::Complete(value));
- Ok(((&tcx.maps
- .$name
- .borrow_mut()
- .map
- .entry(key)
- .or_insert(value)
- .value).clone(),
- dep_node_index))
+ let job: Lrc<QueryJob> = job;
+
+ job.signal_complete();
+
+ Ok((result, dep_node_index))
}
pub fn try_get(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K)
input: ($(([$($modifiers:tt)*] [$($attr:tt)*] [$name:ident]))*)) => {
pub struct Maps<$tcx> {
providers: IndexVec<CrateNum, Providers<$tcx>>,
- query_stack: RefCell<Vec<(Span, Query<$tcx>)>>,
- $($(#[$attr])* $name: RefCell<QueryMap<$tcx, queries::$name<$tcx>>>,)*
+ $($(#[$attr])* $name: Lock<QueryMap<$tcx, queries::$name<$tcx>>>,)*
}
};
}
DepKind::ImplParent => { force!(impl_parent, def_id!()); }
DepKind::TraitOfItem => { force!(trait_of_item, def_id!()); }
DepKind::IsReachableNonGeneric => { force!(is_reachable_non_generic, def_id!()); }
+ DepKind::IsUnreachableLocalDefinition => {
+ force!(is_unreachable_local_definition, def_id!());
+ }
DepKind::IsMirAvailable => { force!(is_mir_available, def_id!()); }
DepKind::ItemAttrs => { force!(item_attrs, def_id!()); }
DepKind::TransFnAttrs => { force!(trans_fn_attrs, def_id!()); }
DepKind::CrateDisambiguator => { force!(crate_disambiguator, krate!()); }
DepKind::CrateHash => { force!(crate_hash, krate!()); }
DepKind::OriginalCrateName => { force!(original_crate_name, krate!()); }
+ DepKind::ExtraFileName => { force!(extra_filename, krate!()); }
DepKind::AllTraitImplementations => {
force!(all_trait_implementations, krate!());
DepKind::TargetFeaturesWhitelist => { force!(target_features_whitelist, LOCAL_CRATE); }
- DepKind::GetSymbolExportLevel => { force!(symbol_export_level, def_id!()); }
DepKind::Features => { force!(features_query, LOCAL_CRATE); }
DepKind::ProgramClausesFor => { force!(program_clauses_for, def_id!()); }
DepKind::WasmCustomSections => { force!(wasm_custom_sections, krate!()); }
DepKind::WasmImportModuleMap => { force!(wasm_import_module_map, krate!()); }
DepKind::ForeignModules => { force!(foreign_modules, krate!()); }
+
+ DepKind::UpstreamMonomorphizations => {
+ force!(upstream_monomorphizations, krate!());
+ }
+ DepKind::UpstreamMonomorphizationsFor => {
+ force!(upstream_monomorphizations_for, def_id!());
+ }
}
true
#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub struct TypeParameterDef {
- pub name: Name,
+ pub name: InternedString,
pub def_id: DefId,
pub index: u32,
pub has_default: bool,
return Some(index);
}
let mut ident = name.to_ident();
- while ident.ctxt != SyntaxContext::empty() {
- ident.ctxt.remove_mark();
+ while ident.span.ctxt() != SyntaxContext::empty() {
+ ident.span.remove_mark();
if let Some(field) = self.fields.iter().position(|f| f.name.to_ident() == ident) {
return Some(field);
}
LOCAL_CRATE => self.hir.definitions().expansion(scope.index),
_ => Mark::root(),
};
- let scope = match ident.ctxt.adjust(expansion) {
+ let scope = match ident.span.adjust(expansion) {
Some(macro_def) => self.hir.definitions().macro_def_scope(macro_def),
None if block == DUMMY_NODE_ID => DefId::local(CRATE_DEF_INDEX), // Dummy DefId
None => self.hir.get_module_parent(block),
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use std::cell::{Ref, RefCell};
+use rustc_data_structures::sync::{RwLock, ReadGuard};
use std::mem;
/// The `Steal` struct is intended to used as the value for a query.
///
/// FIXME(#41710) -- what is the best way to model linear queries?
pub struct Steal<T> {
- value: RefCell<Option<T>>
+ value: RwLock<Option<T>>
}
impl<T> Steal<T> {
pub fn new(value: T) -> Self {
Steal {
- value: RefCell::new(Some(value))
+ value: RwLock::new(Some(value))
}
}
- pub fn borrow(&self) -> Ref<T> {
- Ref::map(self.value.borrow(), |opt| match *opt {
+ pub fn borrow(&self) -> ReadGuard<T> {
+ ReadGuard::map(self.value.borrow(), |opt| match *opt {
None => bug!("attempted to read from stolen value"),
Some(ref v) => v
})
}
pub fn steal(&self) -> T {
- let value_ref = &mut *self.value.borrow_mut();
+ let value_ref = &mut *self.value.try_write().expect("stealing value which is locked");
let value = mem::replace(value_ref, None);
value.expect("attempt to read from stolen value")
}
use std::cmp::Ordering;
use syntax::abi;
use syntax::ast::{self, Name};
-use syntax::symbol::keywords;
+use syntax::symbol::{keywords, InternedString};
use serialize;
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct ParamTy {
pub idx: u32,
- pub name: Name,
+ pub name: InternedString,
}
impl<'a, 'gcx, 'tcx> ParamTy {
- pub fn new(index: u32, name: Name) -> ParamTy {
+ pub fn new(index: u32, name: InternedString) -> ParamTy {
ParamTy { idx: index, name: name }
}
pub fn for_self() -> ParamTy {
- ParamTy::new(0, keywords::SelfType.name())
+ ParamTy::new(0, keywords::SelfType.name().as_str())
}
pub fn for_def(def: &ty::TypeParameterDef) -> ParamTy {
}
pub fn is_self(&self) -> bool {
- if self.name == keywords::SelfType.name() {
+ if self.name == keywords::SelfType.name().as_str() {
assert_eq!(self.idx, 0);
true
} else {
}
TyParam(p) => {
self.hash(p.idx);
- self.hash(p.name.as_str());
+ self.hash(p.name);
}
TyProjection(ref data) => {
self.def_id(data.item_def_id);
use std::hash::{Hash, BuildHasher};
use std::iter::repeat;
use std::panic;
+use std::env;
use std::path::Path;
use std::time::{Duration, Instant};
use std::sync::mpsc::{Sender};
use syntax_pos::{SpanData};
use ty::maps::{QueryMsg};
+use ty::TyCtxt;
use dep_graph::{DepNode};
use proc_macro;
use lazy_static;
fn panic_hook(info: &panic::PanicInfo) {
if !proc_macro::__internal::in_sess() {
- (*DEFAULT_HOOK)(info)
+ (*DEFAULT_HOOK)(info);
+
+ let backtrace = env::var_os("RUST_BACKTRACE").map(|x| &x != "0").unwrap_or(false);
+
+ if backtrace {
+ TyCtxt::try_print_query_stack();
+ }
}
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![deny(warnings)]
-
#![feature(rustc_private)]
extern crate rustc;
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![forbid(unsafe_code)]
-#![cfg_attr(stage0, feature(slice_patterns))]
-#![cfg_attr(stage0, feature(i128_type))]
-#![cfg_attr(stage0, feature(try_from))]
-
// See librustc_cratesio_shim/Cargo.toml for a comment explaining this.
#[allow(unused_extern_crates)]
extern crate rustc_cratesio_shim;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![cfg_attr(stage0, feature(i128_type))]
-
#[macro_use]
extern crate rustc_apfloat;
alloc = { path = "../liballoc" }
alloc_system = { path = "../liballoc_system" }
core = { path = "../libcore" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(box_syntax)]
#![feature(const_fn)]
exe_allocation_crate: super::maybe_jemalloc(),
has_elf_tls: version >= (10, 7),
abi_return_struct_as_int: true,
+ emit_debug_gdb_scripts: false,
.. Default::default()
}
}
/// Whether or not bitcode is embedded in object files
pub embed_bitcode: bool,
+
+ /// Whether a .debug_gdb_scripts section will be added to the output object file
+ pub emit_debug_gdb_scripts: bool,
}
impl Default for TargetOptions {
codegen_backend: "llvm".to_string(),
default_hidden_visibility: false,
embed_bitcode: false,
+ emit_debug_gdb_scripts: true,
}
}
}
key!(codegen_backend);
key!(default_hidden_visibility, bool);
key!(embed_bitcode, bool);
+ key!(emit_debug_gdb_scripts, bool);
if let Some(array) = obj.find("abi-blacklist").and_then(Json::as_array) {
for name in array.iter().filter_map(|abi| abi.as_string()) {
target_option_val!(codegen_backend);
target_option_val!(default_hidden_visibility);
target_option_val!(embed_bitcode);
+ target_option_val!(emit_debug_gdb_scripts);
if default.abi_blacklist != self.options.abi_blacklist {
d.insert("abi-blacklist".to_string(), self.options.abi_blacklist.iter()
// too much overhead for such small target.
trap_unreachable: false,
+ // See the thumb_base.rs file for an explanation of this value
+ emit_debug_gdb_scripts: false,
+
.. Default::default( )
}
})
// costs it involves.
relocation_model: "static".to_string(),
abi_blacklist: super::arm_base::abi_blacklist(),
+ // When this section is added, a volatile load to its start address is also generated. This
+ // volatile load is a footgun as it can end up loading an invalid memory address, depending
+ // on how the user set up their linker scripts. This section adds pretty printers for types
+ // like std::Vec, which are rarely used in a no-std context, so it's best to leave it out
+ // until we figure out a way to add the pretty printers without requiring a volatile load;
+ // cf. rust-lang/rust#44993.
+ emit_debug_gdb_scripts: false,
.. Default::default()
}
}
],
custom_unwind_resume: true,
abi_return_struct_as_int: true,
+ emit_debug_gdb_scripts: false,
.. Default::default()
}
crt_static_allows_dylibs: true,
crt_static_respected: true,
abi_return_struct_as_int: true,
+ emit_debug_gdb_scripts: false,
.. Default::default()
}
// Note that `mir_validated` is a "stealable" result; the
// thief, `optimized_mir()`, forces borrowck, so we know that
// is not yet stolen.
- tcx.mir_validated(owner_def_id).borrow();
+ ty::maps::queries::mir_validated::ensure(tcx, owner_def_id);
// option dance because you can't capture an uninitialized variable
// by mut-ref.
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![allow(non_camel_case_types)]
#![feature(from_ref)]
-#![cfg_attr(stage0, feature(match_default_bindings))]
#![feature(quote)]
#[macro_use] extern crate log;
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
-
-#![cfg_attr(stage0, feature(i128_type, i128))]
extern crate rustc_apfloat;
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://www.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(collections_range)]
#![feature(nonzero)]
#![feature(unboxed_closures)]
#![feature(fn_traits)]
#![feature(unsize)]
-#![cfg_attr(stage0, feature(conservative_impl_trait))]
-#![cfg_attr(stage0, feature(i128_type, i128))]
#![feature(specialization)]
#![feature(optin_builtin_traits)]
-#![cfg_attr(stage0, feature(underscore_lifetimes))]
#![feature(macro_vis_matcher)]
#![feature(allow_internal_unstable)]
-#![cfg_attr(stage0, feature(universal_impl_trait))]
#![cfg_attr(unix, feature(libc))]
#![cfg_attr(test, feature(test))]
f(&*self.read())
}
+ #[cfg(not(parallel_queries))]
+ #[inline(always)]
+ pub fn try_write(&self) -> Result<WriteGuard<T>, ()> {
+ self.0.try_borrow_mut().map_err(|_| ())
+ }
+
+ #[cfg(parallel_queries)]
+ #[inline(always)]
+ pub fn try_write(&self) -> Result<WriteGuard<T>, ()> {
+ self.0.try_write().ok_or(())
+ }
+
#[cfg(not(parallel_queries))]
#[inline(always)]
pub fn write(&self) -> WriteGuard<T> {
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(box_syntax)]
#![cfg_attr(unix, feature(libc))]
let mut cfgs = Vec::new();
for &(name, ref value) in sess.parse_sess.config.iter() {
let gated_cfg = GatedCfg::gate(&ast::MetaItem {
- name,
+ ident: ast::Ident::with_empty_ctxt(name),
node: ast::MetaItemKind::Word,
span: DUMMY_SP,
});
impl<'a> pprust::PpAnn for HygieneAnnotation<'a> {
fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> {
match node {
- pprust::NodeIdent(&ast::Ident { name, ctxt }) => {
+ pprust::NodeIdent(&ast::Ident { name, span }) => {
s.s.space()?;
// FIXME #16420: this doesn't display the connections
// between syntax contexts
- s.synth_comment(format!("{}{:?}", name.as_u32(), ctxt))
+ s.synth_comment(format!("{}{:?}", name.as_u32(), span.ctxt()))
}
pprust::NodeName(&name) => {
s.s.space()?;
pub fn t_param(&self, index: u32) -> Ty<'tcx> {
let name = format!("T{}", index);
- self.infcx.tcx.mk_param(index, Symbol::intern(&name))
+ self.infcx.tcx.mk_param(index, Symbol::intern(&name).as_str())
}
pub fn re_early_bound(&self, index: u32, name: &'static str) -> ty::Region<'tcx> {
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(custom_attribute)]
#![allow(unused_attributes)]
#![feature(range_contains)]
#![cfg_attr(unix, feature(libc))]
-#![cfg_attr(stage0, feature(conservative_impl_trait))]
-#![cfg_attr(stage0, feature(i128_type))]
#![feature(optin_builtin_traits)]
extern crate atty;
use std::borrow::Cow;
use std::cell::{RefCell, Cell};
-use std::mem;
use std::{error, fmt};
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst;
emitter: RefCell<Box<Emitter>>,
continue_after_error: Cell<bool>,
delayed_span_bug: RefCell<Option<Diagnostic>>,
- tracked_diagnostics: RefCell<Option<Vec<Diagnostic>>>,
// This set contains the `DiagnosticId` of all emitted diagnostics to avoid
// emitting the same diagnostic with extended help (`--teach`) twice, which
emitted_diagnostics: RefCell<FxHashSet<u128>>,
}
+fn default_track_diagnostic(_: &Diagnostic) {}
+
+thread_local!(pub static TRACK_DIAGNOSTICS: Cell<fn(&Diagnostic)> =
+ Cell::new(default_track_diagnostic));
+
#[derive(Default)]
pub struct HandlerFlags {
pub can_emit_warnings: bool,
emitter: RefCell::new(e),
continue_after_error: Cell::new(true),
delayed_span_bug: RefCell::new(None),
- tracked_diagnostics: RefCell::new(None),
tracked_diagnostic_codes: RefCell::new(FxHashSet()),
emitted_diagnostics: RefCell::new(FxHashSet()),
}
}
}
- pub fn track_diagnostics<F, R>(&self, f: F) -> (R, Vec<Diagnostic>)
- where F: FnOnce() -> R
- {
- let prev = mem::replace(&mut *self.tracked_diagnostics.borrow_mut(),
- Some(Vec::new()));
- let ret = f();
- let diagnostics = mem::replace(&mut *self.tracked_diagnostics.borrow_mut(), prev)
- .unwrap();
- (ret, diagnostics)
- }
-
/// `true` if a diagnostic with this code has already been emitted in this handler.
///
/// Used to suppress emitting the same error multiple times with extended explanation when
self.tracked_diagnostic_codes.borrow().contains(code)
}
+ pub fn force_print_db(&self, mut db: DiagnosticBuilder) {
+ self.emitter.borrow_mut().emit(&db);
+ db.cancel();
+ }
+
fn emit_db(&self, db: &DiagnosticBuilder) {
let diagnostic = &**db;
- if let Some(ref mut list) = *self.tracked_diagnostics.borrow_mut() {
- list.push(diagnostic.clone());
- }
+ TRACK_DIAGNOSTICS.with(|track_diagnostics| {
+ track_diagnostics.get()(diagnostic);
+ });
if let Some(ref code) = diagnostic.code {
self.tracked_diagnostic_codes.borrow_mut().insert(code.clone());
for list_item in attr.meta_item_list().unwrap_or_default() {
match list_item.word() {
Some(word) if value.is_none() =>
- value = Some(word.name().clone()),
+ value = Some(word.ident.name),
_ =>
// FIXME better-encapsulate meta_item (don't directly access `node`)
span_bug!(list_item.span(), "unexpected meta-item {:?}", list_item.node),
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
-#![cfg_attr(stage0, feature(conservative_impl_trait))]
#![feature(fs_read_write)]
-#![cfg_attr(stage0, feature(i128_type))]
-#![cfg_attr(stage0, feature(inclusive_range_syntax))]
#![feature(specialization)]
extern crate graphviz;
if fieldpat.node.is_shorthand {
continue;
}
- if let PatKind::Binding(_, _, ident, None) = fieldpat.node.pat.node {
- if ident.node == fieldpat.node.name {
+ if let PatKind::Binding(_, _, name, None) = fieldpat.node.pat.node {
+ if name.node == fieldpat.node.name {
let mut err = cx.struct_span_lint(NON_SHORTHAND_FIELD_PATTERNS,
fieldpat.span,
&format!("the `{}:` in this pattern is redundant",
- ident.node));
+ name.node));
let subspan = cx.tcx.sess.codemap().span_through_char(fieldpat.span, ':');
err.span_suggestion_short(subspan,
"remove this",
- format!("{}", ident.node));
+ format!("{}", name.node));
err.emit();
}
}
for arg in sig.decl.inputs.iter() {
match arg.pat.node {
ast::PatKind::Ident(_, ident, None) => {
- if ident.node.name == keywords::Invalid.name() {
+ if ident.name == keywords::Invalid.name() {
cx.span_lint(ANONYMOUS_PARAMETERS,
arg.pat.span,
"use of deprecated anonymous parameter");
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![cfg_attr(test, feature(test))]
#![feature(box_patterns)]
#![feature(box_syntax)]
-#![cfg_attr(stage0, feature(i128_type))]
#![feature(macro_vis_matcher)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
-#![cfg_attr(stage0, feature(never_type))]
#[macro_use]
extern crate syntax;
let node_ident;
match items[0].0.kind {
ast::UseTreeKind::Simple(rename) => {
- let orig_ident = items[0].0.prefix.segments.last().unwrap().identifier;
+ let orig_ident = items[0].0.prefix.segments.last().unwrap().ident;
if orig_ident.name == keywords::SelfValue.name() {
return;
} else {
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(box_syntax)]
#![feature(concat_idents)]
alloc = { path = "../liballoc" }
alloc_system = { path = "../liballoc_system" }
core = { path = "../libcore" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
ident: Symbol,
name: Symbol,
hash: Option<&Svh>,
+ extra_filename: Option<&str>,
span: Span,
path_kind: PathKind,
mut dep_kind: DepKind)
ident,
crate_name: name,
hash: hash.map(|a| &*a),
+ extra_filename: extra_filename,
filesearch: self.sess.target_filesearch(path_kind),
target: &self.sess.target.target,
triple: &self.sess.opts.target_triple,
::std::iter::once(krate).chain(crate_root.crate_deps
.decode(metadata)
.map(|dep| {
- debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
+ info!("resolving dep crate {} hash: `{}` extra filename: `{}`", dep.name, dep.hash,
+ dep.extra_filename);
if dep.kind == DepKind::UnexportedMacrosOnly {
return krate;
}
_ => dep.kind,
};
let (local_cnum, ..) = self.resolve_crate(
- root, dep.name, dep.name, Some(&dep.hash), span, PathKind::Dependency, dep_kind,
+ root, dep.name, dep.name, Some(&dep.hash), Some(&dep.extra_filename), span,
+ PathKind::Dependency, dep_kind,
);
local_cnum
})).collect()
ident: orig_name,
crate_name: rename,
hash: None,
+ extra_filename: None,
filesearch: self.sess.host_filesearch(PathKind::Crate),
target: &self.sess.host,
triple: &host_triple,
let dep_kind = DepKind::Implicit;
let (cnum, data) =
- self.resolve_crate(&None, name, name, None, DUMMY_SP, PathKind::Crate, dep_kind);
+ self.resolve_crate(&None, name, name, None, None, DUMMY_SP, PathKind::Crate, dep_kind);
// Sanity check the loaded crate to ensure it is indeed a panic runtime
// and the panic strategy is indeed what we thought it was.
let symbol = Symbol::intern(name);
let dep_kind = DepKind::Explicit;
let (_, data) =
- self.resolve_crate(&None, symbol, symbol, None, DUMMY_SP,
+ self.resolve_crate(&None, symbol, symbol, None, None, DUMMY_SP,
PathKind::Crate, dep_kind);
// Sanity check the loaded crate to ensure it is indeed a sanitizer runtime
let symbol = Symbol::intern("profiler_builtins");
let dep_kind = DepKind::Implicit;
let (_, data) =
- self.resolve_crate(&None, symbol, symbol, None, DUMMY_SP,
+ self.resolve_crate(&None, symbol, symbol, None, None, DUMMY_SP,
PathKind::Crate, dep_kind);
// Sanity check the loaded crate to ensure it is indeed a profiler runtime
name,
name,
None,
+ None,
DUMMY_SP,
PathKind::Crate,
DepKind::Implicit);
};
let (cnum, ..) = self.resolve_crate(
- &None, item.ident.name, orig_name, None, item.span, PathKind::Crate, dep_kind,
+ &None, item.ident.name, orig_name, None, None,
+ item.span, PathKind::Crate, dep_kind,
);
let def_id = definitions.opt_local_def_id(item.id).unwrap();
}
fn resolve_crate_from_path(&mut self, name: Symbol, span: Span) -> CrateNum {
- self.resolve_crate(&None, name, name, None, span, PathKind::Crate, DepKind::Explicit).0
+ self.resolve_crate(&None, name, name, None, None, span, PathKind::Crate,
+ DepKind::Explicit).0
}
}
fn_arg_names => { cdata.get_fn_arg_names(def_id.index) }
impl_parent => { cdata.get_parent_impl(def_id.index) }
trait_of_item => { cdata.get_trait_of_item(def_id.index) }
- item_body_nested_bodies => { cdata.item_body_nested_bodies(def_id.index) }
+ item_body_nested_bodies => { cdata.item_body_nested_bodies(tcx, def_id.index) }
const_is_rvalue_promotable_to_static => {
cdata.const_is_rvalue_promotable_to_static(def_id.index)
}
let reachable_non_generics = tcx
.exported_symbols(cdata.cnum)
.iter()
- .filter_map(|&(exported_symbol, _)| {
+ .filter_map(|&(exported_symbol, export_level)| {
if let ExportedSymbol::NonGeneric(def_id) = exported_symbol {
- return Some(def_id)
+ return Some((def_id, export_level))
} else {
None
}
crate_hash => { cdata.hash() }
original_crate_name => { cdata.name() }
+ extra_filename => { cdata.root.extra_filename.clone() }
+
+
implementations_of_trait => {
let mut result = vec![];
let filter = Some(other);
return Arc::new(Vec::new())
}
- Arc::new(cdata.exported_symbols())
+ Arc::new(cdata.exported_symbols(tcx))
}
wasm_custom_sections => { Lrc::new(cdata.wasm_custom_sections()) }
tcx.alloc_tables(ast.tables.decode((self, tcx)))
}
- pub fn item_body_nested_bodies(&self, id: DefIndex) -> ExternBodyNestedBodies {
+ pub fn item_body_nested_bodies(&self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ id: DefIndex)
+ -> ExternBodyNestedBodies {
if let Some(ref ast) = self.entry(id).ast {
- let ast = ast.decode(self);
+ let mut ast = ast.decode(self);
let nested_bodies: BTreeMap<_, _> = ast.nested_bodies
- .decode(self)
+ .decode((self, tcx.sess))
.map(|body| (body.id(), body))
.collect();
ExternBodyNestedBodies {
arg_names.decode(self).collect()
}
- pub fn exported_symbols(&self) -> Vec<(ExportedSymbol, SymbolExportLevel)> {
- self.root
- .exported_symbols
- .decode(self)
- .collect()
+ pub fn exported_symbols(&self,
+ tcx: TyCtxt<'a, 'tcx, 'tcx>)
+ -> Vec<(ExportedSymbol<'tcx>, SymbolExportLevel)> {
+ let lazy_seq: LazySeq<(ExportedSymbol<'tcx>, SymbolExportLevel)> =
+ LazySeq::with_position_and_length(self.root.exported_symbols.position,
+ self.root.exported_symbols.len);
+ lazy_seq.decode((self, tcx)).collect()
}
pub fn wasm_custom_sections(&self) -> Vec<DefId> {
let has_global_allocator = tcx.sess.has_global_allocator.get();
let root = self.lazy(&CrateRoot {
name: tcx.crate_name(LOCAL_CRATE),
+ extra_filename: tcx.sess.opts.cg.extra_filename.clone(),
triple: tcx.sess.opts.target_triple.clone(),
hash: link_meta.crate_hash,
disambiguator: tcx.sess.local_crate_disambiguator(),
name: self.tcx.original_crate_name(cnum),
hash: self.tcx.crate_hash(cnum),
kind: self.tcx.dep_kind(cnum),
+ extra_filename: self.tcx.extra_filename(cnum),
};
(cnum, dep)
})
// definition (as that's not defined in this crate).
fn encode_exported_symbols(&mut self,
exported_symbols: &[(ExportedSymbol, SymbolExportLevel)])
- -> LazySeq<(ExportedSymbol, SymbolExportLevel)> {
-
+ -> EncodedExportedSymbols {
// The metadata symbol name is special. It should not show up in
// downstream crates.
let metadata_symbol_name = SymbolName::new(&metadata_symbol_name(self.tcx));
- self.lazy_seq(exported_symbols
+ let lazy_seq = self.lazy_seq(exported_symbols
.iter()
.filter(|&&(ref exported_symbol, _)| {
match *exported_symbol {
_ => true,
}
})
- .cloned())
+ .cloned());
+
+ EncodedExportedSymbols {
+ len: lazy_seq.len,
+ position: lazy_seq.position,
+ }
}
fn encode_wasm_custom_sections(&mut self, statics: &[DefId]) -> LazySeq<DefIndex> {
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(box_patterns)]
-#![cfg_attr(stage0, feature(conservative_impl_trait))]
#![feature(fs_read_write)]
-#![cfg_attr(stage0, feature(i128_type))]
#![feature(libc)]
#![feature(macro_lifetime_matcher)]
#![feature(proc_macro_internals)]
//! 1. Does the filename match an rlib/dylib pattern? That is to say, does the
//! filename have the right prefix/suffix?
//! 2. Does the filename have the right prefix for the crate name being queried?
-//! This is filtering for files like `libfoo*.rlib` and such.
+//! This is filtering for files like `libfoo*.rlib` and such. If the crate
+//! we're looking for was originally compiled with -C extra-filename, the
+//! extra filename will be included in this prefix to reduce reading
+//! metadata from crates that would otherwise share our prefix.
//! 3. Is the file an actual rust library? This is done by loading the metadata
//! from the library and making sure it's actually there.
//! 4. Does the name in the metadata agree with the name of the library?
use rustc_back::target::{Target, TargetTriple};
use std::cmp;
+use std::collections::HashSet;
use std::fmt;
use std::fs;
use std::io::{self, Read};
pub ident: Symbol,
pub crate_name: Symbol,
pub hash: Option<&'a Svh>,
+ pub extra_filename: Option<&'a str>,
// points to either self.sess.target.target or self.sess.host, must match triple
pub target: &'a Target,
pub triple: &'a TargetTriple,
impl<'a> Context<'a> {
pub fn maybe_load_library_crate(&mut self) -> Option<Library> {
- self.find_library_crate()
+ let mut seen_paths = HashSet::new();
+ match self.extra_filename {
+ Some(s) => self.find_library_crate(s, &mut seen_paths)
+ .or_else(|| self.find_library_crate("", &mut seen_paths)),
+ None => self.find_library_crate("", &mut seen_paths)
+ }
}
pub fn report_errs(&mut self) -> ! {
unreachable!();
}
- fn find_library_crate(&mut self) -> Option<Library> {
+ fn find_library_crate(&mut self,
+ extra_prefix: &str,
+ seen_paths: &mut HashSet<PathBuf>)
+ -> Option<Library> {
// If an SVH is specified, then this is a transitive dependency that
// must be loaded via -L plus some filtering.
if self.hash.is_none() {
let staticpair = self.staticlibname();
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
- let dylib_prefix = format!("{}{}", dypair.0, self.crate_name);
- let rlib_prefix = format!("lib{}", self.crate_name);
- let staticlib_prefix = format!("{}{}", staticpair.0, self.crate_name);
+ let dylib_prefix = format!("{}{}{}", dypair.0, self.crate_name, extra_prefix);
+ let rlib_prefix = format!("lib{}{}", self.crate_name, extra_prefix);
+ let staticlib_prefix = format!("{}{}{}", staticpair.0, self.crate_name, extra_prefix);
let mut candidates = FxHashMap();
let mut staticlibs = vec![];
}
return FileDoesntMatch;
};
+
info!("lib candidate: {}", path.display());
let hash_str = hash.to_string();
let (ref mut rlibs, ref mut rmetas, ref mut dylibs) = *slot;
fs::canonicalize(path)
.map(|p| {
+ if seen_paths.contains(&p) {
+ return FileDoesntMatch
+ };
+ seen_paths.insert(p.clone());
match found_kind {
CrateFlavor::Rlib => { rlibs.insert(p, kind); }
CrateFlavor::Rmeta => { rmetas.insert(p, kind); }
use rustc::hir::def::{self, CtorKind};
use rustc::hir::def_id::{DefIndex, DefId, CrateNum};
use rustc::ich::StableHashingContext;
-use rustc::middle::exported_symbols::{ExportedSymbol, SymbolExportLevel};
use rustc::middle::cstore::{DepKind, LinkagePreference, NativeLibrary, ForeignModule};
use rustc::middle::lang_items;
use rustc::mir;
pub struct CrateRoot {
pub name: Symbol,
pub triple: TargetTriple,
+ pub extra_filename: String,
pub hash: hir::svh::Svh,
pub disambiguator: CrateDisambiguator,
pub panic_strategy: PanicStrategy,
pub codemap: LazySeq<syntax_pos::FileMap>,
pub def_path_table: Lazy<hir::map::definitions::DefPathTable>,
pub impls: LazySeq<TraitImpls>,
- pub exported_symbols: LazySeq<(ExportedSymbol, SymbolExportLevel)>,
+ pub exported_symbols: EncodedExportedSymbols,
pub wasm_custom_sections: LazySeq<DefIndex>,
pub index: LazySeq<index::Index>,
pub name: ast::Name,
pub hash: hir::svh::Svh,
pub kind: DepKind,
+ pub extra_filename: String,
}
impl_stable_hash_for!(struct CrateDep {
name,
hash,
- kind
+ kind,
+ extra_filename
});
#[derive(RustcEncodable, RustcDecodable)]
// Tags used for encoding Spans:
pub const TAG_VALID_SPAN: u8 = 0;
pub const TAG_INVALID_SPAN: u8 = 1;
+
+#[derive(RustcEncodable, RustcDecodable)]
+pub struct EncodedExportedSymbols {
+ pub position: usize,
+ pub len: usize,
+}
.collect::<Vec<_>>();
if mois.is_empty() {
+ let root_place = self.prefixes(&place, PrefixSet::All)
+ .last()
+ .unwrap();
+
+ if self.moved_error_reported
+ .contains(&root_place.clone())
+ {
+ debug!(
+ "report_use_of_moved_or_uninitialized place: error about {:?} suppressed",
+ root_place
+ );
+ return;
+ }
+
+ self.moved_error_reported
+ .insert(root_place.clone());
+
let item_msg = match self.describe_place(place) {
Some(name) => format!("`{}`", name),
None => "value".to_owned(),
use dataflow::move_paths::{IllegalMoveOriginKind, MoveError};
use dataflow::move_paths::{HasMoveData, LookupResult, MoveData, MovePathIndex};
use util::borrowck_errors::{BorrowckErrors, Origin};
+use util::collect_writes::FindAssignments;
use std::iter;
},
access_place_error_reported: FxHashSet(),
reservation_error_reported: FxHashSet(),
+ moved_error_reported: FxHashSet(),
nonlexical_regioncx: opt_regioncx,
nonlexical_cause_info: None,
};
/// but it is currently inconvenient to track down the BorrowIndex
/// at the time we detect and report a reservation error.
reservation_error_reported: FxHashSet<Place<'tcx>>,
+ /// This field keeps track of errors reported in the checking of moved variables,
+ /// so that we don't report seemingly duplicate errors.
+ moved_error_reported: FxHashSet<Place<'tcx>>,
/// Non-lexical region inference context, if NLL is enabled. This
/// contains the results from region inference and lets us e.g.
/// find out which CFG points are contained in each borrow region.
LocalMutationIsAllowed::No,
flow_state,
);
- self.check_if_path_is_moved(
+ self.check_if_path_or_subpath_is_moved(
context,
InitializationRequiringAction::Use,
(output, span),
// Write of P[i] or *P, or WriteAndRead of any P, requires P init'd.
match mode {
MutateMode::WriteAndRead => {
- self.check_if_path_is_moved(
+ self.check_if_path_or_subpath_is_moved(
context,
InitializationRequiringAction::Update,
place_span,
flow_state,
);
- self.check_if_path_is_moved(
+ self.check_if_path_or_subpath_is_moved(
context,
InitializationRequiringAction::Borrow,
(place, span),
LocalMutationIsAllowed::No,
flow_state,
);
- self.check_if_path_is_moved(
+ self.check_if_path_or_subpath_is_moved(
context,
InitializationRequiringAction::Use,
(place, span),
);
// Finally, check if path was already moved.
- self.check_if_path_is_moved(
+ self.check_if_path_or_subpath_is_moved(
context,
InitializationRequiringAction::Use,
(place, span),
);
// Finally, check if path was already moved.
- self.check_if_path_is_moved(
+ self.check_if_path_or_subpath_is_moved(
context,
InitializationRequiringAction::Use,
(place, span),
LocalMutationIsAllowed::No,
flow_state,
);
- // We do not need to call `check_if_path_is_moved`
+ // We do not need to call `check_if_path_or_subpath_is_moved`
// again, as we already called it when we made the
// initial reservation.
}
}
}
- fn check_if_path_is_moved(
+ fn check_if_full_path_is_moved(
&mut self,
context: Context,
desired_action: InitializationRequiringAction,
//
// 1. Move of `a.b.c`, use of `a.b.c`
// 2. Move of `a.b.c`, use of `a.b.c.d` (without first reinitializing `a.b.c.d`)
- // 3. Move of `a.b.c`, use of `a` or `a.b`
- // 4. Uninitialized `(a.b.c: &_)`, use of `*a.b.c`; note that with
+ // 3. Uninitialized `(a.b.c: &_)`, use of `*a.b.c`; note that with
// partial initialization support, one might have `a.x`
// initialized but not `a.b`.
//
// OK scenarios:
//
- // 5. Move of `a.b.c`, use of `a.b.d`
- // 6. Uninitialized `a.x`, initialized `a.b`, use of `a.b`
- // 7. Copied `(a.b: &_)`, use of `*(a.b).c`; note that `a.b`
+ // 4. Move of `a.b.c`, use of `a.b.d`
+ // 5. Uninitialized `a.x`, initialized `a.b`, use of `a.b`
+ // 6. Copied `(a.b: &_)`, use of `*(a.b).c`; note that `a.b`
// must have been initialized for the use to be sound.
- // 8. Move of `a.b.c` then reinit of `a.b.c.d`, use of `a.b.c.d`
+ // 7. Move of `a.b.c` then reinit of `a.b.c.d`, use of `a.b.c.d`
// The dataflow tracks shallow prefixes distinctly (that is,
// field-accesses on P distinctly from P itself), in order to
// have a MovePath, that should capture the initialization
// state for the place scenario.
//
- // This code covers scenarios 1, 2, and 4.
+ // This code covers scenarios 1, 2, and 3.
- debug!("check_if_path_is_moved part1 place: {:?}", place);
+ debug!("check_if_full_path_is_moved place: {:?}", place);
match self.move_path_closest_to(place) {
Ok(mpi) => {
if maybe_uninits.contains(&mpi) {
// ancestors; dataflow recurs on children when parents
// move (to support partial (re)inits).
//
- // (I.e. querying parents breaks scenario 8; but may want
+ // (I.e. querying parents breaks scenario 7; but may want
// to do such a query based on partial-init feature-gate.)
}
+ }
+
+ fn check_if_path_or_subpath_is_moved(
+ &mut self,
+ context: Context,
+ desired_action: InitializationRequiringAction,
+ place_span: (&Place<'tcx>, Span),
+ flow_state: &Flows<'cx, 'gcx, 'tcx>,
+ ) {
+ // FIXME: analogous code in check_loans first maps `place` to
+ // its base_path ... but is that what we want here?
+ let place = self.base_path(place_span.0);
+
+ let maybe_uninits = &flow_state.uninits;
+ let curr_move_outs = &flow_state.move_outs;
+
+ // Bad scenarios:
+ //
+ // 1. Move of `a.b.c`, use of `a` or `a.b`; note that with
+ //    partial initialization support, one might have `a.x`
+ //    initialized but not `a.b`.
+ // 2. All bad scenarios from `check_if_full_path_is_moved`
+ //
+ // OK scenarios:
+ //
+ // 3. Move of `a.b.c`, use of `a.b.d`
+ // 4. Uninitialized `a.x`, initialized `a.b`, use of `a.b`
+ // 5. Copied `(a.b: &_)`, use of `*(a.b).c`; note that `a.b`
+ // must have been initialized for the use to be sound.
+ // 6. Move of `a.b.c` then reinit of `a.b.c.d`, use of `a.b.c.d`
+
+ self.check_if_full_path_is_moved(context, desired_action, place_span, flow_state);
// A move of any shallow suffix of `place` also interferes
// with an attempt to use `place`. This is scenario 3 above.
// (Distinct from handling of scenarios 1+2+4 above because
// `place` does not interfere with suffixes of its prefixes,
// e.g. `a.b.c` does not interfere with `a.b.d`)
+ //
+ // This code covers scenario 1.
- debug!("check_if_path_is_moved part2 place: {:?}", place);
+ debug!("check_if_path_or_subpath_is_moved place: {:?}", place);
if let Some(mpi) = self.move_path_for_place(place) {
if let Some(child_mpi) = maybe_uninits.has_any_child_of(mpi) {
self.report_use_of_moved_or_uninitialized(
(place, span): (&Place<'tcx>, Span),
flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
- // recur down place; dispatch to check_if_path_is_moved when necessary
+ debug!("check_if_assigned_path_is_moved place: {:?}", place);
+ // recur down place; dispatch to external checks when necessary
let mut place = place;
loop {
match *place {
Place::Projection(ref proj) => {
let Projection { ref base, ref elem } = **proj;
match *elem {
- ProjectionElem::Deref |
- // assigning to *P requires `P` initialized.
ProjectionElem::Index(_/*operand*/) |
ProjectionElem::ConstantIndex { .. } |
- // assigning to P[i] requires `P` initialized.
+ // assigning to P[i] requires P to be valid.
ProjectionElem::Downcast(_/*adt_def*/, _/*variant_idx*/) =>
// assigning to (P->variant) is okay if assigning to `P` is okay
//
// FIXME: is this true even if P is a adt with a dtor?
{ }
+ // assigning to (*P) requires P to be initialized
+ ProjectionElem::Deref => {
+ self.check_if_full_path_is_moved(
+ context, InitializationRequiringAction::Use,
+ (base, span), flow_state);
+ // (base initialized; no need to
+ // recur further)
+ break;
+ }
+
ProjectionElem::Subslice { .. } => {
panic!("we don't allow assignments to subslices, context: {:?}",
context);
// check_loans.rs first maps
// `base` to its base_path.
- self.check_if_path_is_moved(
+ self.check_if_path_or_subpath_is_moved(
context, InitializationRequiringAction::Assignment,
(base, span), flow_state);
}
}
+ // If the place can be described at all, returns the fixed diagnostic
+ // phrase "data in a `&` reference"; otherwise `None`. Used to decide
+ // whether the more specific &-reference wording applies.
+ fn specialized_description(&self, place:&Place<'tcx>) -> Option<String>{
+ if let Some(_name) = self.describe_place(place) {
+ Some(format!("data in a `&` reference"))
+ } else {
+ None
+ }
+ }
+
+ // Default diagnostic noun for an immutable place: "immutable item `x`"
+ // when the place has a printable name, plain "immutable item" otherwise.
+ fn get_default_err_msg(&self, place:&Place<'tcx>) -> String{
+ match self.describe_place(place) {
+ Some(name) => format!("immutable item `{}`", name),
+ None => "immutable item".to_owned(),
+ }
+ }
+
+ // Secondary message: prefer the specialized `&`-reference wording when
+ // `specialized_description` produced one, else fall back to the default
+ // "immutable item" message.
+ fn get_secondary_err_msg(&self, place:&Place<'tcx>) -> String{
+ match self.specialized_description(place) {
+ Some(_) => format!("data in a `&` reference"),
+ None => self.get_default_err_msg(place)
+ }
+ }
+
+ // Primary message for assigning through a `&` reference; names the
+ // binding when `describe_place` can, otherwise uses a generic phrasing.
+ fn get_primary_err_msg(&self, place:&Place<'tcx>) -> String{
+ if let Some(name) = self.describe_place(place) {
+ format!("`{}` is a `&` reference, so the data it refers to cannot be written", name)
+ } else {
+ format!("cannot assign through `&`-reference")
+ }
+ }
+
/// Check the permissions for the given place and read or write kind
///
/// Returns true if an error is reported, false otherwise.
self.is_mutable(place, is_local_mutation_allowed)
{
error_reported = true;
-
- let item_msg = match self.describe_place(place) {
- Some(name) => format!("immutable item `{}`", name),
- None => "immutable item".to_owned(),
- };
-
+ let item_msg = self.get_default_err_msg(place);
let mut err = self.tcx
.cannot_borrow_path_as_mutable(span, &item_msg, Origin::Mir);
err.span_label(span, "cannot borrow as mutable");
if place != place_err {
if let Some(name) = self.describe_place(place_err) {
- err.note(&format!("Value not mutable causing this error: `{}`", name));
+ err.note(&format!("the value which is causing this path not to be mutable \
+ is...: `{}`", name));
}
}
err.emit();
},
Reservation(WriteKind::Mutate) | Write(WriteKind::Mutate) => {
+
if let Err(place_err) = self.is_mutable(place, is_local_mutation_allowed) {
error_reported = true;
+ let mut err_info = None;
+ match *place_err {
+
+ Place::Projection(box Projection {
+ ref base, elem:ProjectionElem::Deref}) => {
+ match *base {
+ Place::Local(local) => {
+ let locations = self.mir.find_assignments(local);
+ if locations.len() > 0 {
+ let item_msg = if error_reported {
+ self.get_secondary_err_msg(base)
+ } else {
+ self.get_default_err_msg(place)
+ };
+ err_info = Some((
+ self.mir.source_info(locations[0]).span,
+ "consider changing this to be a \
+ mutable reference: `&mut`", item_msg,
+ self.get_primary_err_msg(base)));
+ }
+ },
+ _ => {},
+ }
+ },
+ _ => {},
+ }
- let item_msg = match self.describe_place(place) {
- Some(name) => format!("immutable item `{}`", name),
- None => "immutable item".to_owned(),
- };
-
- let mut err = self.tcx.cannot_assign(span, &item_msg, Origin::Mir);
- err.span_label(span, "cannot mutate");
-
- if place != place_err {
- if let Some(name) = self.describe_place(place_err) {
- err.note(&format!("Value not mutable causing this error: `{}`", name));
+ if let Some((err_help_span, err_help_stmt, item_msg, sec_span)) = err_info {
+ let mut err = self.tcx.cannot_assign(span, &item_msg, Origin::Mir);
+ err.span_suggestion(err_help_span, err_help_stmt, format!(""));
+ if place != place_err {
+ err.span_label(span, sec_span);
}
+ err.emit()
+ } else {
+ let item_msg_ = self.get_default_err_msg(place);
+ let mut err = self.tcx.cannot_assign(span, &item_msg_, Origin::Mir);
+ err.span_label(span, "cannot mutate");
+ if place != place_err {
+ if let Some(name) = self.describe_place(place_err) {
+ err.note(&format!("the value which is causing this path not to be \
+ mutable is...: `{}`", name));
+ }
+ }
+ err.emit();
}
-
- err.emit();
}
}
Reservation(WriteKind::Move)
);
}
}
-
Activation(..) => {} // permission checks are done at Reservation point.
-
Read(ReadKind::Borrow(BorrowKind::Unique))
| Read(ReadKind::Borrow(BorrowKind::Mut { .. }))
| Read(ReadKind::Borrow(BorrowKind::Shared))
}
}
}
+
mutability: Mutability::Not,
};
if let Some(hir::map::NodeBinding(pat)) = tcx.hir.find(var_id) {
- if let hir::PatKind::Binding(_, _, ref ident, _) = pat.node {
- decl.debug_name = ident.node;
+ if let hir::PatKind::Binding(_, _, ref name, _) = pat.node {
+ decl.debug_name = name.node;
let bm = *hir.tables.pat_binding_modes()
.get(pat.hir_id)
assigned_map: FxHashMap<Place<'tcx>, FxHashSet<BorrowIndex>>,
/// Locations which activate borrows.
- /// NOTE: A given location may activate more than one borrow in the future
- /// when more general two-phase borrow support is introduced, but for now we
- /// only need to store one borrow index
- activation_map: FxHashMap<Location, BorrowIndex>,
+ activation_map: FxHashMap<Location, FxHashSet<BorrowIndex>>,
/// Every borrow has a region; this maps each such regions back to
/// its borrow-indexes.
idx_vec: IndexVec<BorrowIndex, BorrowData<'tcx>>,
location_map: FxHashMap<Location, BorrowIndex>,
assigned_map: FxHashMap<Place<'tcx>, FxHashSet<BorrowIndex>>,
- activation_map: FxHashMap<Location, BorrowIndex>,
+ activation_map: FxHashMap<Location, FxHashSet<BorrowIndex>>,
region_map: FxHashMap<Region<'tcx>, FxHashSet<BorrowIndex>>,
local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
region_span_map: FxHashMap<RegionKind, Span>,
let idx = self.idx_vec.push(borrow);
self.location_map.insert(location, idx);
- // This assert is a good sanity check until more general 2-phase borrow
- // support is introduced. See NOTE on the activation_map field for more
- assert!(!self.activation_map.contains_key(&activate_location),
- "More than one activation introduced at the same location.");
- self.activation_map.insert(activate_location, idx);
-
+ insert(&mut self.activation_map, &activate_location, idx);
insert(&mut self.assigned_map, assigned_place, idx);
insert(&mut self.region_map, ®ion, idx);
if let Some(local) = root_local(borrowed_place) {
location: Location) {
// Handle activations
match self.activation_map.get(&location) {
- Some(&activated) => {
- debug!("activating borrow {:?}", activated);
- sets.gen(&ReserveOrActivateIndex::active(activated))
+ Some(activations) => {
+ for activated in activations {
+ debug!("activating borrow {:?}", activated);
+ sets.gen(&ReserveOrActivateIndex::active(*activated))
+ }
}
None => {}
}
} else {
sess.span_err(
item.span,
- &format!("{} attribute requires a path", item.name()));
+ &format!("{} attribute requires a path", item.ident));
return None;
}
}
}
}
- PatKind::Binding(_, id, ref ident, ref sub) => {
+ PatKind::Binding(_, id, ref name, ref sub) => {
let var_ty = self.tables.node_id_to_type(pat.hir_id);
let region = match var_ty.sty {
ty::TyRef(r, _) => Some(r),
if let ty::TyRef(_, mt) = ty.sty {
ty = mt.ty;
} else {
- bug!("`ref {}` has wrong type {}", ident.node, ty);
+ bug!("`ref {}` has wrong type {}", name.node, ty);
}
}
PatternKind::Binding {
mutability,
mode,
- name: ident.node,
+ name: name.node,
var: id,
ty: var_ty,
subpattern: self.lower_opt_pattern(sub),
}
- pub(crate) fn write_discriminant_value(
+ pub fn write_discriminant_value(
&mut self,
dest_ty: Ty<'tcx>,
dest: Place,
*/
-#![deny(warnings)]
-
#![feature(slice_patterns)]
#![feature(from_ref)]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(catch_expr)]
-#![cfg_attr(stage0, feature(conservative_impl_trait))]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(decl_macro)]
#![feature(dyn_trait)]
#![feature(fs_read_write)]
-#![cfg_attr(stage0, feature(i128_type))]
-#![cfg_attr(stage0, feature(inclusive_range_syntax))]
#![feature(macro_vis_matcher)]
-#![cfg_attr(stage0, feature(match_default_bindings))]
#![feature(exhaustive_patterns)]
#![feature(range_contains)]
#![feature(rustc_diagnostic_macros)]
#![feature(nonzero)]
-#![cfg_attr(stage0, feature(underscore_lifetimes))]
-#![cfg_attr(stage0, feature(never_type))]
#![feature(inclusive_range_fields)]
+#![feature(crate_visibility_modifier)]
extern crate arena;
#[macro_use]
use rustc_data_structures::bitvec::BitVector;
-use std::iter;
-
#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
pub enum MonoItemCollectionMode {
Eager,
ty::TyClosure(def_id, substs) => {
let instance = monomorphize::resolve_closure(
self.tcx, def_id, substs, ty::ClosureKind::FnOnce);
- self.output.push(create_fn_mono_item(instance));
+ if should_monomorphize_locally(self.tcx, &instance) {
+ self.output.push(create_fn_mono_item(instance));
+ }
}
_ => bug!(),
}
ty::InstanceDef::Intrinsic(_) |
ty::InstanceDef::CloneShim(..) => return true
};
- match tcx.hir.get_if_local(def_id) {
+
+ return match tcx.hir.get_if_local(def_id) {
Some(hir_map::NodeForeignItem(..)) => {
false // foreign items are linked against, not translated.
}
Some(_) => true,
None => {
if tcx.is_reachable_non_generic(def_id) ||
- tcx.is_foreign_item(def_id)
+ tcx.is_foreign_item(def_id) ||
+ is_available_upstream_generic(tcx, def_id, instance.substs)
{
// We can link to the item in question, no instance needed
// in this crate
true
}
}
+ };
+
+ // Returns true when an upstream crate already exports a monomorphization
+ // of `def_id` instantiated with exactly these `substs`, so this crate can
+ // link against it rather than instantiating its own copy. Only ever true
+ // in share-generics mode and only for genuinely generic instances.
+ fn is_available_upstream_generic<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId,
+ substs: &'tcx Substs<'tcx>)
+ -> bool {
+ debug_assert!(!def_id.is_local());
+
+ // If we are not in share generics mode, we don't link to upstream
+ // monomorphizations but always instantiate our own internal versions
+ // instead.
+ if !tcx.share_generics() {
+ return false
+ }
+
+ // If this instance has no type parameters, it cannot be a shared
+ // monomorphization. Non-generic instances are already handled above
+ // by `is_reachable_non_generic()`
+ if substs.types().next().is_none() {
+ return false
+ }
+
+ // Take a look at the available monomorphizations listed in the metadata
+ // of upstream crates.
+ tcx.upstream_monomorphizations_for(def_id)
+ .map(|set| set.contains_key(substs))
+ .unwrap_or(false)
}
}
// late-bound regions, since late-bound
// regions must appear in the argument
// listing.
- let main_ret_ty = main_ret_ty.no_late_bound_regions().unwrap();
+ let main_ret_ty = self.tcx.erase_regions(
+ &main_ret_ty.no_late_bound_regions().unwrap(),
+ );
let start_instance = Instance::resolve(
self.tcx,
ty::ParamEnv::reveal_all(),
start_def_id,
- self.tcx.mk_substs(iter::once(Kind::from(main_ret_ty)))
+ self.tcx.intern_substs(&[Kind::from(main_ret_ty)])
).unwrap();
self.output.push(create_fn_mono_item(start_instance));
let is_incremental_build = tcx.sess.opts.incremental.is_some();
let mut internalization_candidates = FxHashSet();
+ // Determine if monomorphizations instantiated in this crate will be made
+ // available to downstream crates. This depends on whether we are in
+ // share-generics mode and whether the current crate can even have
+ // downstream crates.
+ let export_generics = tcx.share_generics() &&
+ tcx.local_crate_exports_generics();
+
for trans_item in trans_items {
match trans_item.instantiation_mode(tcx) {
InstantiationMode::GloballyShared { .. } => {}
.or_insert_with(make_codegen_unit);
let mut can_be_internalized = true;
- let default_visibility = |id: DefId| {
- if tcx.sess.target.target.options.default_hidden_visibility &&
- tcx.symbol_export_level(id) != SymbolExportLevel::C
- {
- Visibility::Hidden
- } else {
+ let default_visibility = |id: DefId, is_generic: bool| {
+ if !tcx.sess.target.target.options.default_hidden_visibility {
+ return Visibility::Default
+ }
+
+ // Generic functions never have export level C
+ if is_generic {
+ return Visibility::Hidden
+ }
+
+ // Things with export level C don't get instantiated in downstream
+ // crates
+ if !id.is_local() {
+ return Visibility::Hidden
+ }
+
+ if let Some(&SymbolExportLevel::C) = tcx.reachable_non_generics(id.krate)
+ .get(&id) {
Visibility::Default
+ } else {
+ Visibility::Hidden
}
};
let (linkage, mut visibility) = match trans_item.explicit_linkage(tcx) {
MonoItem::Fn(ref instance) => {
let visibility = match instance.def {
InstanceDef::Item(def_id) => {
+ let is_generic = instance.substs
+ .types()
+ .next()
+ .is_some();
+
// The `start_fn` lang item is actually a
// monomorphized instance of a function in the
// standard library, used for the `main`
can_be_internalized = false;
Visibility::Hidden
} else if def_id.is_local() {
- if tcx.is_reachable_non_generic(def_id) {
+ if is_generic {
+ if export_generics {
+ if tcx.is_unreachable_local_definition(def_id) {
+ // This instance cannot be used
+ // from another crate.
+ Visibility::Hidden
+ } else {
+ // This instance might be useful in
+ // a downstream crate.
+ can_be_internalized = false;
+ default_visibility(def_id, true)
+ }
+ } else {
+ // We are not exporting generics or
+ // the definition is not reachable
+ // for downstream crates, we can
+ // internalize its instantiations.
+ Visibility::Hidden
+ }
+ } else {
+ // This isn't a generic function.
+ if tcx.is_reachable_non_generic(def_id) {
+ can_be_internalized = false;
+ debug_assert!(!is_generic);
+ default_visibility(def_id, false)
+ } else {
+ Visibility::Hidden
+ }
+ }
+ } else {
+ // This is an upstream DefId.
+ if export_generics && is_generic {
+ // If it is a upstream monomorphization
+ // and we export generics, we must make
+ // it available to downstream crates.
can_be_internalized = false;
- default_visibility(def_id)
+ default_visibility(def_id, true)
} else {
Visibility::Hidden
}
- } else {
- Visibility::Hidden
}
}
InstanceDef::FnPtrShim(..) |
MonoItem::Static(def_id) => {
let visibility = if tcx.is_reachable_non_generic(def_id) {
can_be_internalized = false;
- default_visibility(def_id)
+ default_visibility(def_id, false)
} else {
Visibility::Hidden
};
let def_id = tcx.hir.local_def_id(node_id);
let visibility = if tcx.is_reachable_non_generic(def_id) {
can_be_internalized = false;
- default_visibility(def_id)
+ default_visibility(def_id, false)
} else {
Visibility::Hidden
};
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_assign(self, span: Span, desc: &str, o: Origin) -> DiagnosticBuilder<'cx>
+ fn cannot_assign(self, span: Span, desc: &str, o: Origin)
+ -> DiagnosticBuilder<'cx>
{
let err = struct_span_err!(self, span, E0594,
"cannot assign to {}{OGN}",
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::mir::{Local, Location};
+use rustc::mir::Mir;
+use rustc::mir::visit::PlaceContext;
+use rustc::mir::visit::Visitor;
+
+// Extension trait for querying a MIR body for direct whole-local
+// assignments (`x = ...`); used by borrowck diagnostics to locate the
+// statement(s) that initialized a binding.
+crate trait FindAssignments {
+ // Finds all statements that assign directly to local (i.e., X = ...)
+ // and returns their locations.
+ fn find_assignments(&self, local: Local) -> Vec<Location>;
+}
+
+// Walks the whole MIR body with `FindLocalAssignmentVisitor` and collects
+// every location that stores into `local`.
+impl<'tcx> FindAssignments for Mir<'tcx>{
+ fn find_assignments(&self, local: Local) -> Vec<Location>{
+ let mut visitor = FindLocalAssignmentVisitor{ needle: local, locations: vec![]};
+ visitor.visit_mir(self);
+ visitor.locations
+ }
+}
+
+// The Visitor walks the MIR to return the assignment statements corresponding
+// to a Local.
+struct FindLocalAssignmentVisitor {
+ needle: Local, // the local whose assignments we are collecting
+ locations: Vec<Location>, // accumulated locations that assign to `needle`
+}
+
+impl<'tcx> Visitor<'tcx> for FindLocalAssignmentVisitor {
+ // Record `location` whenever `local` is our needle and the use is a
+ // direct write (plain store or a call writing its return value).
+ fn visit_local(&mut self,
+ local: &Local,
+ place_context: PlaceContext<'tcx>,
+ location: Location) {
+ if self.needle != *local {
+ return;
+ }
+
+ match place_context {
+ // Store and Call destinations are whole-local assignments.
+ PlaceContext::Store | PlaceContext::Call => {
+ self.locations.push(location);
+ }
+ // All other contexts are reads, borrows, projections, or
+ // storage/validation markers — deliberately not recorded.
+ PlaceContext::AsmOutput |
+ PlaceContext::Drop |
+ PlaceContext::Inspect |
+ PlaceContext::Borrow { .. } |
+ PlaceContext::Projection(..) |
+ PlaceContext::Copy |
+ PlaceContext::Move |
+ PlaceContext::StorageLive |
+ PlaceContext::StorageDead |
+ PlaceContext::Validate => {
+ // TODO
+ // self.super_local(local)
+ }
+ }
+ }
+ // TODO
+ // fn super_local()
+}
mod graphviz;
pub(crate) mod pretty;
pub mod liveness;
+pub mod collect_writes;
pub use self::alignment::is_disaligned;
pub use self::pretty::{dump_enabled, dump_mir, write_mir_pretty, PassWhere};
alloc = { path = "../liballoc" }
alloc_system = { path = "../liballoc_system" }
core = { path = "../libcore" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
&self.session.parse_sess.span_diagnostic
}
- fn check_lifetime(&self, lifetime: &Lifetime) {
+ fn check_lifetime(&self, ident: Ident) {
let valid_names = [keywords::UnderscoreLifetime.name(),
keywords::StaticLifetime.name(),
keywords::Invalid.name()];
- if !valid_names.contains(&lifetime.ident.name) &&
- token::is_reserved_ident(lifetime.ident.without_first_quote()) {
- self.err_handler().span_err(lifetime.span, "lifetimes cannot use keyword names");
+ if !valid_names.contains(&ident.name) &&
+ token::is_reserved_ident(ident.without_first_quote()) {
+ self.err_handler().span_err(ident.span, "lifetimes cannot use keyword names");
}
}
- fn check_label(&self, label: Ident, span: Span) {
- if token::is_reserved_ident(label.without_first_quote()) {
- self.err_handler().span_err(span, &format!("invalid label name `{}`", label.name));
+ fn check_label(&self, ident: Ident) {
+ if token::is_reserved_ident(ident.without_first_quote()) {
+ self.err_handler()
+ .span_err(ident.span, &format!("invalid label name `{}`", ident.name));
}
}
let non_lifetime_param_spans : Vec<_> = params.iter()
.filter_map(|param| match *param {
GenericParam::Lifetime(_) => None,
- GenericParam::Type(ref t) => Some(t.span),
+ GenericParam::Type(ref t) => Some(t.ident.span),
}).collect();
if !non_lifetime_param_spans.is_empty() {
self.err_handler().span_err(non_lifetime_param_spans,
match *param {
GenericParam::Lifetime(ref l) => {
if !l.bounds.is_empty() {
- let spans : Vec<_> = l.bounds.iter().map(|b| b.span).collect();
+ let spans: Vec<_> = l.bounds.iter().map(|b| b.ident.span).collect();
self.err_handler().span_err(spans,
"lifetime bounds cannot be used in this context");
}
for bound in bounds {
if let RegionTyParamBound(ref lifetime) = *bound {
if any_lifetime_bounds {
- span_err!(self.session, lifetime.span, E0226,
+ span_err!(self.session, lifetime.ident.span, E0226,
"only a single explicit lifetime bound is permitted");
break;
}
}
fn visit_label(&mut self, label: &'a Label) {
- self.check_label(label.ident, label.span);
+ self.check_label(label.ident);
visit::walk_label(self, label);
}
fn visit_lifetime(&mut self, lifetime: &'a Lifetime) {
- self.check_lifetime(lifetime);
+ self.check_lifetime(lifetime.ident);
visit::walk_lifetime(self, lifetime);
}
ItemKind::TraitAlias(Generics { ref params, .. }, ..) => {
for param in params {
if let GenericParam::Type(TyParam {
+ ident,
ref bounds,
ref default,
- span,
..
}) = *param
{
if !bounds.is_empty() {
- self.err_handler().span_err(span,
+ self.err_handler().span_err(ident.span,
"type parameters on the left side of a \
trait alias cannot be bounded");
}
if !default.is_none() {
- self.err_handler().span_err(span,
+ self.err_handler().span_err(ident.span,
"type parameters on the left side of a \
trait alias cannot have defaults");
}
.span_label(span, "pattern not allowed in foreign function").emit();
});
}
- ForeignItemKind::Static(..) | ForeignItemKind::Ty => {}
+ ForeignItemKind::Static(..) | ForeignItemKind::Ty | ForeignItemKind::Macro(..) => {}
}
visit::walk_foreign_item(self, fi)
match (param, seen_non_lifetime_param) {
(&GenericParam::Lifetime(ref ld), true) => {
self.err_handler()
- .span_err(ld.lifetime.span, "lifetime parameters must be leading");
+ .span_err(ld.lifetime.ident.span, "lifetime parameters must be leading");
},
(&GenericParam::Lifetime(_), false) => {}
_ => {
}
if let GenericParam::Type(ref ty_param @ TyParam { default: Some(_), .. }) = *param {
- seen_default = Some(ty_param.span);
+ seen_default = Some(ty_param.ident.span);
} else if let Some(span) = seen_default {
self.err_handler()
.span_err(span, "type parameters with a default must be trailing");
self.check_late_bound_lifetime_defs(&t.bound_generic_params);
visit::walk_poly_trait_ref(self, t, m);
}
+
+ fn visit_mac(&mut self, mac: &Spanned<Mac_>) {
+ // when a new macro kind is added but the author forgets to set it up for expansion
+ // because that's the only part that won't cause a compiler error
+ self.session.diagnostic()
+ .span_bug(mac.span, "macro invocation missed in expansion; did you forget to override \
+ the relevant `fold_*()` method in `PlaceholderExpander`?");
+ }
}
// Bans nested `impl Trait`, e.g. `impl Into<impl Debug>`.
}
}
}
+
+ fn visit_mac(&mut self, _mac: &Spanned<Mac_>) {
+ // covered in AstValidator
+ }
}
// Bans `impl Trait` in path projections like `<impl Iterator>::Item` or `Foo::Bar<impl Trait>`.
_ => visit::walk_ty(self, t),
}
}
+
+ fn visit_mac(&mut self, _mac: &Spanned<Mac_>) {
+ // covered in AstValidator
+ }
}
pub fn check_crate(session: &Session, krate: &Crate) {
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(rustc_diagnostic_macros)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![deny(warnings)]
#![allow(bad_style)]
pub struct Intrinsic {
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(rustc_diagnostic_macros)]
#![feature(staged_api)]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(rustc_diagnostic_macros)]
use syntax::ast::{self, CRATE_NODE_ID, Ident};
use syntax::symbol::keywords;
use syntax_pos::Span;
-use syntax_pos::hygiene::SyntaxContext;
use std::cmp;
use std::mem::replace;
impl<'a, 'tcx> NamePrivacyVisitor<'a, 'tcx> {
// Checks that a field in a struct constructor (expression or pattern) is accessible.
fn check_field(&mut self,
- use_ctxt: SyntaxContext, // Syntax context of the field name at the use site
+ use_ctxt: Span, // Syntax context of the field name at the use site
span: Span, // Span of the field pattern, e.g. `x: 0`
def: &'tcx ty::AdtDef, // Definition of the struct or enum
field: &'tcx ty::FieldDef) { // Definition of the field
- let ident = Ident { ctxt: use_ctxt.modern(), ..keywords::Invalid.ident() };
+ let ident = Ident::new(keywords::Invalid.name(), use_ctxt.modern());
let def_id = self.tcx.adjust_ident(ident, def.did, self.current_item).1;
if !def.is_enum() && !field.vis.is_accessible_from(def_id, self.tcx) {
struct_span_err!(self.tcx.sess, span, E0451, "field `{}` of {} `{}` is private",
for variant_field in &variant.fields {
let field = fields.iter().find(|f| f.name.node == variant_field.name);
let (use_ctxt, span) = match field {
- Some(field) => (field.name.node.to_ident().ctxt, field.span),
- None => (base.span.ctxt(), base.span),
+ Some(field) => (field.name.node.to_ident().span, field.span),
+ None => (base.span, base.span),
};
self.check_field(use_ctxt, span, adt, variant_field);
}
} else {
for field in fields {
- let use_ctxt = field.name.node.to_ident().ctxt;
+ let use_ctxt = field.name.node.to_ident().span;
let field_def = variant.field_named(field.name.node);
self.check_field(use_ctxt, field.span, adt, field_def);
}
let adt = self.tables.pat_ty(pat).ty_adt_def().unwrap();
let variant = adt.variant_of_def(def);
for field in fields {
- let use_ctxt = field.node.name.to_ident().ctxt;
+ let use_ctxt = field.node.name.to_ident().span;
let field_def = variant.field_named(field.node.name);
self.check_field(use_ctxt, field.span, adt, field_def);
}
use syntax::ast::{self, Block, ForeignItem, ForeignItemKind, Item, ItemKind, NodeId};
use syntax::ast::{Mutability, StmtKind, TraitItem, TraitItemKind, Variant};
-use syntax::codemap::respan;
use syntax::ext::base::SyntaxExtension;
use syntax::ext::base::Determinacy::Undetermined;
use syntax::ext::hygiene::Mark;
let mut module_path: Vec<_> = prefix.segments.iter()
.chain(path.segments.iter())
- .map(|seg| respan(seg.span, seg.identifier))
+ .map(|seg| seg.ident)
.collect();
match use_tree.kind {
ast::UseTreeKind::Simple(rename) => {
let mut ident = use_tree.ident();
- let mut source = module_path.pop().unwrap().node;
+ let mut source = module_path.pop().unwrap();
let mut type_ns_only = false;
if nested {
type_ns_only = true;
let last_segment = *module_path.last().unwrap();
- if last_segment.node.name == keywords::CrateRoot.name() {
+ if last_segment.name == keywords::CrateRoot.name() {
resolve_error(
self,
use_tree.span,
// Replace `use foo::self;` with `use foo;`
let _ = module_path.pop();
- source = last_segment.node;
+ source = last_segment;
if rename.is_none() {
- ident = last_segment.node;
+ ident = last_segment;
}
}
} else {
// Disallow `use $crate;`
if source.name == keywords::DollarCrate.name() && path.segments.len() == 1 {
- let crate_root = self.resolve_crate_root(source.ctxt, true);
+ let crate_root = self.resolve_crate_root(source.span.ctxt(), true);
let crate_name = match crate_root.kind {
ModuleKind::Def(_, name) => name,
ModuleKind::Block(..) => unreachable!(),
}
ast::UseTreeKind::Nested(ref items) => {
let prefix = ast::Path {
- segments: module_path.iter()
- .map(|s| ast::PathSegment {
- identifier: s.node,
- span: s.span,
- parameters: None,
- })
+ segments: module_path.into_iter()
+ .map(|ident| ast::PathSegment::from_ident(ident))
.collect(),
span: path.span,
};
parent: Module<'a>,
vis: ty::Visibility,
expansion: Mark) {
- let ident = variant.node.name;
+ let ident = variant.node.ident;
let def_id = self.definitions.local_def_id(variant.node.data.id());
// Define a name in the type namespace.
ForeignItemKind::Ty => {
(Def::TyForeign(self.definitions.local_def_id(item.id)), TypeNS)
}
+ ForeignItemKind::Macro(_) => unreachable!(),
};
let parent = self.current_module;
let vis = self.resolve_visibility(&item.vis);
match attr.meta_item_list() {
Some(names) => for attr in names {
if let Some(word) = attr.word() {
- imports.imports.push((word.name(), attr.span()));
+ imports.imports.push((word.ident.name, attr.span()));
} else {
span_err!(self.session, attr.span(), E0466, "bad macro import");
}
if let Some(names) = attr.meta_item_list() {
for attr in names {
if let Some(word) = attr.word() {
- imports.reexports.push((word.name(), attr.span()));
+ imports.reexports.push((word.ident.name, attr.span()));
} else {
bad_macro_reexport(self, attr.span());
}
}
fn visit_foreign_item(&mut self, foreign_item: &'a ForeignItem) {
+ if let ForeignItemKind::Macro(_) = foreign_item.node {
+ self.visit_invoc(foreign_item.id);
+ return;
+ }
+
self.resolver.build_reduced_graph_for_foreign_item(foreign_item, self.expansion);
visit::walk_foreign_item(self, foreign_item);
}
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(rustc_diagnostic_macros)]
use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap};
use rustc::util::nodemap::{NodeMap, NodeSet, FxHashMap, FxHashSet, DefIdMap};
-use syntax::codemap::{dummy_spanned, respan, BytePos, CodeMap};
+use syntax::codemap::{BytePos, CodeMap};
use syntax::ext::hygiene::{Mark, MarkKind, SyntaxContext};
-use syntax::ast::{self, Name, NodeId, Ident, SpannedIdent, FloatTy, IntTy, UintTy};
+use syntax::ast::{self, Name, NodeId, Ident, FloatTy, IntTy, UintTy};
use syntax::ext::base::SyntaxExtension;
use syntax::ext::base::Determinacy::{self, Determined, Undetermined};
use syntax::ext::base::MacroKind;
/// a new local type parameter.
///
/// For instance:
-/// ```
+/// ```rust,ignore (pseudo-Rust)
/// // Given span
/// fn my_function(param: T)
-/// ^ Original span
+/// // ^ Original span
///
/// // Result
/// fn my_function(param: T)
-/// ^^^^^^^^^^^ Generated span with snippet `my_function<T>`
+/// // ^^^^^^^^^^^ Generated span with snippet `my_function<T>`
/// ```
///
/// Attention: The method used is very fragile since it essentially duplicates the work of the
// `visit::walk_variant` without the discriminant expression.
self.visit_variant_data(&variant.node.data,
- variant.node.name,
+ variant.node.ident,
generics,
item_id,
variant.span);
}
ForeignItemKind::Static(..) => NoTypeParameters,
ForeignItemKind::Ty => NoTypeParameters,
+ ForeignItemKind::Macro(..) => NoTypeParameters,
};
self.with_type_parameter_rib(type_parameters, |this| {
visit::walk_foreign_item(this, foreign_item);
{
let namespace = if is_value { ValueNS } else { TypeNS };
let hir::Path { ref segments, span, ref mut def } = *path;
- let path: Vec<SpannedIdent> = segments.iter()
- .map(|seg| respan(span, Ident::with_empty_ctxt(seg.name)))
+ let path: Vec<Ident> = segments.iter()
+ .map(|seg| Ident::new(seg.name, span))
.collect();
match self.resolve_path(&path, Some(namespace), true, span) {
PathResult::Module(module) => *def = module.def().unwrap(),
path_span: Span)
-> Option<LexicalScopeBinding<'a>> {
if ns == TypeNS {
- ident.ctxt = if ident.name == keywords::SelfType.name() {
- SyntaxContext::empty() // FIXME(jseyfried) improve `Self` hygiene
+ ident.span = if ident.name == keywords::SelfType.name() {
+ // FIXME(jseyfried) improve `Self` hygiene
+ ident.span.with_ctxt(SyntaxContext::empty())
} else {
- ident.ctxt.modern()
+ ident.span.modern()
}
}
module = match self.ribs[ns][i].kind {
ModuleRibKind(module) => module,
- MacroDefinition(def) if def == self.macro_def(ident.ctxt) => {
+ MacroDefinition(def) if def == self.macro_def(ident.span.ctxt()) => {
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
- ident.ctxt.remove_mark();
+ ident.span.remove_mark();
continue
}
_ => continue,
}
}
- ident.ctxt = ident.ctxt.modern();
+ ident.span = ident.span.modern();
loop {
- module = unwrap_or!(self.hygienic_lexical_parent(module, &mut ident.ctxt), break);
+ module = unwrap_or!(self.hygienic_lexical_parent(module, &mut ident.span), break);
let orig_current_module = self.current_module;
self.current_module = module; // Lexical resolutions can never be a privacy error.
let result = self.resolve_ident_in_module_unadjusted(
}
}
- fn hygienic_lexical_parent(&mut self, mut module: Module<'a>, ctxt: &mut SyntaxContext)
+ fn hygienic_lexical_parent(&mut self, mut module: Module<'a>, span: &mut Span)
-> Option<Module<'a>> {
- if !module.expansion.is_descendant_of(ctxt.outer()) {
- return Some(self.macro_def_scope(ctxt.remove_mark()));
+ if !module.expansion.is_descendant_of(span.ctxt().outer()) {
+ return Some(self.macro_def_scope(span.remove_mark()));
}
if let ModuleKind::Block(..) = module.kind {
let parent_expansion = parent.expansion.modern();
if module_expansion.is_descendant_of(parent_expansion) &&
parent_expansion != module_expansion {
- return if parent_expansion.is_descendant_of(ctxt.outer()) {
+ return if parent_expansion.is_descendant_of(span.ctxt().outer()) {
Some(parent)
} else {
None
record_used: bool,
span: Span)
-> Result<&'a NameBinding<'a>, Determinacy> {
- ident.ctxt = ident.ctxt.modern();
+ ident.span = ident.span.modern();
let orig_current_module = self.current_module;
- if let Some(def) = ident.ctxt.adjust(module.expansion) {
+ if let Some(def) = ident.span.adjust(module.expansion) {
self.current_module = self.macro_def_scope(def);
}
let result = self.resolve_ident_in_module_unadjusted(
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
MacroDefinition(def) => {
- if def == self.macro_def(ident.ctxt) {
- ident.ctxt.remove_mark();
+ if def == self.macro_def(ident.span.ctxt()) {
+ ident.span.remove_mark();
}
}
_ => {
ident.name,
span,
);
- resolve_error(self, type_parameter.span, err);
+ resolve_error(self, type_parameter.ident.span, err);
}
- seen_bindings.entry(ident).or_insert(type_parameter.span);
+ seen_bindings.entry(ident).or_insert(type_parameter.ident.span);
// plain insert (no renaming)
let def_id = self.definitions.local_def_id(type_parameter.id);
let mut new_id = None;
if let Some(trait_ref) = opt_trait_ref {
let path: Vec<_> = trait_ref.path.segments.iter()
- .map(|seg| respan(seg.span, seg.identifier))
+ .map(|seg| seg.ident)
.collect();
let def = self.smart_resolve_path_fragment(
trait_ref.ref_id,
_ => false,
} {
let binding_info = BindingInfo { span: ident.span, binding_mode: binding_mode };
- binding_map.insert(ident.node, binding_info);
+ binding_map.insert(ident, binding_info);
}
}
true
}
fn fresh_binding(&mut self,
- ident: &SpannedIdent,
+ ident: Ident,
pat_id: NodeId,
outer_pat_id: NodeId,
pat_src: PatternSource,
// because that breaks the assumptions later
// passes make about or-patterns.)
let mut def = Def::Local(pat_id);
- match bindings.get(&ident.node).cloned() {
+ match bindings.get(&ident).cloned() {
Some(id) if id == outer_pat_id => {
// `Variant(a, a)`, error
resolve_error(
self,
ident.span,
ResolutionError::IdentifierBoundMoreThanOnceInSamePattern(
- &ident.node.name.as_str())
+ &ident.name.as_str())
);
}
Some(..) if pat_src == PatternSource::FnParam => {
self,
ident.span,
ResolutionError::IdentifierBoundMoreThanOnceInParameterList(
- &ident.node.name.as_str())
+ &ident.name.as_str())
);
}
Some(..) if pat_src == PatternSource::Match ||
pat_src == PatternSource::WhileLet => {
// `Variant1(a) | Variant2(a)`, ok
// Reuse definition from the first `a`.
- def = self.ribs[ValueNS].last_mut().unwrap().bindings[&ident.node];
+ def = self.ribs[ValueNS].last_mut().unwrap().bindings[&ident];
}
Some(..) => {
span_bug!(ident.span, "two bindings with the same name from \
}
None => {
// A completely fresh binding, add to the lists if it's valid.
- if ident.node.name != keywords::Invalid.name() {
- bindings.insert(ident.node, outer_pat_id);
- self.ribs[ValueNS].last_mut().unwrap().bindings.insert(ident.node, def);
+ if ident.name != keywords::Invalid.name() {
+ bindings.insert(ident, outer_pat_id);
+ self.ribs[ValueNS].last_mut().unwrap().bindings.insert(ident, def);
}
}
}
let outer_pat_id = pat.id;
pat.walk(&mut |pat| {
match pat.node {
- PatKind::Ident(bmode, ref ident, ref opt_pat) => {
+ PatKind::Ident(bmode, ident, ref opt_pat) => {
// First try to resolve the identifier as some existing
// entity, then fall back to a fresh binding.
- let binding = self.resolve_ident_in_lexical_scope(ident.node, ValueNS,
+ let binding = self.resolve_ident_in_lexical_scope(ident, ValueNS,
false, pat.span)
.and_then(LexicalScopeBinding::item);
let resolution = binding.map(NameBinding::def).and_then(|def| {
Def::Const(..) if is_syntactic_ambiguity => {
// Disambiguate in favor of a unit struct/variant
// or constant pattern.
- self.record_use(ident.node, ValueNS, binding.unwrap(), ident.span);
+ self.record_use(ident, ValueNS, binding.unwrap(), ident.span);
Some(PathResolution::new(def))
}
Def::StructCtor(..) | Def::VariantCtor(..) |
self,
ident.span,
ResolutionError::BindingShadowsSomethingUnacceptable(
- pat_src.descr(), ident.node.name, binding.unwrap())
+ pat_src.descr(), ident.name, binding.unwrap())
);
None
}
source: PathSource)
-> PathResolution {
let segments = &path.segments.iter()
- .map(|seg| respan(seg.span, seg.identifier))
+ .map(|seg| seg.ident)
.collect::<Vec<_>>();
self.smart_resolve_path_fragment(id, qself, segments, path.span, source)
}
fn smart_resolve_path_fragment(&mut self,
id: NodeId,
qself: Option<&QSelf>,
- path: &[SpannedIdent],
+ path: &[Ident],
span: Span,
source: PathSource)
-> PathResolution {
format!("not a {}", expected),
span)
} else {
- let item_str = path[path.len() - 1].node;
+ let item_str = path[path.len() - 1];
let item_span = path[path.len() - 1].span;
let (mod_prefix, mod_str) = if path.len() == 1 {
(format!(""), format!("this scope"))
- } else if path.len() == 2 && path[0].node.name == keywords::CrateRoot.name() {
+ } else if path.len() == 2 && path[0].name == keywords::CrateRoot.name() {
(format!(""), format!("the crate root"))
} else {
let mod_path = &path[..path.len() - 1];
// Try to lookup the name in more relaxed fashion for better error reporting.
let ident = *path.last().unwrap();
- let candidates = this.lookup_import_candidates(ident.node.name, ns, is_expected);
+ let candidates = this.lookup_import_candidates(ident.name, ns, is_expected);
if candidates.is_empty() && is_expected(Def::Enum(DefId::local(CRATE_DEF_INDEX))) {
let enum_candidates =
- this.lookup_import_candidates(ident.node.name, ns, is_enum_variant);
+ this.lookup_import_candidates(ident.name, ns, is_enum_variant);
let mut enum_candidates = enum_candidates.iter()
.map(|suggestion| import_candidate_to_paths(&suggestion)).collect::<Vec<_>>();
enum_candidates.sort();
}
}
if path.len() == 1 && this.self_type_is_available(span) {
- if let Some(candidate) = this.lookup_assoc_candidate(ident.node, ns, is_expected) {
- let self_is_available = this.self_value_is_available(path[0].node.ctxt, span);
+ if let Some(candidate) = this.lookup_assoc_candidate(ident, ns, is_expected) {
+ let self_is_available = this.self_value_is_available(path[0].span, span);
match candidate {
AssocSuggestion::Field => {
err.span_suggestion(span, "try",
(Def::Mod(..), PathSource::Expr(Some(parent))) => match parent.node {
ExprKind::Field(_, ident) => {
err.span_label(parent.span, format!("did you mean `{}::{}`?",
- path_str, ident.node));
+ path_str, ident));
return (err, candidates);
}
ExprKind::MethodCall(ref segment, ..) => {
err.span_label(parent.span, format!("did you mean `{}::{}(...)`?",
- path_str, segment.identifier));
+ path_str, segment.ident));
return (err, candidates);
}
_ => {}
// or `<T>::A::B`. If `B` should be resolved in value namespace then
// it needs to be added to the trait map.
if ns == ValueNS {
- let item_name = path.last().unwrap().node;
+ let item_name = *path.last().unwrap();
let traits = self.get_traits_containing_item(item_name, ns);
self.trait_map.insert(id, traits);
}
if let Some(LexicalScopeBinding::Def(def)) = binding { def != Def::Err } else { false }
}
- fn self_value_is_available(&mut self, ctxt: SyntaxContext, span: Span) -> bool {
- let ident = Ident { name: keywords::SelfValue.name(), ctxt: ctxt };
- let binding = self.resolve_ident_in_lexical_scope(ident, ValueNS, false, span);
+ fn self_value_is_available(&mut self, self_span: Span, path_span: Span) -> bool {
+ let ident = Ident::new(keywords::SelfValue.name(), self_span);
+ let binding = self.resolve_ident_in_lexical_scope(ident, ValueNS, false, path_span);
if let Some(LexicalScopeBinding::Def(def)) = binding { def != Def::Err } else { false }
}
fn resolve_qpath_anywhere(&mut self,
id: NodeId,
qself: Option<&QSelf>,
- path: &[SpannedIdent],
+ path: &[Ident],
primary_ns: Namespace,
span: Span,
defer_to_typeck: bool,
};
}
}
- let is_global = self.global_macros.get(&path[0].node.name).cloned()
+ let is_global = self.global_macros.get(&path[0].name).cloned()
.map(|binding| binding.get_macro(self).kind() == MacroKind::Bang).unwrap_or(false);
if primary_ns != MacroNS && (is_global ||
- self.macro_names.contains(&path[0].node.modern())) {
+ self.macro_names.contains(&path[0].modern())) {
// Return some dummy definition, it's enough for error reporting.
return Some(
PathResolution::new(Def::Macro(DefId::local(CRATE_DEF_INDEX), MacroKind::Bang))
fn resolve_qpath(&mut self,
id: NodeId,
qself: Option<&QSelf>,
- path: &[SpannedIdent],
+ path: &[Ident],
ns: Namespace,
span: Span,
global_by_default: bool)
PathResult::Module(..) | PathResult::Failed(..)
if (ns == TypeNS || path.len() > 1) &&
self.primitive_type_table.primitive_types
- .contains_key(&path[0].node.name) => {
- let prim = self.primitive_type_table.primitive_types[&path[0].node.name];
+ .contains_key(&path[0].name) => {
+ let prim = self.primitive_type_table.primitive_types[&path[0].name];
PathResolution::with_unresolved_segments(Def::PrimTy(prim), path.len() - 1)
}
PathResult::Module(module) => PathResolution::new(module.def().unwrap()),
};
if path.len() > 1 && !global_by_default && result.base_def() != Def::Err &&
- path[0].node.name != keywords::CrateRoot.name() &&
- path[0].node.name != keywords::DollarCrate.name() {
+ path[0].name != keywords::CrateRoot.name() &&
+ path[0].name != keywords::DollarCrate.name() {
let unqualified_result = {
match self.resolve_path(&[*path.last().unwrap()], Some(ns), false, span) {
PathResult::NonModule(path_res) => path_res.base_def(),
}
fn resolve_path(&mut self,
- path: &[SpannedIdent],
+ path: &[Ident],
opt_ns: Option<Namespace>, // `None` indicates a module path
record_used: bool,
path_span: Span)
debug!("resolve_path ident {} {:?}", i, ident);
let is_last = i == path.len() - 1;
let ns = if is_last { opt_ns.unwrap_or(TypeNS) } else { TypeNS };
- let name = ident.node.name;
+ let name = ident.name;
if i == 0 && ns == TypeNS && name == keywords::SelfValue.name() {
- let mut ctxt = ident.node.ctxt.modern();
+ let mut ctxt = ident.span.ctxt().modern();
module = Some(self.resolve_self(&mut ctxt, self.current_module));
continue
} else if allow_super && ns == TypeNS && name == keywords::Super.name() {
- let mut ctxt = ident.node.ctxt.modern();
+ let mut ctxt = ident.span.ctxt().modern();
let self_module = match i {
0 => self.resolve_self(&mut ctxt, self.current_module),
_ => module.unwrap(),
if ns == TypeNS {
if (i == 0 && name == keywords::CrateRoot.name()) ||
(i == 1 && name == keywords::Crate.name() &&
- path[0].node.name == keywords::CrateRoot.name()) {
+ path[0].name == keywords::CrateRoot.name()) {
// `::a::b` or `::crate::a::b`
- module = Some(self.resolve_crate_root(ident.node.ctxt, false));
+ module = Some(self.resolve_crate_root(ident.span.ctxt(), false));
continue
} else if i == 0 && name == keywords::DollarCrate.name() {
// `$crate::a::b`
- module = Some(self.resolve_crate_root(ident.node.ctxt, true));
+ module = Some(self.resolve_crate_root(ident.span.ctxt(), true));
continue
- } else if i == 1 && !token::is_path_segment_keyword(ident.node) {
- let prev_name = path[0].node.name;
+ } else if i == 1 && !token::is_path_segment_keyword(ident) {
+ let prev_name = path[0].name;
if prev_name == keywords::Extern.name() ||
prev_name == keywords::CrateRoot.name() &&
self.session.features_untracked().extern_absolute_paths {
name == keywords::Super.name() && i != 0 ||
name == keywords::Extern.name() && i != 0 ||
name == keywords::Crate.name() && i != 1 &&
- path[0].node.name != keywords::CrateRoot.name() {
+ path[0].name != keywords::CrateRoot.name() {
let name_str = if name == keywords::CrateRoot.name() {
format!("crate root")
} else {
format!("`{}`", name)
};
- let msg = if i == 1 && path[0].node.name == keywords::CrateRoot.name() {
+ let msg = if i == 1 && path[0].name == keywords::CrateRoot.name() {
format!("global paths cannot start with {}", name_str)
} else if i == 0 && name == keywords::Crate.name() {
format!("{} can only be used in absolute paths", name_str)
}
let binding = if let Some(module) = module {
- self.resolve_ident_in_module(module, ident.node, ns, false, record_used, path_span)
+ self.resolve_ident_in_module(module, ident, ns, false, record_used, path_span)
} else if opt_ns == Some(MacroNS) {
- self.resolve_lexical_macro_path_segment(ident.node, ns, record_used, path_span)
+ self.resolve_lexical_macro_path_segment(ident, ns, record_used, path_span)
.map(MacroBinding::binding)
} else {
- match self.resolve_ident_in_lexical_scope(ident.node, ns, record_used, path_span) {
+ match self.resolve_ident_in_lexical_scope(ident, ns, record_used, path_span) {
Some(LexicalScopeBinding::Item(binding)) => Ok(binding),
Some(LexicalScopeBinding::Def(def))
if opt_ns == Some(TypeNS) || opt_ns == Some(ValueNS) => {
));
} else {
return PathResult::Failed(ident.span,
- format!("Not a module `{}`", ident.node),
+ format!("Not a module `{}`", ident),
is_last);
}
}
if let Some(candidate) = candidates.get(0) {
format!("Did you mean `{}`?", candidate.path)
} else {
- format!("Maybe a missing `extern crate {};`?", ident.node)
+ format!("Maybe a missing `extern crate {};`?", ident)
}
} else if i == 0 {
- format!("Use of undeclared type or module `{}`", ident.node)
+ format!("Use of undeclared type or module `{}`", ident)
} else {
- format!("Could not find `{}` in `{}`", ident.node, path[i - 1].node)
+ format!("Could not find `{}` in `{}`", ident, path[i - 1])
};
return PathResult::Failed(ident.span, msg, is_last);
}
}
fn lookup_typo_candidate<FilterFn>(&mut self,
- path: &[SpannedIdent],
+ path: &[Ident],
ns: Namespace,
filter_fn: FilterFn,
span: Span)
}
}
- let name = path[path.len() - 1].node.name;
+ let name = path[path.len() - 1].name;
// Make sure error reporting is deterministic.
names.sort_by_key(|name| name.as_str());
match find_best_match_for_name(names.iter(), &name.as_str(), None) {
});
self.record_def(expr.id, err_path_resolution());
resolve_error(self,
- label.span,
+ label.ident.span,
ResolutionError::UndeclaredLabel(&label.ident.name.as_str(),
close_match));
}
fn record_candidate_traits_for_expr_if_necessary(&mut self, expr: &Expr) {
match expr.node {
- ExprKind::Field(_, name) => {
+ ExprKind::Field(_, ident) => {
// FIXME(#6890): Even though you can't treat a method like a
// field, we need to add any trait methods we find that match
// the field name so that we can do some nice error reporting
// later on in typeck.
- let traits = self.get_traits_containing_item(name.node, ValueNS);
+ let traits = self.get_traits_containing_item(ident, ValueNS);
self.trait_map.insert(expr.id, traits);
}
ExprKind::MethodCall(ref segment, ..) => {
debug!("(recording candidate traits for expr) recording traits for {}",
expr.id);
- let traits = self.get_traits_containing_item(segment.identifier, ValueNS);
+ let traits = self.get_traits_containing_item(segment.ident, ValueNS);
self.trait_map.insert(expr.id, traits);
}
_ => {
}
}
- ident.ctxt = ident.ctxt.modern();
+ ident.span = ident.span.modern();
let mut search_module = self.current_module;
loop {
self.get_traits_in_module_containing_item(ident, ns, search_module, &mut found_traits);
search_module =
- unwrap_or!(self.hygienic_lexical_parent(search_module, &mut ident.ctxt), break);
+ unwrap_or!(self.hygienic_lexical_parent(search_module, &mut ident.span), break);
}
if let Some(prelude) = self.prelude {
for &(trait_name, binding) in traits.as_ref().unwrap().iter() {
let module = binding.module().unwrap();
let mut ident = ident;
- if ident.ctxt.glob_adjust(module.expansion, binding.span.ctxt().modern()).is_none() {
+ if ident.span.glob_adjust(module.expansion, binding.span.ctxt().modern()).is_none() {
continue
}
if self.resolve_ident_in_module_unadjusted(module, ident, ns, false, false, module.span)
if filter_fn(name_binding.def()) {
// create the path
let mut segms = path_segments.clone();
- segms.push(ast::PathSegment::from_ident(ident, name_binding.span));
+ segms.push(ast::PathSegment::from_ident(ident));
let path = Path {
span: name_binding.span,
segments: segms,
if let Some(module) = name_binding.module() {
// form the path
let mut path_segments = path_segments.clone();
- path_segments.push(ast::PathSegment::from_ident(ident, name_binding.span));
+ path_segments.push(ast::PathSegment::from_ident(ident));
if !in_module_is_extern || name_binding.vis == ty::Visibility::Public {
// add the module to the lookup
if let Some(module) = name_binding.module() {
// form the path
let mut path_segments = path_segments.clone();
- path_segments.push(ast::PathSegment::from_ident(ident, name_binding.span));
+ path_segments.push(ast::PathSegment::from_ident(ident));
if module.def() == Some(module_def) {
let path = Path {
span: name_binding.span,
enum_module.for_each_child_stable(|ident, _, name_binding| {
if let Def::Variant(..) = name_binding.def() {
let mut segms = enum_import_suggestion.path.segments.clone();
- segms.push(ast::PathSegment::from_ident(ident, name_binding.span));
+ segms.push(ast::PathSegment::from_ident(ident));
variants.push(Path {
span: name_binding.span,
segments: segms,
ast::VisibilityKind::Restricted { ref path, id, .. } => {
// Visibilities are resolved as global by default, add starting root segment.
let segments = path.make_root().iter().chain(path.segments.iter())
- .map(|seg| respan(seg.span, seg.identifier))
+ .map(|seg| seg.ident)
.collect::<Vec<_>>();
let def = self.smart_resolve_path_fragment(id, None, &segments, path.span,
PathSource::Visibility).base_def();
if attr.path.segments.len() > 1 {
continue
}
- let ident = attr.path.segments[0].identifier;
+ let ident = attr.path.segments[0].ident;
let result = self.resolve_lexical_macro_path_segment(ident,
MacroNS,
false,
}
}
-fn is_self_type(path: &[SpannedIdent], namespace: Namespace) -> bool {
- namespace == TypeNS && path.len() == 1 && path[0].node.name == keywords::SelfType.name()
+fn is_self_type(path: &[Ident], namespace: Namespace) -> bool {
+ namespace == TypeNS && path.len() == 1 && path[0].name == keywords::SelfType.name()
}
-fn is_self_value(path: &[SpannedIdent], namespace: Namespace) -> bool {
- namespace == ValueNS && path.len() == 1 && path[0].node.name == keywords::SelfValue.name()
+fn is_self_value(path: &[Ident], namespace: Namespace) -> bool {
+ namespace == ValueNS && path.len() == 1 && path[0].name == keywords::SelfValue.name()
}
-fn names_to_string(idents: &[SpannedIdent]) -> String {
+fn names_to_string(idents: &[Ident]) -> String {
let mut result = String::new();
for (i, ident) in idents.iter()
- .filter(|i| i.node.name != keywords::CrateRoot.name())
+ .filter(|ident| ident.name != keywords::CrateRoot.name())
.enumerate() {
if i > 0 {
result.push_str("::");
}
- result.push_str(&ident.node.name.as_str());
+ result.push_str(&ident.name.as_str());
}
result
}
fn path_names_to_string(path: &Path) -> String {
names_to_string(&path.segments.iter()
- .map(|seg| respan(seg.span, seg.identifier))
+ .map(|seg| seg.ident)
.collect::<Vec<_>>())
}
}
Some(names_to_string(&names.into_iter()
.rev()
- .map(|n| dummy_spanned(n))
.collect::<Vec<_>>()))
}
use rustc::{ty, lint};
use syntax::ast::{self, Name, Ident};
use syntax::attr::{self, HasAttrs};
-use syntax::codemap::respan;
use syntax::errors::DiagnosticBuilder;
use syntax::ext::base::{self, Annotatable, Determinacy, MultiModifier, MultiDecorator};
use syntax::ext::base::{MacroKind, SyntaxExtension, Resolver as SyntaxResolver};
impl<'a, 'b> Folder for EliminateCrateVar<'a, 'b> {
fn fold_path(&mut self, mut path: ast::Path) -> ast::Path {
- let ident = path.segments[0].identifier;
+ let ident = path.segments[0].ident;
if ident.name == keywords::DollarCrate.name() {
- path.segments[0].identifier.name = keywords::CrateRoot.name();
- let module = self.0.resolve_crate_root(ident.ctxt, true);
+ path.segments[0].ident.name = keywords::CrateRoot.name();
+ let module = self.0.resolve_crate_root(ident.span.ctxt(), true);
if !module.is_local() {
- let span = path.segments[0].span;
+ let span = path.segments[0].ident.span;
path.segments.insert(1, match module.kind {
ModuleKind::Def(_, name) => ast::PathSegment::from_ident(
- ast::Ident::with_empty_ctxt(name), span
+ ast::Ident::with_empty_ctxt(name).with_span_pos(span)
),
_ => unreachable!(),
})
if traits[j].segments.len() > 1 {
continue
}
- let trait_name = traits[j].segments[0].identifier.name;
+ let trait_name = traits[j].segments[0].ident.name;
let legacy_name = Symbol::intern(&format!("derive_{}", trait_name));
if !self.global_macros.contains_key(&legacy_name) {
continue
if k > 0 {
tokens.push(TokenTree::Token(path.span, Token::ModSep).into());
}
- let tok = Token::from_ast_ident(segment.identifier);
+ let tok = Token::from_ast_ident(segment.ident);
tokens.push(TokenTree::Token(path.span, tok).into());
}
}
}).into();
}
return Some(ast::Attribute {
- path: ast::Path::from_ident(span, Ident::with_empty_ctxt(legacy_name)),
+ path: ast::Path::from_ident(Ident::new(legacy_name, span)),
tokens: TokenStream::empty(),
id: attr::mk_attr_id(),
style: ast::AttrStyle::Outer,
}
let attr_name = match path.segments.len() {
- 1 => path.segments[0].identifier.name,
+ 1 => path.segments[0].ident.name,
_ => return Err(determinacy),
};
for path in traits {
kind: MacroKind, force: bool)
-> Result<Def, Determinacy> {
let ast::Path { ref segments, span } = *path;
- let path: Vec<_> = segments.iter().map(|seg| respan(seg.span, seg.identifier)).collect();
+ let path: Vec<_> = segments.iter().map(|seg| seg.ident).collect();
let invocation = self.invocations[&scope];
let module = invocation.module.get();
self.current_module = if module.is_trait() { module.parent.unwrap() } else { module };
Err(Determinacy::Determined)
},
};
- let path = path.iter().map(|p| p.node).collect::<Vec<_>>();
self.current_module.nearest_item_scope().macro_resolutions.borrow_mut()
.push((path.into_boxed_slice(), span));
return def;
}
- let legacy_resolution = self.resolve_legacy_scope(&invocation.legacy_scope,
- path[0].node,
- false);
+ let legacy_resolution = self.resolve_legacy_scope(&invocation.legacy_scope, path[0], false);
let result = if let Some(MacroBinding::Legacy(binding)) = legacy_resolution {
Ok(Def::Macro(binding.def_id, MacroKind::Bang))
} else {
- match self.resolve_lexical_macro_path_segment(path[0].node, MacroNS, false, span) {
+ match self.resolve_lexical_macro_path_segment(path[0], MacroNS, false, span) {
Ok(binding) => Ok(binding.binding().def_ignoring_ambiguity()),
Err(Determinacy::Undetermined) if !force => return Err(Determinacy::Undetermined),
Err(_) => {
};
self.current_module.nearest_item_scope().legacy_macro_resolutions.borrow_mut()
- .push((scope, path[0].node, span, kind));
+ .push((scope, path[0], span, kind));
result
}
}
module = match module {
- Some(module) => self.hygienic_lexical_parent(module, &mut ident.ctxt),
+ Some(module) => self.hygienic_lexical_parent(module, &mut ident.span),
None => return potential_illegal_shadower,
}
}
pub fn finalize_current_module_macro_resolutions(&mut self) {
let module = self.current_module;
for &(ref path, span) in module.macro_resolutions.borrow().iter() {
- let path = path.iter().map(|p| respan(span, *p)).collect::<Vec<_>>();
match self.resolve_path(&path, Some(MacroNS), true, span) {
PathResult::NonModule(_) => {},
PathResult::Failed(span, msg, _) => {
false
}
};
- let ident = Ident::from_str(name);
- self.lookup_typo_candidate(&vec![respan(span, ident)], MacroNS, is_macro, span)
+ let ident = Ident::new(Symbol::intern(name), span);
+ self.lookup_typo_candidate(&vec![ident], MacroNS, is_macro, span)
});
if let Some(suggestion) = suggestion {
use rustc::session::DiagnosticMessageId;
use rustc::util::nodemap::{FxHashMap, FxHashSet};
-use syntax::ast::{Ident, Name, SpannedIdent, NodeId};
+use syntax::ast::{Ident, Name, NodeId};
use syntax::ext::base::Determinacy::{self, Determined, Undetermined};
use syntax::ext::hygiene::Mark;
use syntax::parse::token;
pub struct ImportDirective<'a> {
pub id: NodeId,
pub parent: Module<'a>,
- pub module_path: Vec<SpannedIdent>,
+ pub module_path: Vec<Ident>,
pub imported_module: Cell<Option<Module<'a>>>, // the resolution of `module_path`
pub subclass: ImportDirectiveSubclass<'a>,
pub span: Span,
}
let module = unwrap_or!(directive.imported_module.get(), return Err(Undetermined));
let (orig_current_module, mut ident) = (self.current_module, ident.modern());
- match ident.ctxt.glob_adjust(module.expansion, directive.span.ctxt().modern()) {
+ match ident.span.glob_adjust(module.expansion, directive.span.ctxt().modern()) {
Some(Some(def)) => self.current_module = self.macro_def_scope(def),
Some(None) => {}
None => continue,
// Add an import directive to the current module.
pub fn add_import_directive(&mut self,
- module_path: Vec<SpannedIdent>,
+ module_path: Vec<Ident>,
subclass: ImportDirectiveSubclass<'a>,
span: Span,
id: NodeId,
// Define `binding` in `module`s glob importers.
for directive in module.glob_importers.borrow_mut().iter() {
let mut ident = ident.modern();
- let scope = match ident.ctxt.reverse_glob_adjust(module.expansion,
+ let scope = match ident.span.reverse_glob_adjust(module.expansion,
directive.span.ctxt().modern()) {
Some(Some(def)) => self.macro_def_scope(def),
Some(None) => directive.parent,
// FIXME: Last path segment is treated specially in import resolution, so extern crate
// mode for absolute paths needs some special support for single-segment imports.
- if module_path.len() == 1 && (module_path[0].node.name == keywords::CrateRoot.name() ||
- module_path[0].node.name == keywords::Extern.name()) {
- let is_extern = module_path[0].node.name == keywords::Extern.name() ||
+ if module_path.len() == 1 && (module_path[0].name == keywords::CrateRoot.name() ||
+ module_path[0].name == keywords::Extern.name()) {
+ let is_extern = module_path[0].name == keywords::Extern.name() ||
self.session.features_untracked().extern_absolute_paths;
match directive.subclass {
GlobImport { .. } if is_extern => {
}
SingleImport { source, target, .. } => {
let crate_root = if source.name == keywords::Crate.name() &&
- module_path[0].node.name != keywords::Extern.name() {
+ module_path[0].name != keywords::Extern.name() {
if target.name == keywords::Crate.name() {
return Some((directive.span,
"crate root imports need to be explicitly named: \
`use crate as name;`".to_string()));
} else {
- Some(self.resolve_crate_root(source.ctxt.modern(), false))
+ Some(self.resolve_crate_root(source.span.ctxt().modern(), false))
}
} else if is_extern && !token::is_path_segment_keyword(source) {
let crate_id =
let (mut self_path, mut self_result) = (module_path.clone(), None);
let is_special = |ident| token::is_path_segment_keyword(ident) &&
ident.name != keywords::CrateRoot.name();
- if !self_path.is_empty() && !is_special(self_path[0].node) &&
- !(self_path.len() > 1 && is_special(self_path[1].node)) {
- self_path[0].node.name = keywords::SelfValue.name();
+ if !self_path.is_empty() && !is_special(self_path[0]) &&
+ !(self_path.len() > 1 && is_special(self_path[1])) {
+ self_path[0].name = keywords::SelfValue.name();
self_result = Some(self.resolve_path(&self_path, None, false, span));
}
return if let Some(PathResult::Module(..)) = self_result {
resolution.borrow().binding().map(|binding| (ident, binding))
}).collect::<Vec<_>>();
for ((mut ident, ns), binding) in bindings {
- let scope = match ident.ctxt.reverse_glob_adjust(module.expansion,
+ let scope = match ident.span.reverse_glob_adjust(module.expansion,
directive.span.ctxt().modern()) {
Some(Some(def)) => self.macro_def_scope(def),
Some(None) => self.current_module,
let resolutions = imported_module.parent.expect("parent should exist")
.resolutions.borrow();
let enum_path_segment_index = directive.module_path.len() - 1;
- let enum_ident = directive.module_path[enum_path_segment_index].node;
+ let enum_ident = directive.module_path[enum_path_segment_index];
let enum_resolution = resolutions.get(&(enum_ident, TypeNS))
.expect("resolution should exist");
}
}
-fn import_path_to_string(names: &[SpannedIdent],
+fn import_path_to_string(names: &[Ident],
subclass: &ImportDirectiveSubclass,
span: Span) -> String {
let pos = names.iter()
- .position(|p| span == p.span && p.node.name != keywords::CrateRoot.name());
- let global = !names.is_empty() && names[0].node.name == keywords::CrateRoot.name();
+ .position(|p| span == p.span && p.name != keywords::CrateRoot.name());
+ let global = !names.is_empty() && names[0].name == keywords::CrateRoot.name();
if let Some(pos) = pos {
let names = if global { &names[1..pos + 1] } else { &names[..pos + 1] };
names_to_string(names)
//!
//! SpanUtils is used to manipulate spans. In particular, to extract sub-spans
//! from spans (e.g., the span for `bar` from the above example path).
-//! DumpVisitor walks the AST and processes it, and an implementor of Dump
-//! is used for recording the output in a format-agnostic way (see CsvDumper
-//! for an example).
+//! DumpVisitor walks the AST and processes it, and JsonDumper is used for
+//! recording the output.
use rustc::hir::def::Def as HirDef;
use rustc::hir::def_id::DefId;
for (i, seg) in segments.iter().enumerate() {
segs.push(seg.clone());
let sub_path = ast::Path {
- span: seg.span, // span for the last segment
+ span: seg.ident.span, // span for the last segment
segments: segs,
};
let qualname = if i == 0 && path.is_global() {
} else {
path_to_string(&sub_path)
};
- result.push((seg.span, qualname));
+ result.push((seg.ident.span, qualname));
segs = sub_path.segments;
}
collector.visit_pat(&arg.pat);
let span_utils = self.span.clone();
- for (id, i, sp, ..) in collector.collected_idents {
+ for (id, ident, ..) in collector.collected_idents {
let hir_id = self.tcx.hir.node_to_hir_id(id);
let typ = match self.save_ctxt.tables.node_id_to_type_opt(hir_id) {
Some(s) => s.to_string(),
None => continue,
};
- let sub_span = span_utils.span_for_last_ident(sp);
- if !self.span.filter_generated(sub_span, sp) {
+ let sub_span = span_utils.span_for_last_ident(ident.span);
+ if !self.span.filter_generated(sub_span, ident.span) {
let id = ::id_from_node_id(id, &self.save_ctxt);
let span = self.span_from_span(sub_span.expect("No span found for variable"));
kind: DefKind::Local,
id,
span,
- name: i.to_string(),
- qualname: format!("{}::{}", qualname, i.to_string()),
+ name: ident.to_string(),
+ qualname: format!("{}::{}", qualname, ident.to_string()),
value: typ,
parent: None,
children: vec![],
) {
for param in &generics.params {
if let ast::GenericParam::Type(ref ty_param) = *param {
- let param_ss = ty_param.span;
+ let param_ss = ty_param.ident.span;
let name = escape(self.span.snippet(param_ss));
// Append $id to name to make sure each one is unique
let qualname = format!("{}::{}${}", prefix, name, id);
let access = access_from!(self.save_ctxt, item);
for variant in &enum_definition.variants {
- let name = variant.node.name.name.to_string();
+ let name = variant.node.ident.name.to_string();
let mut qualname = enum_data.qualname.clone();
qualname.push_str("::");
qualname.push_str(&name);
}
// process collected paths
- for (id, i, sp, immut) in collector.collected_idents {
+ for (id, ident, immut) in collector.collected_idents {
match self.save_ctxt.get_path_def(id) {
HirDef::Local(id) => {
let mut value = if immut == ast::Mutability::Immutable {
- self.span.snippet(sp).to_string()
+ self.span.snippet(ident.span).to_string()
} else {
"<mutable>".to_string()
};
value.push_str(": ");
value.push_str(&typ);
- if !self.span.filter_generated(Some(sp), sp) {
- let qualname = format!("{}${}", i.to_string(), id);
+ if !self.span.filter_generated(Some(ident.span), ident.span) {
+ let qualname = format!("{}${}", ident.to_string(), id);
let id = ::id_from_node_id(id, &self.save_ctxt);
- let span = self.span_from_span(sp);
+ let span = self.span_from_span(ident.span);
self.dumper.dump_def(
&Access {
kind: DefKind::Local,
id,
span,
- name: i.to_string(),
+ name: ident.to_string(),
qualname,
value: typ,
parent: None,
HirDef::TyAlias(..) |
HirDef::AssociatedTy(..) |
HirDef::SelfTy(..) => {
- self.dump_path_ref(id, &ast::Path::from_ident(sp, i));
+ self.dump_path_ref(id, &ast::Path::from_ident(ident));
}
def => error!(
"unexpected definition kind when processing collected idents: {:?}",
collector.visit_pat(&p);
self.visit_pat(&p);
- for (id, i, sp, immut) in collector.collected_idents {
+ for (id, ident, immut) in collector.collected_idents {
let mut value = match immut {
ast::Mutability::Immutable => value.to_string(),
_ => String::new(),
// Get the span only for the name of the variable (I hope the path
// is only ever a variable name, but who knows?).
- let sub_span = self.span.span_for_last_ident(sp);
+ let sub_span = self.span.span_for_last_ident(ident.span);
// Rust uses the id of the pattern for var lookups, so we'll use it too.
- if !self.span.filter_generated(sub_span, sp) {
- let qualname = format!("{}${}", i.to_string(), id);
+ if !self.span.filter_generated(sub_span, ident.span) {
+ let qualname = format!("{}${}", ident.to_string(), id);
let id = ::id_from_node_id(id, &self.save_ctxt);
let span = self.span_from_span(sub_span.expect("No span found for variable"));
kind: DefKind::Local,
id,
span,
- name: i.to_string(),
+ name: ident.to_string(),
qualname,
value: typ,
parent: None,
self.dumper.dump_def(&access, var_data);
}
}
+ ast::ForeignItemKind::Macro(..) => {}
}
}
}
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(custom_attribute)]
#![feature(macro_lifetime_matcher)]
#![allow(unused_attributes)]
}
// FIXME(plietar): needs a new DefKind in rls-data
ast::ForeignItemKind::Ty => None,
+ ast::ForeignItemKind::Macro(..) => None,
}
}
filter!(self.span_utils, sub_span, item.span, None);
let variants_str = def.variants
.iter()
- .map(|v| v.node.name.to_string())
+ .map(|v| v.node.ident.to_string())
.collect::<Vec<_>>()
.join(", ");
let value = format!("{}::{{{}}}", name, variants_str);
};
match self.tables.expr_ty_adjusted(&hir_node).sty {
ty::TyAdt(def, _) if !def.is_enum() => {
- let f = def.non_enum_variant().field_named(ident.node.name);
+ let f = def.non_enum_variant().field_named(ident.name);
let sub_span = self.span_utils.span_for_last_ident(expr.span);
filter!(self.span_utils, sub_span, expr.span, None);
let span = self.span_from_span(sub_span.unwrap());
ty::ImplContainer(_) => (Some(method_id), None),
ty::TraitContainer(_) => (None, Some(method_id)),
};
- let sub_span = seg.span;
+ let sub_span = seg.ident.span;
filter!(self.span_utils, Some(sub_span), expr.span, None);
let span = self.span_from_span(sub_span);
Some(Data::RefData(Ref {
let def = self.get_path_def(id);
let last_seg = &path.segments[path.segments.len() - 1];
- let sub_span = last_seg.span;
+ let sub_span = last_seg.ident.span;
filter!(self.span_utils, Some(sub_span), path.span, None);
match def {
HirDef::Upvar(id, ..) | HirDef::Local(id) => {
field_ref: &ast::Field,
variant: &ty::VariantDef,
) -> Option<Ref> {
- let f = variant.find_field_named(field_ref.ident.node.name)?;
+ let f = variant.find_field_named(field_ref.ident.name)?;
// We don't really need a sub-span here, but no harm done
let sub_span = self.span_utils.span_for_last_ident(field_ref.ident.span);
filter!(self.span_utils, sub_span, field_ref.ident.span, None);
// variables (idents) from patterns.
struct PathCollector<'l> {
collected_paths: Vec<(NodeId, &'l ast::Path)>,
- collected_idents: Vec<(NodeId, ast::Ident, Span, ast::Mutability)>,
+ collected_idents: Vec<(NodeId, ast::Ident, ast::Mutability)>,
}
impl<'l> PathCollector<'l> {
PatKind::TupleStruct(ref path, ..) | PatKind::Path(_, ref path) => {
self.collected_paths.push((p.id, path));
}
- PatKind::Ident(bm, ref path1, _) => {
+ PatKind::Ident(bm, ident, _) => {
debug!(
"PathCollector, visit ident in pat {}: {:?} {:?}",
- path1.node,
+ ident,
p.span,
- path1.span
+ ident.span
);
let immut = match bm {
// Even if the ref is mut, you can't change the ref, only
ast::BindingMode::ByValue(mt) => mt,
};
self.collected_idents
- .push((p.id, path1.node, path1.span, immut));
+ .push((p.id, ident, immut));
}
_ => {}
}
fn make(&self, offset: usize, _parent_id: Option<NodeId>, scx: &SaveContext) -> Result {
let mut text = String::new();
let mut defs = None;
- if let Some(ref ident) = self.ident {
+ if let Some(ident) = self.ident {
text.push_str(&ident.to_string());
defs = Some(SigElement {
id: id_from_node_id(self.id, scx),
impl Sig for ast::Variant_ {
fn make(&self, offset: usize, _parent_id: Option<NodeId>, scx: &SaveContext) -> Result {
- let mut text = self.name.to_string();
+ let mut text = self.ident.to_string();
match self.data {
ast::VariantData::Struct(ref fields, id) => {
let name_def = SigElement {
refs: vec![],
})
}
+ ast::ForeignItemKind::Macro(..) => Err("macro"),
}
}
}
//! New recursive solver modeled on Chalk's recursive solver. Most of
//! the guts are broken up into modules; see the comments in those modules.
-#![deny(warnings)]
-
#![feature(crate_visibility_modifier)]
-#![cfg_attr(stage0, feature(match_default_bindings))]
-#![cfg_attr(stage0, feature(underscore_lifetimes))]
#[macro_use]
extern crate log;
use rustc::hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc::ty::{self, TyCtxt};
use rustc::ty::subst::Substs;
-use rustc::traits::{QuantifierKind, Goal, DomainGoal, Clause, WhereClauseAtom};
+use rustc::traits::{WhereClauseAtom, PolyDomainGoal, DomainGoal, ProgramClause, Clause};
use syntax::ast;
use rustc_data_structures::sync::Lrc;
/// `ty::Binder` is used for wrapping a rustc construction possibly containing generic
/// lifetimes, e.g. `for<'a> T: Fn(&'a i32)`. Instead of representing higher-ranked things
/// in that leaf-form (i.e. `Holds(Implemented(Binder<TraitPredicate>))` in the previous
-/// example), we model them with quantified goals, e.g. as for the previous example:
+/// example), we model them with quantified domain goals, e.g. as for the previous example:
/// `forall<'a> { T: Fn(&'a i32) }` which corresponds to something like
/// `Binder<Holds(Implemented(TraitPredicate))>`.
-///
-/// Also, if `self` does not contain generic lifetimes, we can safely drop the binder and we
-/// can directly lower to a leaf goal instead of a quantified goal.
-impl<'tcx, T> Lower<Goal<'tcx>> for ty::Binder<T>
- where T: Lower<DomainGoal<'tcx>> + ty::fold::TypeFoldable<'tcx> + Copy
+impl<'tcx, T> Lower<PolyDomainGoal<'tcx>> for ty::Binder<T>
+ where T: Lower<DomainGoal<'tcx>> + ty::fold::TypeFoldable<'tcx>
{
- fn lower(&self) -> Goal<'tcx> {
- match self.no_late_bound_regions() {
- Some(p) => p.lower().into(),
- None => Goal::Quantified(
- QuantifierKind::Universal,
- Box::new(self.map_bound(|p| p.lower().into()))
- ),
- }
+ fn lower(&self) -> PolyDomainGoal<'tcx> {
+ self.map_bound_ref(|p| p.lower())
}
}
-impl<'tcx> Lower<Goal<'tcx>> for ty::Predicate<'tcx> {
- fn lower(&self) -> Goal<'tcx> {
+impl<'tcx> Lower<PolyDomainGoal<'tcx>> for ty::Predicate<'tcx> {
+ fn lower(&self) -> PolyDomainGoal<'tcx> {
use rustc::ty::Predicate::*;
match self {
RegionOutlives(predicate) => predicate.lower(),
TypeOutlives(predicate) => predicate.lower(),
Projection(predicate) => predicate.lower(),
- WellFormed(ty) => DomainGoal::WellFormedTy(*ty).into(),
+ WellFormed(ty) => ty::Binder::dummy(DomainGoal::WellFormedTy(*ty)),
ObjectSafe(..) |
ClosureKind(..) |
Subtype(..) |
}
}
+/// Transforms an existing goal into a FromEnv goal.
+///
+/// Used for lowered where clauses (see rustc guide).
+trait IntoFromEnvGoal {
+ fn into_from_env_goal(self) -> Self;
+}
+
+impl<'tcx> IntoFromEnvGoal for DomainGoal<'tcx> {
+ fn into_from_env_goal(self) -> DomainGoal<'tcx> {
+ use self::DomainGoal::*;
+ match self {
+ Holds(wc_atom) => FromEnv(wc_atom),
+ WellFormed(..) |
+ FromEnv(..) |
+ WellFormedTy(..) |
+ FromEnvTy(..) |
+ RegionOutlives(..) |
+ TypeOutlives(..) => self,
+ }
+ }
+}
+
crate fn program_clauses_for<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
-> Lrc<Vec<Clause<'tcx>>>
{
fn program_clauses_for_trait<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
-> Lrc<Vec<Clause<'tcx>>>
{
- // Rule Implemented-From-Env (see rustc guide)
- //
// `trait Trait<P1..Pn> where WC { .. } // P0 == Self`
+
+ // Rule Implemented-From-Env (see rustc guide)
//
// ```
// forall<Self, P1..Pn> {
}
};
// `FromEnv(Self: Trait<P1..Pn>)`
- let from_env = Goal::DomainGoal(DomainGoal::FromEnv(trait_pred.lower()));
+ let from_env = DomainGoal::FromEnv(trait_pred.lower()).into();
// `Implemented(Self: Trait<P1..Pn>)`
let impl_trait = DomainGoal::Holds(WhereClauseAtom::Implemented(trait_pred));
// `Implemented(Self: Trait<P1..Pn>) :- FromEnv(Self: Trait<P1..Pn>)`
- let clause = Clause::Implies(vec![from_env], impl_trait);
- Lrc::new(vec![clause])
+ let implemented_from_env = ProgramClause {
+ goal: impl_trait,
+ hypotheses: vec![from_env],
+ };
+ let mut clauses = vec![
+ Clause::ForAll(ty::Binder::dummy(implemented_from_env))
+ ];
+
+ // Rule Implied-Bound-From-Trait
+ //
+ // For each where clause WC:
+ // ```
+ // forall<Self, P1..Pn> {
+ // FromEnv(WC) :- FromEnv(Self: Trait<P1..Pn>)
+ // }
+ // ```
+
+ // `FromEnv(WC) :- FromEnv(Self: Trait<P1..Pn>)`, for each where clause WC
+ // FIXME: Remove the [1..] slice; this is a hack because the query
+ // predicates_of currently includes the trait itself (`Self: Trait<P1..Pn>`).
+ let where_clauses = &tcx.predicates_of(def_id).predicates;
+ let implied_bound_clauses =
+ where_clauses[1..].into_iter()
+ .map(|wc| implied_bound_from_trait(trait_pred, wc));
+ clauses.extend(implied_bound_clauses);
+
+ Lrc::new(clauses)
+}
+
+/// For a given `where_clause`, returns a clause `FromEnv(WC) :- FromEnv(Self: Trait<P1..Pn>)`.
+fn implied_bound_from_trait<'tcx>(
+ trait_pred: ty::TraitPredicate<'tcx>,
+ where_clause: &ty::Predicate<'tcx>,
+) -> Clause<'tcx> {
+ // `FromEnv(Self: Trait<P1..Pn>)`
+ let impl_trait = DomainGoal::FromEnv(WhereClauseAtom::Implemented(trait_pred));
+
+ // `FromEnv(WC) :- FromEnv(Self: Trait<P1..Pn>)`
+ Clause::ForAll(
+ where_clause.lower().map_bound(|goal| ProgramClause {
+ goal: goal.into_from_env_goal(),
+ hypotheses: vec![impl_trait.into()],
+ })
+ )
}
fn program_clauses_for_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
let where_clauses = tcx.predicates_of(def_id).predicates.lower();
// `Implemented(A0: Trait<A1..An>) :- WC`
- let clause = Clause::Implies(where_clauses, trait_pred);
- Lrc::new(vec![clause])
+ let clause = ProgramClause {
+ goal: trait_pred,
+ hypotheses: where_clauses.into_iter().map(|wc| wc.into()).collect()
+ };
+ Lrc::new(vec![Clause::ForAll(ty::Binder::dummy(clause))])
}
pub fn dump_program_clauses<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
-impl <'a, 'tcx> ClauseDumper<'a, 'tcx > {
+impl<'a, 'tcx> ClauseDumper<'a, 'tcx > {
fn process_attrs(&mut self, node_id: ast::NodeId, attrs: &[ast::Attribute]) {
let def_id = self.tcx.hir.local_def_id(node_id);
for attr in attrs {
if attr.check_name("rustc_dump_program_clauses") {
let clauses = self.tcx.program_clauses_for(def_id);
for clause in &*clauses {
- self.tcx.sess.struct_span_err(attr.span, &format!("{}", clause)).emit();
+ // Skip the top-level binder for a less verbose output
+ let program_clause = match clause {
+ Clause::Implies(program_clause) => program_clause,
+ Clause::ForAll(program_clause) => program_clause.skip_binder(),
+ };
+ self.tcx.sess.struct_span_err(attr.span, &format!("{}", program_clause)).emit();
}
}
}
loop {
i += 1;
prog = time(sess, "running linker", || {
- exec_linker(sess, &mut cmd, tmpdir)
+ exec_linker(sess, &mut cmd, out_filename, tmpdir)
});
let output = match prog {
Ok(ref output) => output,
}
}
-fn exec_linker(sess: &Session, cmd: &mut Command, tmpdir: &Path)
+fn exec_linker(sess: &Session, cmd: &mut Command, out_filename: &Path, tmpdir: &Path)
-> io::Result<Output>
{
// When attempting to spawn the linker we run a risk of blowing out the
// there instead of looking at the command line.
if !cmd.very_likely_to_exceed_some_spawn_limit() {
match cmd.command().stdout(Stdio::piped()).stderr(Stdio::piped()).spawn() {
- Ok(child) => return child.wait_with_output(),
+ Ok(child) => {
+ let output = child.wait_with_output();
+ flush_linked_file(&output, out_filename)?;
+ return output;
+ }
Err(ref e) if command_line_too_big(e) => {
info!("command line to linker was too big: {}", e);
}
fs::write(&file, &bytes)?;
cmd2.arg(format!("@{}", file.display()));
info!("invoking linker {:?}", cmd2);
- return cmd2.output();
+ let output = cmd2.output();
+ flush_linked_file(&output, out_filename)?;
+ return output;
+
+ #[cfg(unix)]
+ fn flush_linked_file(_: &io::Result<Output>, _: &Path) -> io::Result<()> {
+ Ok(())
+ }
+
+ #[cfg(windows)]
+ fn flush_linked_file(command_output: &io::Result<Output>, out_filename: &Path)
+ -> io::Result<()>
+ {
+ // On Windows, under high I/O load, output buffers are sometimes not flushed,
+ // even long after process exit, causing nasty, non-reproducible output bugs.
+ //
+ // File::sync_all() calls FlushFileBuffers() down the line, which solves the problem.
+ //
+ // A full writeup of the original Chrome bug can be found at
+ // randomascii.wordpress.com/2018/02/25/compiler-bug-linker-bug-windows-kernel-bug/amp
+
+ if let &Ok(ref out) = command_output {
+ if out.status.success() {
+ if let Ok(of) = fs::OpenOptions::new().write(true).open(out_filename) {
+ of.sync_all()?;
+ }
+ }
+ }
+
+ Ok(())
+ }
#[cfg(unix)]
fn command_line_too_big(err: &io::Error) -> bool {
use monomorphize::Instance;
use rustc::hir;
use rustc::hir::TransFnAttrFlags;
-use rustc::hir::def_id::CrateNum;
-use rustc::hir::def_id::{DefId, LOCAL_CRATE};
+use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE, CRATE_DEF_INDEX};
+use rustc::ich::Fingerprint;
use rustc::middle::exported_symbols::{SymbolExportLevel, ExportedSymbol, metadata_symbol_name};
use rustc::session::config;
use rustc::ty::{TyCtxt, SymbolName};
use rustc::ty::maps::Providers;
-use rustc::util::nodemap::{FxHashMap, DefIdSet};
+use rustc::ty::subst::Substs;
+use rustc::util::nodemap::{FxHashMap, DefIdMap};
use rustc_allocator::ALLOCATOR_METHODS;
+use rustc_data_structures::indexed_vec::IndexVec;
+use std::collections::hash_map::Entry::*;
pub type ExportedSymbols = FxHashMap<
CrateNum,
fn reachable_non_generics_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
cnum: CrateNum)
- -> Lrc<DefIdSet>
+ -> Lrc<DefIdMap<SymbolExportLevel>>
{
assert_eq!(cnum, LOCAL_CRATE);
if !tcx.sess.opts.output_types.should_trans() {
- return Lrc::new(DefIdSet())
- }
-
- let export_threshold = threshold(tcx);
-
- // We already collect all potentially reachable non-generic items for
- // `exported_symbols`. Now we just filter them down to what is actually
- // exported for the given crate we are compiling.
- let reachable_non_generics = tcx
- .exported_symbols(LOCAL_CRATE)
- .iter()
- .filter_map(|&(exported_symbol, level)| {
- if let ExportedSymbol::NonGeneric(def_id) = exported_symbol {
- if level.is_below_threshold(export_threshold) {
- return Some(def_id)
- }
- }
-
- None
- })
- .collect();
-
- Lrc::new(reachable_non_generics)
-}
-
-fn is_reachable_non_generic_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId)
- -> bool {
- tcx.reachable_non_generics(def_id.krate).contains(&def_id)
-}
-
-fn exported_symbols_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- cnum: CrateNum)
- -> Arc<Vec<(ExportedSymbol,
- SymbolExportLevel)>>
-{
- assert_eq!(cnum, LOCAL_CRATE);
-
- if !tcx.sess.opts.output_types.should_trans() {
- return Arc::new(vec![])
+ return Lrc::new(DefIdMap())
}
// Check to see if this crate is a "special runtime crate". These
let special_runtime_crate = tcx.is_panic_runtime(LOCAL_CRATE) ||
tcx.is_compiler_builtins(LOCAL_CRATE);
- let reachable_non_generics: DefIdSet = tcx.reachable_set(LOCAL_CRATE).0
+ let mut reachable_non_generics: DefIdMap<_> = tcx.reachable_set(LOCAL_CRATE).0
.iter()
.filter_map(|&node_id| {
// We want to ignore some FFI functions that are not exposed from
_ => None
}
})
- .collect();
-
- let mut symbols: Vec<_> = reachable_non_generics
- .iter()
- .map(|&def_id| {
+ .map(|def_id| {
let export_level = if special_runtime_crate {
let name = tcx.symbol_name(Instance::mono(tcx, def_id));
// We can probably do better here by just ensuring that
SymbolExportLevel::Rust
}
} else {
- tcx.symbol_export_level(def_id)
+ symbol_export_level(tcx, def_id)
};
debug!("EXPORTED SYMBOL (local): {} ({:?})",
tcx.symbol_name(Instance::mono(tcx, def_id)),
export_level);
- (ExportedSymbol::NonGeneric(def_id), export_level)
+ (def_id, export_level)
})
.collect();
if let Some(id) = tcx.sess.derive_registrar_fn.get() {
let def_id = tcx.hir.local_def_id(id);
- symbols.push((ExportedSymbol::NonGeneric(def_id), SymbolExportLevel::C));
+ reachable_non_generics.insert(def_id, SymbolExportLevel::C);
}
if let Some(id) = tcx.sess.plugin_registrar_fn.get() {
let def_id = tcx.hir.local_def_id(id);
- symbols.push((ExportedSymbol::NonGeneric(def_id), SymbolExportLevel::C));
+ reachable_non_generics.insert(def_id, SymbolExportLevel::C);
+ }
+
+ Lrc::new(reachable_non_generics)
+}
+
+fn is_reachable_non_generic_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId)
+ -> bool {
+ let export_threshold = threshold(tcx);
+
+ if let Some(&level) = tcx.reachable_non_generics(def_id.krate).get(&def_id) {
+ level.is_below_threshold(export_threshold)
+ } else {
+ false
}
+}
+
+fn is_reachable_non_generic_provider_extern<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId)
+ -> bool {
+ tcx.reachable_non_generics(def_id.krate).contains_key(&def_id)
+}
+
+fn exported_symbols_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ cnum: CrateNum)
+ -> Arc<Vec<(ExportedSymbol<'tcx>,
+ SymbolExportLevel)>>
+{
+ assert_eq!(cnum, LOCAL_CRATE);
+
+ if !tcx.sess.opts.output_types.should_trans() {
+ return Arc::new(vec![])
+ }
+
+ let mut symbols: Vec<_> = tcx.reachable_non_generics(LOCAL_CRATE)
+ .iter()
+ .map(|(&def_id, &level)| {
+ (ExportedSymbol::NonGeneric(def_id), level)
+ })
+ .collect();
if let Some(_) = *tcx.sess.entry_fn.borrow() {
let symbol_name = "main".to_string();
symbols.push((exported_symbol, SymbolExportLevel::Rust));
}
+ if tcx.share_generics() && tcx.local_crate_exports_generics() {
+ use rustc::mir::mono::{Linkage, Visibility, MonoItem};
+ use rustc::ty::InstanceDef;
+
+ // Normally, we require that shared monomorphizations are not hidden,
+ // because if we want to re-use a monomorphization from a Rust dylib, it
+ // needs to be exported.
+ // However, on platforms that don't allow for Rust dylibs, having
+ // external linkage is enough for monomorphization to be linked to.
+ let need_visibility = tcx.sess.target.target.options.dynamic_linking &&
+ !tcx.sess.target.target.options.only_cdylib;
+
+ let (_, cgus) = tcx.collect_and_partition_translation_items(LOCAL_CRATE);
+
+ for (mono_item, &(linkage, visibility)) in cgus.iter()
+ .flat_map(|cgu| cgu.items().iter()) {
+ if linkage != Linkage::External {
+ // We can only re-use things with external linkage, otherwise
+ // we'll get a linker error
+ continue
+ }
+
+ if need_visibility && visibility == Visibility::Hidden {
+ // If we potentially share things from Rust dylibs, they must
+ // not be hidden
+ continue
+ }
+
+ if let &MonoItem::Fn(Instance {
+ def: InstanceDef::Item(def_id),
+ substs,
+ }) = mono_item {
+ if substs.types().next().is_some() {
+ symbols.push((ExportedSymbol::Generic(def_id, substs),
+ SymbolExportLevel::Rust));
+ }
+ }
+ }
+ }
+
// Sort so we get a stable incr. comp. hash.
symbols.sort_unstable_by(|&(ref symbol1, ..), &(ref symbol2, ..)| {
symbol1.compare_stable(tcx, symbol2)
Arc::new(symbols)
}
+fn upstream_monomorphizations_provider<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ cnum: CrateNum)
+ -> Lrc<DefIdMap<Lrc<FxHashMap<&'tcx Substs<'tcx>, CrateNum>>>>
+{
+ debug_assert!(cnum == LOCAL_CRATE);
+
+ let cnums = tcx.all_crate_nums(LOCAL_CRATE);
+
+ let mut instances = DefIdMap();
+
+ let cnum_stable_ids: IndexVec<CrateNum, Fingerprint> = {
+ let mut cnum_stable_ids = IndexVec::from_elem_n(Fingerprint::ZERO,
+ cnums.len() + 1);
+
+ for &cnum in cnums.iter() {
+ cnum_stable_ids[cnum] = tcx.def_path_hash(DefId {
+ krate: cnum,
+ index: CRATE_DEF_INDEX,
+ }).0;
+ }
+
+ cnum_stable_ids
+ };
+
+ for &cnum in cnums.iter() {
+ for &(ref exported_symbol, _) in tcx.exported_symbols(cnum).iter() {
+ if let &ExportedSymbol::Generic(def_id, substs) = exported_symbol {
+ let substs_map = instances.entry(def_id)
+ .or_insert_with(|| FxHashMap());
+
+ match substs_map.entry(substs) {
+ Occupied(mut e) => {
+ // If there are multiple monomorphizations available,
+ // we select one deterministically.
+ let other_cnum = *e.get();
+ if cnum_stable_ids[other_cnum] > cnum_stable_ids[cnum] {
+ e.insert(cnum);
+ }
+ }
+ Vacant(e) => {
+ e.insert(cnum);
+ }
+ }
+ }
+ }
+ }
+
+ Lrc::new(instances.into_iter()
+ .map(|(key, value)| (key, Lrc::new(value)))
+ .collect())
+}
+
+fn upstream_monomorphizations_for_provider<'a, 'tcx>(
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId)
+ -> Option<Lrc<FxHashMap<&'tcx Substs<'tcx>, CrateNum>>>
+{
+ debug_assert!(!def_id.is_local());
+ tcx.upstream_monomorphizations(LOCAL_CRATE)
+ .get(&def_id)
+ .cloned()
+}
+
+fn is_unreachable_local_definition_provider(tcx: TyCtxt, def_id: DefId) -> bool {
+ if let Some(node_id) = tcx.hir.as_local_node_id(def_id) {
+ !tcx.reachable_set(LOCAL_CRATE).0.contains(&node_id)
+ } else {
+ bug!("is_unreachable_local_definition called with non-local DefId: {:?}",
+ def_id)
+ }
+}
+
pub fn provide(providers: &mut Providers) {
providers.reachable_non_generics = reachable_non_generics_provider;
- providers.is_reachable_non_generic = is_reachable_non_generic_provider;
+ providers.is_reachable_non_generic = is_reachable_non_generic_provider_local;
providers.exported_symbols = exported_symbols_provider_local;
- providers.symbol_export_level = symbol_export_level_provider;
+ providers.upstream_monomorphizations = upstream_monomorphizations_provider;
+ providers.is_unreachable_local_definition = is_unreachable_local_definition_provider;
}
pub fn provide_extern(providers: &mut Providers) {
- providers.is_reachable_non_generic = is_reachable_non_generic_provider;
- providers.symbol_export_level = symbol_export_level_provider;
+ providers.is_reachable_non_generic = is_reachable_non_generic_provider_extern;
+ providers.upstream_monomorphizations_for = upstream_monomorphizations_for_provider;
}
-fn symbol_export_level_provider(tcx: TyCtxt, sym_def_id: DefId) -> SymbolExportLevel {
+fn symbol_export_level(tcx: TyCtxt, sym_def_id: DefId) -> SymbolExportLevel {
// We export anything that's not mangled at the "C" layer as it probably has
// to do with ABI concerns. We do not, however, apply such treatment to
// special symbols in the standard library for various plumbing between
crate_info,
time_graph,
- coordinator_send: tcx.tx_to_llvm_workers.clone(),
+ coordinator_send: tcx.tx_to_llvm_workers.lock().clone(),
trans_worker_receive,
shared_emitter_main,
future: coordinator_thread,
metadata_config: Arc<ModuleConfig>,
allocator_config: Arc<ModuleConfig>)
-> thread::JoinHandle<Result<CompiledModules, ()>> {
- let coordinator_send = tcx.tx_to_llvm_workers.clone();
+ let coordinator_send = tcx.tx_to_llvm_workers.lock().clone();
let sess = tcx.sess;
// Compute the set of symbols we need to retain when doing LTO (if we need to)
mtrans: ModuleTranslation,
cost: u64) {
let llvm_work_item = WorkItem::Optimize(mtrans);
- drop(tcx.tx_to_llvm_workers.send(Box::new(Message::TranslationDone {
+ drop(tcx.tx_to_llvm_workers.lock().send(Box::new(Message::TranslationDone {
llvm_work_item,
cost,
})));
use metadata;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::middle::lang_items::StartFnLangItem;
+use rustc::middle::weak_lang_items;
use rustc::mir::mono::{Linkage, Visibility, Stats};
use rustc::middle::cstore::{EncodedMetadata};
use rustc::ty::{self, Ty, TyCtxt};
use std::sync::Arc;
use std::time::{Instant, Duration};
use std::{i32, usize};
-use std::iter;
use std::sync::mpsc;
use syntax_pos::Span;
use syntax_pos::symbol::InternedString;
// late-bound regions, since late-bound
// regions must appear in the argument
// listing.
- let main_ret_ty = main_ret_ty.no_late_bound_regions().unwrap();
+ let main_ret_ty = cx.tcx.erase_regions(
+ &main_ret_ty.no_late_bound_regions().unwrap(),
+ );
if declare::get_defined_value(cx, "main").is_some() {
// FIXME: We should be smart and show a better diagnostic here.
let (start_fn, args) = if use_start_lang_item {
let start_def_id = cx.tcx.require_lang_item(StartFnLangItem);
- let start_fn = callee::resolve_and_get_fn(cx, start_def_id, cx.tcx.mk_substs(
- iter::once(Kind::from(main_ret_ty))));
+ let start_fn = callee::resolve_and_get_fn(
+ cx,
+ start_def_id,
+ cx.tcx.intern_substs(&[Kind::from(main_ret_ty)]),
+ );
(start_fn, vec![bx.pointercast(rust_main, Type::i8p(cx).ptr_to()),
arg_argc, arg_argv])
} else {
info.lang_item_to_crate.insert(item, id.krate);
}
}
+
+ // No need to look for lang items that are whitelisted and don't
+ // actually need to exist.
+ let missing = missing.iter()
+ .cloned()
+ .filter(|&l| !weak_lang_items::whitelisted(tcx, l))
+ .collect();
info.missing_lang_items.insert(cnum, missing);
}
// This is sort of subtle. Inside our codegen unit we started off
// compilation by predefining all our own `TransItem` instances. That
// is, everything we're translating ourselves is already defined. That
- // means that anything we're actually translating ourselves will have
- // hit the above branch in `get_declared_value`. As a result, we're
- // guaranteed here that we're declaring a symbol that won't get defined,
- // or in other words we're referencing a foreign value.
+ // means that anything we're actually translating in this codegen unit
+ // will have hit the above branch in `get_declared_value`. As a result,
+ // we're guaranteed here that we're declaring a symbol that won't get
+ // defined, or in other words we're referencing a value from another
+ // codegen unit or even another crate.
//
// So because this is a foreign value we blanket apply an external
// linkage directive because it's coming from a different object file.
// The visibility here is where it gets tricky. This symbol could be
// referencing some foreign crate or foreign library (an `extern`
// block) in which case we want to leave the default visibility. We may
- // also, though, have multiple codegen units.
- //
- // In the situation of multiple codegen units this function may be
- // referencing a function from another codegen unit. If we're
- // indeed referencing a symbol in another codegen unit then we're in one
- // of two cases:
- //
- // * This is a symbol defined in a foreign crate and we're just
- // monomorphizing in another codegen unit. In this case this symbols
- // is for sure not exported, so both codegen units will be using
- // hidden visibility. Hence, we apply a hidden visibility here.
- //
- // * This is a symbol defined in our local crate. If the symbol in the
- // other codegen unit is also not exported then like with the foreign
- // case we apply a hidden visibility. If the symbol is exported from
- // the foreign object file, however, then we leave this at the
- // default visibility as we'll just import it naturally.
+ // also, though, have multiple codegen units. It could be a
+ // monomorphization, in which case its expected visibility depends on
+ // whether we are sharing generics or not. The important thing here is
+ // that the visibility we apply to the declaration is the same one that
+ // has been applied to the definition (wherever that definition may be).
unsafe {
llvm::LLVMRustSetLinkage(llfn, llvm::Linkage::ExternalLinkage);
- if cx.tcx.is_translated_item(instance_def_id) {
- if instance_def_id.is_local() {
- if !cx.tcx.is_reachable_non_generic(instance_def_id) {
- llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
+ let is_generic = instance.substs.types().next().is_some();
+
+ if is_generic {
+ // This is a monomorphization. Its expected visibility depends
+ // on whether we are in share-generics mode.
+
+ if cx.tcx.share_generics() {
+ // We are in share_generics mode.
+
+ if instance_def_id.is_local() {
+ // This is a definition from the current crate. If the
+ // definition is unreachable for downstream crates or
+ // the current crate does not re-export generics, the
+ // definition of the instance will have been declared
+ // as `hidden`.
+ if cx.tcx.is_unreachable_local_definition(instance_def_id) ||
+ !cx.tcx.local_crate_exports_generics() {
+ llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
+ }
+ } else {
+ // This is a monomorphization of a generic function
+ // defined in an upstream crate.
+ if cx.tcx.upstream_monomorphizations_for(instance_def_id)
+ .map(|set| set.contains_key(instance.substs))
+ .unwrap_or(false) {
+ // This is instantiated in another crate. It cannot
+ // be `hidden`.
+ } else {
+ // This is a local instantiation of an upstream definition.
+ // If the current crate does not re-export it
+ // (because it is a C library or an executable), it
+ // will have been declared `hidden`.
+ if !cx.tcx.local_crate_exports_generics() {
+ llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
+ }
+ }
}
} else {
+ // When not sharing generics, all instances are in the same
+ // crate and have hidden visibility
llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
}
+ } else {
+ // This is a non-generic function
+ if cx.tcx.is_translated_item(instance_def_id) {
+ // This is a function that is instantiated in the local crate
+
+ if instance_def_id.is_local() {
+ // This is a function that is defined in the local crate.
+ // If it is not reachable, it is hidden.
+ if !cx.tcx.is_reachable_non_generic(instance_def_id) {
+ llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
+ }
+ } else {
+ // This is a function from an upstream crate that has
+ // been instantiated here. These are always hidden.
+ llvm::LLVMRustSetVisibility(llfn, llvm::Visibility::Hidden);
+ }
+ }
}
}
"omit_gdb_pretty_printer_section");
!omit_gdb_pretty_printer_section &&
- !cx.sess().target.target.options.is_like_osx &&
- !cx.sess().target.target.options.is_like_windows &&
- cx.sess().opts.debuginfo != NoDebugInfo
+ cx.sess().opts.debuginfo != NoDebugInfo &&
+ cx.sess().target.target.options.emit_debug_gdb_scripts
}
use syntax_pos::{self, Span, Pos};
use syntax::ast;
-use syntax::symbol::Symbol;
+use syntax::symbol::{Symbol, InternedString};
use rustc::ty::layout::{self, LayoutOf};
pub mod gdb;
substs.types().zip(names).map(|(ty, name)| {
let actual_type = cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), ty);
let actual_type_metadata = type_metadata(cx, actual_type, syntax_pos::DUMMY_SP);
- let name = CString::new(name.as_str().as_bytes()).unwrap();
+ let name = CString::new(name.as_bytes()).unwrap();
unsafe {
llvm::LLVMRustDIBuilderCreateTemplateTypeParameter(
DIB(cx),
return create_DIArray(DIB(cx), &template_params[..]);
}
- fn get_type_parameter_names(cx: &CodegenCx, generics: &ty::Generics) -> Vec<ast::Name> {
+ fn get_type_parameter_names(cx: &CodegenCx, generics: &ty::Generics) -> Vec<InternedString> {
let mut names = generics.parent.map_or(vec![], |def_id| {
get_type_parameter_names(cx, cx.tcx.generics_of(def_id))
});
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(custom_attribute)]
#![feature(fs_read_write)]
#![allow(unused_attributes)]
-#![cfg_attr(stage0, feature(i128_type, i128))]
-#![cfg_attr(stage0, feature(inclusive_range_syntax))]
#![feature(libc)]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
-#![cfg_attr(stage0, feature(slice_patterns))]
-#![cfg_attr(stage0, feature(conservative_impl_trait))]
#![feature(optin_builtin_traits)]
#![feature(inclusive_range_fields)]
+#![feature(underscore_lifetimes)]
use rustc::dep_graph::WorkProduct;
use syntax_pos::symbol::Symbol;
wasm_custom_sections: BTreeMap<String, Vec<u8>>,
wasm_imports: FxHashMap<String, String>,
lang_item_to_crate: FxHashMap<LangItem, CrateNum>,
- missing_lang_items: FxHashMap<CrateNum, Lrc<Vec<LangItem>>>,
+ missing_lang_items: FxHashMap<CrateNum, Vec<LangItem>>,
}
__build_diagnostic_array! { librustc_trans, DIAGNOSTICS }
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(custom_attribute)]
#![allow(unused_attributes)]
-#![cfg_attr(stage0, feature(i128_type))]
#![feature(quote)]
#![feature(rustc_diagnostic_macros)]
-#![cfg_attr(stage0, feature(conservative_impl_trait))]
extern crate ar;
extern crate flate2;
use rustc::middle::weak_lang_items;
use rustc_mir::monomorphize::Instance;
use rustc_mir::monomorphize::item::{MonoItem, MonoItemExt, InstantiationMode};
-use rustc::hir::def_id::DefId;
+use rustc::hir::def_id::{DefId, LOCAL_CRATE};
use rustc::hir::map as hir_map;
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::ty::fold::TypeVisitor;
assert!(!substs.needs_subst());
substs.visit_with(&mut hasher);
- let mut avoid_cross_crate_conflicts = false;
-
- // If this is an instance of a generic function, we also hash in
- // the ID of the instantiating crate. This avoids symbol conflicts
- // in case the same instances is emitted in two crates of the same
- // project.
- if substs.types().next().is_some() {
- avoid_cross_crate_conflicts = true;
- }
-
- // If we're dealing with an instance of a function that's inlined from
- // another crate but we're marking it as globally shared to our
- // compliation (aka we're not making an internal copy in each of our
- // codegen units) then this symbol may become an exported (but hidden
- // visibility) symbol. This means that multiple crates may do the same
- // and we want to be sure to avoid any symbol conflicts here.
- match MonoItem::Fn(instance).instantiation_mode(tcx) {
- InstantiationMode::GloballyShared { may_conflict: true } => {
- avoid_cross_crate_conflicts = true;
- }
- _ => {}
- }
+ let is_generic = substs.types().next().is_some();
+ let avoid_cross_crate_conflicts =
+ // If this is an instance of a generic function, we also hash in
+ // the ID of the instantiating crate. This avoids symbol conflicts
+ // in case the same instance is emitted in two crates of the same
+ // project.
+ is_generic ||
+
+ // If we're dealing with an instance of a function that's inlined from
+ // another crate but we're marking it as globally shared to our
+ // compilation (aka we're not making an internal copy in each of our
+ // codegen units) then this symbol may become an exported (but hidden
+ // visibility) symbol. This means that multiple crates may do the same
+ // and we want to be sure to avoid any symbol conflicts here.
+ match MonoItem::Fn(instance).instantiation_mode(tcx) {
+ InstantiationMode::GloballyShared { may_conflict: true } => true,
+ _ => false,
+ };
if avoid_cross_crate_conflicts {
- hasher.hash(tcx.crate_name.as_str());
- hasher.hash(tcx.sess.local_crate_disambiguator());
+ let instantiating_crate = if is_generic {
+ if !def_id.is_local() && tcx.share_generics() {
+ // If we are re-using a monomorphization from another crate,
+ // we have to compute the symbol hash accordingly.
+ let upstream_monomorphizations =
+ tcx.upstream_monomorphizations_for(def_id);
+
+ upstream_monomorphizations.and_then(|monos| monos.get(&substs)
+ .cloned())
+ .unwrap_or(LOCAL_CRATE)
+ } else {
+ LOCAL_CRATE
+ }
+ } else {
+ LOCAL_CRATE
+ };
+
+ hasher.hash(&tcx.original_crate_name(instantiating_crate).as_str()[..]);
+ hasher.hash(&tcx.crate_disambiguator(instantiating_crate));
}
});
alloc = { path = "../liballoc" }
alloc_system = { path = "../liballoc_system" }
core = { path = "../libcore" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
let item_def_id = tcx.hir.local_def_id(item_id);
let generics = tcx.generics_of(item_def_id);
let index = generics.type_param_to_index[&tcx.hir.local_def_id(node_id)];
- tcx.mk_param(index, tcx.hir.name(node_id))
+ tcx.mk_param(index, tcx.hir.name(node_id).as_str())
}
Def::SelfTy(_, Some(def_id)) => {
// Self in impl (we know the concrete type).
/// and in libcore/intrinsics.rs
pub fn check_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
it: &hir::ForeignItem) {
- let param = |n| tcx.mk_param(n, Symbol::intern(&format!("P{}", n)));
+ let param = |n| tcx.mk_param(n, Symbol::intern(&format!("P{}", n)).as_str());
let name = it.name.as_str();
let (n_tps, inputs, output) = if name.starts_with("atomic_") {
let split : Vec<&str> = name.split('_').collect();
pub fn check_platform_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
it: &hir::ForeignItem) {
let param = |n| {
- let name = Symbol::intern(&format!("P{}", n));
+ let name = Symbol::intern(&format!("P{}", n)).as_str();
tcx.mk_param(n, name)
};
// variables.
let method_generics = self.tcx.generics_of(pick.item.def_id);
let mut fn_segment = Some((segment, method_generics));
- self.fcx.check_path_parameter_count(self.span, &mut fn_segment, true);
+ self.fcx.check_path_parameter_count(self.span, &mut fn_segment, true, false);
// Create subst for early-bound lifetime parameters, combining
// parameters from the type and those from the method.
sp: Span,
expr_sp: Span,
fn_inputs: &[Ty<'tcx>],
- expected_arg_tys: &[Ty<'tcx>],
+ mut expected_arg_tys: &[Ty<'tcx>],
args: &'gcx [hir::Expr],
variadic: bool,
tuple_arguments: TupleArgumentsFlag,
self.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation);
}
- let mut expected_arg_tys = expected_arg_tys;
let expected_arg_count = fn_inputs.len();
- fn parameter_count_error<'tcx>(sess: &Session,
- sp: Span,
- expr_sp: Span,
- expected_count: usize,
- arg_count: usize,
- error_code: &str,
- variadic: bool,
- def_span: Option<Span>,
- sugg_unit: bool) {
- let mut err = sess.struct_span_err_with_code(sp,
+ let param_count_error = |expected_count: usize,
+ arg_count: usize,
+ error_code: &str,
+ variadic: bool,
+ sugg_unit: bool| {
+ let mut err = tcx.sess.struct_span_err_with_code(sp,
&format!("this function takes {}{} parameter{} but {} parameter{} supplied",
if variadic {"at least "} else {""},
expected_count,
if arg_count == 1 {" was"} else {"s were"}),
DiagnosticId::Error(error_code.to_owned()));
- if let Some(def_s) = def_span.map(|sp| sess.codemap().def_span(sp)) {
+ if let Some(def_s) = def_span.map(|sp| tcx.sess.codemap().def_span(sp)) {
err.span_label(def_s, "defined here");
}
if sugg_unit {
- let sugg_span = sess.codemap().end_point(expr_sp);
+ let sugg_span = tcx.sess.codemap().end_point(expr_sp);
// remove closing `)` from the span
let sugg_span = sugg_span.shrink_to_lo();
err.span_suggestion(
if expected_count == 1 {""} else {"s"}));
}
err.emit();
- }
+ };
let formal_tys = if tuple_arguments == TupleArguments {
let tuple_type = self.structurally_resolved_type(sp, fn_inputs[0]);
match tuple_type.sty {
ty::TyTuple(arg_types) if arg_types.len() != args.len() => {
- parameter_count_error(tcx.sess, sp, expr_sp, arg_types.len(), args.len(),
- "E0057", false, def_span, false);
+ param_count_error(arg_types.len(), args.len(), "E0057", false, false);
expected_arg_tys = &[];
self.err_args(args.len())
}
if supplied_arg_count >= expected_arg_count {
fn_inputs.to_vec()
} else {
- parameter_count_error(tcx.sess, sp, expr_sp, expected_arg_count,
- supplied_arg_count, "E0060", true, def_span, false);
+ param_count_error(expected_arg_count, supplied_arg_count, "E0060", true, false);
expected_arg_tys = &[];
self.err_args(supplied_arg_count)
}
} else {
false
};
- parameter_count_error(tcx.sess, sp, expr_sp, expected_arg_count,
- supplied_arg_count, "E0061", false, def_span, sugg_unit);
+ param_count_error(expected_arg_count, supplied_arg_count, "E0061", false, sugg_unit);
+
expected_arg_tys = &[];
self.err_args(supplied_arg_count)
};
+ // If there is no expectation, expect formal_tys.
+ let expected_arg_tys = if !expected_arg_tys.is_empty() {
+ expected_arg_tys
+ } else {
+ &formal_tys
+ };
debug!("check_argument_types: formal_tys={:?}",
formal_tys.iter().map(|t| self.ty_to_string(*t)).collect::<Vec<String>>());
// The special-cased logic below has three functions:
// 1. Provide as good of an expected type as possible.
- let expected = expected_arg_tys.get(i).map(|&ty| {
- Expectation::rvalue_hint(self, ty)
- });
+ let expected = Expectation::rvalue_hint(self, expected_arg_tys[i]);
- let checked_ty = self.check_expr_with_expectation(
- &arg,
- expected.unwrap_or(ExpectHasType(formal_ty)));
+ let checked_ty = self.check_expr_with_expectation(&arg, expected);
// 2. Coerce to the most detailed type that could be coerced
// to, which is `expected_ty` if `rvalue_hint` returns an
// `ExpectHasType(expected_ty)`, or the `formal_ty` otherwise.
- let coerce_ty = expected.and_then(|e| e.only_has_type(self));
+ let coerce_ty = expected.only_has_type(self).unwrap_or(formal_ty);
// We're processing function arguments so we definitely want to use
// two-phase borrows.
- self.demand_coerce(&arg,
- checked_ty,
- coerce_ty.unwrap_or(formal_ty),
- AllowTwoPhase::Yes);
+ self.demand_coerce(&arg, checked_ty, coerce_ty, AllowTwoPhase::Yes);
// 3. Relate the expected type and the formal one,
// if the expected type was used for the coercion.
- coerce_ty.map(|ty| self.demand_suptype(arg.span, formal_ty, ty));
+ self.demand_suptype(arg.span, formal_ty, coerce_ty);
}
}
fn check_expr_coercable_to_type(&self,
expr: &'gcx hir::Expr,
expected: Ty<'tcx>) -> Ty<'tcx> {
- self.check_expr_coercable_to_type_with_needs(expr, expected, Needs::None)
- }
-
- fn check_expr_coercable_to_type_with_needs(&self,
- expr: &'gcx hir::Expr,
- expected: Ty<'tcx>,
- needs: Needs)
- -> Ty<'tcx> {
- let ty = self.check_expr_with_expectation_and_needs(
- expr,
- ExpectHasType(expected),
- needs);
+ let ty = self.check_expr_with_hint(expr, expected);
// checks don't need two phase
self.demand_coerce(expr, ty, expected, AllowTwoPhase::No)
}
formal_args: &[Ty<'tcx>])
-> Vec<Ty<'tcx>> {
let formal_ret = self.resolve_type_vars_with_obligations(formal_ret);
- let expected_args = expected_ret.only_has_type(self).and_then(|ret_ty| {
- self.fudge_regions_if_ok(&RegionVariableOrigin::Coercion(call_span), || {
- // Attempt to apply a subtyping relationship between the formal
- // return type (likely containing type variables if the function
- // is polymorphic) and the expected return type.
- // No argument expectations are produced if unification fails.
- let origin = self.misc(call_span);
- let ures = self.at(&origin, self.param_env).sup(ret_ty, &formal_ret);
-
- // FIXME(#27336) can't use ? here, Try::from_error doesn't default
- // to identity so the resulting type is not constrained.
- match ures {
- Ok(ok) => {
- // Process any obligations locally as much as
- // we can. We don't care if some things turn
- // out unconstrained or ambiguous, as we're
- // just trying to get hints here.
- self.save_and_restore_in_snapshot_flag(|_| {
- let mut fulfill = TraitEngine::new(self.tcx);
- for obligation in ok.obligations {
- fulfill.register_predicate_obligation(self, obligation);
- }
- fulfill.select_where_possible(self)
- }).map_err(|_| ())?;
- }
- Err(_) => return Err(()),
+ let ret_ty = match expected_ret.only_has_type(self) {
+ Some(ret) => ret,
+ None => return Vec::new()
+ };
+ let expect_args = self.fudge_regions_if_ok(&RegionVariableOrigin::Coercion(call_span), || {
+ // Attempt to apply a subtyping relationship between the formal
+ // return type (likely containing type variables if the function
+ // is polymorphic) and the expected return type.
+ // No argument expectations are produced if unification fails.
+ let origin = self.misc(call_span);
+ let ures = self.at(&origin, self.param_env).sup(ret_ty, &formal_ret);
+
+ // FIXME(#27336) can't use ? here, Try::from_error doesn't default
+ // to identity so the resulting type is not constrained.
+ match ures {
+ Ok(ok) => {
+ // Process any obligations locally as much as
+ // we can. We don't care if some things turn
+ // out unconstrained or ambiguous, as we're
+ // just trying to get hints here.
+ self.save_and_restore_in_snapshot_flag(|_| {
+ let mut fulfill = TraitEngine::new(self.tcx);
+ for obligation in ok.obligations {
+ fulfill.register_predicate_obligation(self, obligation);
+ }
+ fulfill.select_where_possible(self)
+ }).map_err(|_| ())?;
}
+ Err(_) => return Err(()),
+ }
- // Record all the argument types, with the substitutions
- // produced from the above subtyping unification.
- Ok(formal_args.iter().map(|ty| {
- self.resolve_type_vars_if_possible(ty)
- }).collect())
- }).ok()
- }).unwrap_or(vec![]);
+ // Record all the argument types, with the substitutions
+ // produced from the above subtyping unification.
+ Ok(formal_args.iter().map(|ty| {
+ self.resolve_type_vars_if_possible(ty)
+ }).collect())
+ }).unwrap_or(Vec::new());
debug!("expected_inputs_for_expected_output(formal={:?} -> {:?}, expected={:?} -> {:?})",
formal_args, formal_ret,
- expected_args, expected_ret);
- expected_args
+ expect_args, expected_ret);
+ expect_args
}
// Checks a method call.
if !tuple_like { continue }
debug!("tuple struct named {:?}", base_t);
- let ident = ast::Ident {
- name: Symbol::intern(&idx.node.to_string()),
- ctxt: idx.span.ctxt().modern(),
- };
+ let ident =
+ ast::Ident::new(Symbol::intern(&idx.node.to_string()), idx.span.modern());
let (ident, def_scope) =
self.tcx.adjust_ident(ident, base_def.did, self.body_id);
let fields = &base_def.non_enum_variant().fields;
// variables. If the user provided some types, we may still need
// to add defaults. If the user provided *too many* types, that's
// a problem.
- self.check_path_parameter_count(span, &mut type_segment, false);
- self.check_path_parameter_count(span, &mut fn_segment, false);
- self.check_impl_trait(span, &mut fn_segment);
+ let supress_mismatch = self.check_impl_trait(span, &mut fn_segment);
+ self.check_path_parameter_count(span, &mut type_segment, false, supress_mismatch);
+ self.check_path_parameter_count(span, &mut fn_segment, false, supress_mismatch);
let (fn_start, has_self) = match (type_segment, fn_segment) {
(_, Some((_, generics))) => {
fn check_path_parameter_count(&self,
span: Span,
segment: &mut Option<(&hir::PathSegment, &ty::Generics)>,
- is_method_call: bool) {
+ is_method_call: bool,
+ supress_mismatch_error: bool) {
let (lifetimes, types, infer_types, bindings) = segment.map_or(
(&[][..], &[][..], true, &[][..]),
|(s, _)| s.parameters.as_ref().map_or(
// type parameters, we force instantiate_value_path to
// use inference variables instead of the provided types.
*segment = None;
- } else if types.len() < required_len && !infer_types {
+ } else if types.len() < required_len && !infer_types && !supress_mismatch_error {
let expected_text = count_type_params(required_len);
let actual_text = count_type_params(types.len());
struct_span_err!(self.tcx.sess, span, E0089,
/// Report error if there is an explicit type parameter when using `impl Trait`.
fn check_impl_trait(&self,
span: Span,
- segment: &mut Option<(&hir::PathSegment, &ty::Generics)>) {
+ segment: &mut Option<(&hir::PathSegment, &ty::Generics)>)
+ -> bool {
use hir::SyntheticTyParamKind::*;
- segment.map(|(path_segment, generics)| {
+ let segment = segment.map(|(path_segment, generics)| {
let explicit = !path_segment.infer_types;
let impl_trait = generics.types.iter()
.any(|ty_param| {
err.emit();
}
+
+ impl_trait
});
+
+ segment.unwrap_or(false)
}
// Resolves `typ` by a single level if `typ` is a type variable.
// trait matching creating lifetime constraints that are too strict.
// E.g. adding `&'a T` and `&'b T`, given `&'x T: Add<&'x T>`, will result
// in `&'a T <: &'x T` and `&'b T <: &'x T`, instead of `'a = 'b = 'x`.
- let lhs_ty = self.check_expr_coercable_to_type_with_needs(lhs_expr,
- self.next_ty_var(TypeVariableOrigin::MiscVariable(lhs_expr.span)),
- lhs_needs);
+ let lhs_ty = self.check_expr_with_needs(lhs_expr, lhs_needs);
+ let fresh_var = self.next_ty_var(TypeVariableOrigin::MiscVariable(lhs_expr.span));
+ let lhs_ty = self.demand_coerce(lhs_expr, lhs_ty, fresh_var, AllowTwoPhase::No);
let lhs_ty = self.resolve_type_vars_with_obligations(lhs_ty);
// NB: As we have not yet type-checked the RHS, we don't have the
/// constraint that `'z <= 'a`. Given this setup, let's clarify the
/// parameters in (roughly) terms of the example:
///
+ /// ```plain,ignore (pseudo-Rust)
/// A borrow of: `& 'z bk * r` where `r` has type `& 'a bk T`
/// borrow_region ^~ ref_region ^~
/// borrow_kind ^~ ref_kind ^~
/// ref_cmt ^
+ /// ```
///
/// Here `bk` stands for some borrow-kind (e.g., `mut`, `uniq`, etc).
///
_ => t.super_visit_with(self)
}
}
+
+ fn visit_region(&mut self, _: ty::Region<'tcx>) -> bool {
+ true
+ }
}
let mut param_count = CountParams { params: FxHashSet() };
- pred.visit_with(&mut param_count);
+ let has_region = pred.visit_with(&mut param_count);
let substituted_pred = pred.subst(fcx.tcx, substs);
- // Don't check non-defaulted params, dependent defaults or preds with multiple params.
- if substituted_pred.references_error() || param_count.params.len() > 1 {
+ // Don't check non-defaulted params, dependent defaults (including lifetimes)
+ // or preds with multiple params.
+ if substituted_pred.references_error() || param_count.params.len() > 1
+ || has_region {
continue;
}
// Avoid duplication of predicates that contain no parameters, for example.
// local so it should be okay to just unwrap everything.
let trait_def_id = impl_params[&method_param.name];
let trait_decl_span = tcx.def_span(trait_def_id);
- error_194(tcx, type_span, trait_decl_span, method_param.name);
+ error_194(tcx, type_span, trait_decl_span, &method_param.name[..]);
}
}
}
err
}
-fn error_194(tcx: TyCtxt, span: Span, trait_decl_span: Span, name: ast::Name) {
+fn error_194(tcx: TyCtxt, span: Span, trait_decl_span: Span, name: &str) {
struct_span_err!(tcx.sess, span, E0194,
"type parameter `{}` shadows another type parameter of the same name",
name)
let param_owner_def_id = tcx.hir.local_def_id(param_owner);
let generics = tcx.generics_of(param_owner_def_id);
let index = generics.type_param_to_index[&def_id];
- let ty = tcx.mk_param(index, tcx.hir.ty_param_name(param_id));
+ let ty = tcx.mk_param(index, tcx.hir.ty_param_name(param_id).as_str());
// Don't look for bounds where the type parameter isn't in scope.
let parent = if item_def_id == param_owner_def_id {
opt_self = Some(ty::TypeParameterDef {
index: 0,
- name: keywords::SelfType.name(),
+ name: keywords::SelfType.name().as_str(),
def_id: tcx.hir.local_def_id(param_id),
has_default: false,
object_lifetime_default: rl::Set1::Empty,
ty::TypeParameterDef {
index: type_start + i as u32,
- name: p.name,
+ name: p.name.as_str(),
def_id: tcx.hir.local_def_id(p.id),
has_default: p.default.is_some(),
object_lifetime_default:
// add a dummy parameter for the closure kind
types.push(ty::TypeParameterDef {
index: type_start,
- name: Symbol::intern("<closure_kind>"),
+ name: Symbol::intern("<closure_kind>").as_str(),
def_id,
has_default: false,
object_lifetime_default: rl::Set1::Empty,
// add a dummy parameter for the closure signature
types.push(ty::TypeParameterDef {
index: type_start + 1,
- name: Symbol::intern("<closure_signature>"),
+ name: Symbol::intern("<closure_signature>").as_str(),
def_id,
has_default: false,
object_lifetime_default: rl::Set1::Empty,
tcx.with_freevars(node_id, |fv| {
types.extend(fv.iter().zip(2..).map(|(_, i)| ty::TypeParameterDef {
index: type_start + i,
- name: Symbol::intern("<upvar>"),
+ name: Symbol::intern("<upvar>").as_str(),
def_id,
has_default: false,
object_lifetime_default: rl::Set1::Empty,
// Collect the predicates that were written inline by the user on each
// type parameter (e.g., `<T:Foo>`).
for param in ast_generics.ty_params() {
- let param_ty = ty::ParamTy::new(index, param.name).to_ty(tcx);
+ let param_ty = ty::ParamTy::new(index, param.name.as_str()).to_ty(tcx);
index += 1;
let bounds = compute_bounds(&icx,
Erroneous code example:
```compile_fail,E0569
-#![feature(generic_param_attrs)]
#![feature(dropck_eyepatch)]
struct Foo<X>(X);
///
/// Example:
///
-/// ```
+/// ```rust,ignore (pseudo-Rust)
/// impl<T> Trait<Foo> for Bar { ... }
-/// ^ T does not appear in `Foo` or `Bar`, error!
+/// // ^ T does not appear in `Foo` or `Bar`, error!
///
/// impl<T> Trait<Foo<T>> for Bar { ... }
-/// ^ T appears in `Foo<T>`, ok.
+/// // ^ T appears in `Foo<T>`, ok.
///
/// impl<T> Trait<Foo> for Bar where Bar: Iterator<Item=T> { ... }
-/// ^ T is bound to `<Bar as Iterator>::Item`, ok.
+/// // ^ T is bound to `<Bar as Iterator>::Item`, ok.
///
/// impl<'a> Trait<Foo> for Bar { }
-/// ^ 'a is unused, but for back-compat we allow it
+/// // ^ 'a is unused, but for back-compat we allow it
///
/// impl<'a> Trait<Foo> for Bar { type X = &'a i32; }
-/// ^ 'a is unused and appears in assoc type, error
+/// // ^ 'a is unused and appears in assoc type, error
/// ```
pub fn impl_wf_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
// We will tag this as part of the WF check -- logically, it is,
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![allow(non_camel_case_types)]
-#![cfg_attr(stage0, feature(advanced_slice_patterns))]
#![feature(box_patterns)]
#![feature(box_syntax)]
-#![cfg_attr(stage0, feature(conservative_impl_trait))]
-#![cfg_attr(stage0, feature(copy_closures, clone_closures))]
#![feature(crate_visibility_modifier)]
#![feature(from_ref)]
-#![cfg_attr(stage0, feature(match_default_bindings))]
#![feature(exhaustive_patterns)]
#![feature(option_filter)]
#![feature(quote)]
#![feature(refcell_replace_swap)]
#![feature(rustc_diagnostic_macros)]
#![feature(slice_patterns)]
-#![cfg_attr(stage0, feature(i128_type))]
-#![cfg_attr(stage0, feature(never_type))]
#![feature(dyn_trait)]
#[macro_use] extern crate log;
let mut segments = path.segments.into_vec();
let last = segments.pop().unwrap();
- let real_name = name.as_ref().map(|n| Symbol::from(n.as_str()));
+ let real_name = name.map(|name| Symbol::intern(&name));
segments.push(hir::PathSegment::new(
real_name.unwrap_or(last.name),
P(hir::Path {
span: DUMMY_SP,
def: Def::TyParam(param.def_id),
- segments: HirVec::from_vec(vec![hir::PathSegment::from_name(param.name)]),
+ segments: HirVec::from_vec(vec![
+ hir::PathSegment::from_name(Symbol::intern(¶m.name))
+ ]),
}),
)),
span: DUMMY_SP,
/// If the content is not properly formatted, it will return an error indicating what and where
/// the error is.
pub fn parse(cfg: &MetaItem) -> Result<Cfg, InvalidCfgError> {
- let name = cfg.name();
+ let name = cfg.ident.name;
match cfg.node {
MetaItemKind::Word => Ok(Cfg::Cfg(name, None)),
MetaItemKind::NameValue(ref lit) => match lit.node {
fn test_parse_ok() {
with_globals(|| {
let mi = MetaItem {
- name: Symbol::intern("all"),
+ ident: Ident::from_str("all"),
node: MetaItemKind::Word,
span: DUMMY_SP,
};
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("all")));
let mi = MetaItem {
- name: Symbol::intern("all"),
+ ident: Ident::from_str("all"),
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Str(
Symbol::intern("done"),
StrStyle::Cooked,
assert_eq!(Cfg::parse(&mi), Ok(name_value_cfg("all", "done")));
let mi = MetaItem {
- name: Symbol::intern("all"),
+ ident: Ident::from_str("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("a"),
+ ident: Ident::from_str("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("b"),
+ ident: Ident::from_str("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") & word_cfg("b")));
let mi = MetaItem {
- name: Symbol::intern("any"),
+ ident: Ident::from_str("any"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("a"),
+ ident: Ident::from_str("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("b"),
+ ident: Ident::from_str("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
assert_eq!(Cfg::parse(&mi), Ok(word_cfg("a") | word_cfg("b")));
let mi = MetaItem {
- name: Symbol::intern("not"),
+ ident: Ident::from_str("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("a"),
+ ident: Ident::from_str("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
assert_eq!(Cfg::parse(&mi), Ok(!word_cfg("a")));
let mi = MetaItem {
- name: Symbol::intern("not"),
+ ident: Ident::from_str("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("any"),
+ ident: Ident::from_str("any"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("a"),
+ ident: Ident::from_str("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("all"),
+ ident: Ident::from_str("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("b"),
+ ident: Ident::from_str("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("c"),
+ ident: Ident::from_str("c"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
assert_eq!(Cfg::parse(&mi), Ok(!(word_cfg("a") | (word_cfg("b") & word_cfg("c")))));
let mi = MetaItem {
- name: Symbol::intern("all"),
+ ident: Ident::from_str("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("a"),
+ ident: Ident::from_str("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("b"),
+ ident: Ident::from_str("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("c"),
+ ident: Ident::from_str("c"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
fn test_parse_err() {
with_globals(|| {
let mi = MetaItem {
- name: Symbol::intern("foo"),
+ ident: Ident::from_str("foo"),
node: MetaItemKind::NameValue(dummy_spanned(LitKind::Bool(false))),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
- name: Symbol::intern("not"),
+ ident: Ident::from_str("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("a"),
+ ident: Ident::from_str("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("b"),
+ ident: Ident::from_str("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
- name: Symbol::intern("not"),
+ ident: Ident::from_str("not"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
};
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
- name: Symbol::intern("foo"),
+ ident: Ident::from_str("foo"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("a"),
+ ident: Ident::from_str("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
- name: Symbol::intern("all"),
+ ident: Ident::from_str("all"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("foo"),
+ ident: Ident::from_str("foo"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("b"),
+ ident: Ident::from_str("b"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
- name: Symbol::intern("any"),
+ ident: Ident::from_str("any"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("a"),
+ ident: Ident::from_str("a"),
node: MetaItemKind::Word,
span: DUMMY_SP,
})),
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("foo"),
+ ident: Ident::from_str("foo"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
})),
assert!(Cfg::parse(&mi).is_err());
let mi = MetaItem {
- name: Symbol::intern("not"),
+ ident: Ident::from_str("not"),
node: MetaItemKind::List(vec![
dummy_spanned(NestedMetaItemKind::MetaItem(MetaItem {
- name: Symbol::intern("foo"),
+ ident: Ident::from_str("foo"),
node: MetaItemKind::List(vec![]),
span: DUMMY_SP,
})),
use syntax;
use syntax::abi::Abi;
-use syntax::ast::{self, AttrStyle};
+use syntax::ast::{self, AttrStyle, Ident};
use syntax::attr;
-use syntax::codemap::Spanned;
+use syntax::codemap::{dummy_spanned, Spanned};
use syntax::feature_gate::UnstableFeatures;
use syntax::ptr::P;
use syntax::symbol::keywords;
-use syntax::symbol::Symbol;
+use syntax::symbol::{Symbol, InternedString};
use syntax_pos::{self, DUMMY_SP, Pos, FileName};
use rustc::middle::const_val::ConstVal;
for attr in attrs.lists("target_feature") {
if attr.check_name("enable") {
if let Some(feat) = attr.value_str() {
- let meta = attr::mk_name_value_item_str("target_feature".into(), feat);
+ let meta = attr::mk_name_value_item_str(Ident::from_str("target_feature"),
+ dummy_spanned(feat));
if let Ok(feat_cfg) = Cfg::parse(&meta) {
cfg &= feat_cfg;
}
fn macro_resolve(cx: &DocContext, path_str: &str) -> Option<Def> {
use syntax::ext::base::{MacroKind, SyntaxExtension};
use syntax::ext::hygiene::Mark;
- let segment = ast::PathSegment {
- identifier: ast::Ident::from_str(path_str),
- span: DUMMY_SP,
- parameters: None,
- };
- let path = ast::Path {
- span: DUMMY_SP,
- segments: vec![segment],
- };
-
+ let segment = ast::PathSegment::from_ident(Ident::from_str(path_str));
+ let path = ast::Path { segments: vec![segment], span: DUMMY_SP };
let mut resolver = cx.resolver.borrow_mut();
let mark = Mark::root();
let res = resolver
} else {
None
}
- } else if let Some(def) = resolver.all_macros.get(&path_str.into()) {
+ } else if let Some(def) = resolver.all_macros.get(&Symbol::intern(path_str)) {
Some(*def)
} else {
None
// predicates field (see rustc_typeck::collect::ty_generics), so remove
// them.
let stripped_typarams = gens.types.iter().filter_map(|tp| {
- if tp.name == keywords::SelfType.name() {
+ if tp.name == keywords::SelfType.name().as_str() {
assert_eq!(tp.index, 0);
None
} else {
}
}
+impl Clean<String> for InternedString {
+ fn clean(&self, _: &DocContext) -> String {
+ self.to_string()
+ }
+}
+
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Typedef {
pub type_: Type,
href.push_str(component);
href.push('/');
});
- let mut fname = p.file_name().expect("source has no filename")
+ let mut fname = p.file_name()
+ .expect("source has no filename")
.to_os_string();
fname.push(".html");
cur.push(&fname);
}
}
+#[derive(Debug, Eq, PartialEq, Hash)]
+struct ItemEntry {
+ url: String,
+ name: String,
+}
+
+impl ItemEntry {
+ fn new(mut url: String, name: String) -> ItemEntry {
+ while url.starts_with('/') {
+ url.remove(0);
+ }
+ ItemEntry {
+ url,
+ name,
+ }
+ }
+}
+
+impl fmt::Display for ItemEntry {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "<a href='{}'>{}</a>", self.url, Escape(&self.name))
+ }
+}
+
+impl PartialOrd for ItemEntry {
+ fn partial_cmp(&self, other: &ItemEntry) -> Option<::std::cmp::Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl Ord for ItemEntry {
+ fn cmp(&self, other: &ItemEntry) -> ::std::cmp::Ordering {
+ self.name.cmp(&other.name)
+ }
+}
+
+#[derive(Debug)]
+struct AllTypes {
+ structs: HashSet<ItemEntry>,
+ enums: HashSet<ItemEntry>,
+ unions: HashSet<ItemEntry>,
+ primitives: HashSet<ItemEntry>,
+ traits: HashSet<ItemEntry>,
+ macros: HashSet<ItemEntry>,
+ functions: HashSet<ItemEntry>,
+ typedefs: HashSet<ItemEntry>,
+ statics: HashSet<ItemEntry>,
+ constants: HashSet<ItemEntry>,
+}
+
+impl AllTypes {
+ fn new() -> AllTypes {
+ AllTypes {
+ structs: HashSet::with_capacity(100),
+ enums: HashSet::with_capacity(100),
+ unions: HashSet::with_capacity(100),
+ primitives: HashSet::with_capacity(26),
+ traits: HashSet::with_capacity(100),
+ macros: HashSet::with_capacity(100),
+ functions: HashSet::with_capacity(100),
+ typedefs: HashSet::with_capacity(100),
+ statics: HashSet::with_capacity(100),
+ constants: HashSet::with_capacity(100),
+ }
+ }
+
+ fn append(&mut self, item_name: String, item_type: &ItemType) {
+ let mut url: Vec<_> = item_name.split("::").skip(1).collect();
+ if let Some(name) = url.pop() {
+ let new_url = format!("{}/{}.{}.html", url.join("/"), item_type, name);
+ url.push(name);
+ let name = url.join("::");
+ match *item_type {
+ ItemType::Struct => self.structs.insert(ItemEntry::new(new_url, name)),
+ ItemType::Enum => self.enums.insert(ItemEntry::new(new_url, name)),
+ ItemType::Union => self.unions.insert(ItemEntry::new(new_url, name)),
+ ItemType::Primitive => self.primitives.insert(ItemEntry::new(new_url, name)),
+ ItemType::Trait => self.traits.insert(ItemEntry::new(new_url, name)),
+ ItemType::Macro => self.macros.insert(ItemEntry::new(new_url, name)),
+ ItemType::Function => self.functions.insert(ItemEntry::new(new_url, name)),
+ ItemType::Typedef => self.typedefs.insert(ItemEntry::new(new_url, name)),
+ ItemType::Static => self.statics.insert(ItemEntry::new(new_url, name)),
+ ItemType::Constant => self.constants.insert(ItemEntry::new(new_url, name)),
+ _ => true,
+ };
+ }
+ }
+}
+
+fn print_entries(f: &mut fmt::Formatter, e: &HashSet<ItemEntry>, title: &str,
+ class: &str) -> fmt::Result {
+ if !e.is_empty() {
+ let mut e: Vec<&ItemEntry> = e.iter().collect();
+ e.sort();
+ write!(f, "<h3 id='{}'>{}</h3><ul class='{} docblock'>{}</ul>",
+ title,
+ Escape(title),
+ class,
+ e.iter().map(|s| format!("<li>{}</li>", s)).collect::<String>())?;
+ }
+ Ok(())
+}
+
+impl fmt::Display for AllTypes {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f,
+"<h1 class='fqn'>\
+ <span class='in-band'>List of all items</span>\
+ <span class='out-of-band'>\
+ <span id='render-detail'>\
+ <a id=\"toggle-all-docs\" href=\"javascript:void(0)\" title=\"collapse all docs\">\
+ [<span class='inner'>−</span>]\
+ </a>\
+ </span>
+ </span>
+</h1>")?;
+ print_entries(f, &self.structs, "Structs", "structs")?;
+ print_entries(f, &self.enums, "Enums", "enums")?;
+ print_entries(f, &self.unions, "Unions", "unions")?;
+ print_entries(f, &self.primitives, "Primitives", "primitives")?;
+ print_entries(f, &self.traits, "Traits", "traits")?;
+ print_entries(f, &self.macros, "Macros", "macros")?;
+ print_entries(f, &self.functions, "Functions", "functions")?;
+ print_entries(f, &self.typedefs, "Typedefs", "typedefs")?;
+ print_entries(f, &self.statics, "Statics", "statics")?;
+ print_entries(f, &self.constants, "Constants", "constants")
+ }
+}
+
impl Context {
/// String representation of how to get back to the root path of the 'doc/'
/// folder in terms of a relative URL.
Some(i) => i,
None => return Ok(()),
};
+ let final_file = self.dst.join(&krate.name)
+ .join("all.html");
+ let crate_name = krate.name.clone();
item.name = Some(krate.name);
- // Render the crate documentation
- let mut work = vec![(self, item)];
+ let mut all = AllTypes::new();
+
+ {
+ // Render the crate documentation
+ let mut work = vec![(self.clone(), item)];
- while let Some((mut cx, item)) = work.pop() {
- cx.item(item, |cx, item| {
- work.push((cx.clone(), item))
- })?
+ while let Some((mut cx, item)) = work.pop() {
+ cx.item(item, &mut all, |cx, item| {
+ work.push((cx.clone(), item))
+ })?
+ }
+ }
+
+ let mut w = BufWriter::new(try_err!(File::create(&final_file), &final_file));
+ let mut root_path = self.dst.to_str().expect("invalid path").to_owned();
+ if !root_path.ends_with('/') {
+ root_path.push('/');
}
+ let page = layout::Page {
+ title: "List of all items in this crate",
+ css_class: "mod",
+ root_path: "../",
+ description: "List of all items in this crate",
+ keywords: BASIC_KEYWORDS,
+ resource_suffix: &self.shared.resource_suffix,
+ };
+ let sidebar = if let Some(ref version) = cache().crate_version {
+ format!("<p class='location'>Crate {}</p>\
+ <div class='block version'>\
+ <p>Version {}</p>\
+ </div>\
+ <a id='all-types' href='index.html'><p>Back to index</p></a>",
+ crate_name, version)
+ } else {
+ String::new()
+ };
+ try_err!(layout::render(&mut w, &self.shared.layout,
+ &page, &sidebar, &all,
+ self.shared.css_file_extension.is_some(),
+ &self.shared.themes),
+ &final_file);
Ok(())
}
/// all sub-items which need to be rendered.
///
/// The rendering driver uses this closure to queue up more work.
- fn item<F>(&mut self, item: clean::Item, mut f: F) -> Result<(), Error> where
- F: FnMut(&mut Context, clean::Item),
+ fn item<F>(&mut self, item: clean::Item, all: &mut AllTypes, mut f: F) -> Result<(), Error>
+ where F: FnMut(&mut Context, clean::Item),
{
// Stripped modules survive the rustdoc passes (i.e. `strip-private`)
// if they contain impls for public types. These modules can also
}
for item in m.items {
- f(this,item);
+ f(this, item);
}
Ok(())
let mut dst = try_err!(File::create(&joint_dst), &joint_dst);
try_err!(dst.write_all(&buf), &joint_dst);
+ all.append(full_path(self, &item), &item_type);
// Redirect from a sane URL using the namespace to Rustdoc's
// URL for the page.
let redir_name = format!("{}.{}.html", name, item_type.name_space());
let redir_dst = self.dst.join(redir_name);
if let Ok(redirect_out) = OpenOptions::new().create_new(true)
- .write(true)
- .open(&redir_dst) {
+ .write(true)
+ .open(&redir_dst) {
let mut redirect_out = BufWriter::new(redirect_out);
try_err!(layout::redirect(&mut redirect_out, file_name), &redir_dst);
}
version)?;
}
write!(fmt,
- r##"<span id='render-detail'>
- <a id="toggle-all-docs" href="javascript:void(0)" title="collapse all docs">
- [<span class='inner'>−</span>]
- </a>
- </span>"##)?;
+ "<span id='render-detail'>\
+ <a id=\"toggle-all-docs\" href=\"javascript:void(0)\" \
+ title=\"collapse all docs\">\
+ [<span class='inner'>−</span>]\
+ </a>\
+ </span>")?;
// Write `src` tag
//
}
fn render_impls(cx: &Context, w: &mut fmt::Formatter,
- traits: Vec<&&Impl>,
+ traits: &[&&Impl],
containing_item: &clean::Item) -> Result<(), fmt::Error> {
- for i in &traits {
+ for i in traits {
let did = i.trait_did().unwrap();
let assoc_link = AssocItemLink::GotoSource(did, &i.inner_impl().provided_trait_methods);
render_impl(w, cx, i, assoc_link,
}
fn render_attribute(attr: &ast::MetaItem) -> Option<String> {
- let name = attr.name();
+ let name = attr.ident.name;
if attr.is_word() {
Some(format!("{}", name))
.iter()
.partition::<Vec<_>, _>(|t| t.inner_impl().synthetic);
- write!(w, "
- <h2 id='implementations' class='small-section-header'>
- Trait Implementations<a href='#implementations' class='anchor'></a>
- </h2>
- <div id='implementations-list'>
- ")?;
- render_impls(cx, w, concrete, containing_item)?;
- write!(w, "</div>")?;
+ struct RendererStruct<'a, 'b, 'c>(&'a Context, Vec<&'b &'b Impl>, &'c clean::Item);
+
+ impl<'a, 'b, 'c> fmt::Display for RendererStruct<'a, 'b, 'c> {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ render_impls(self.0, fmt, &self.1, self.2)
+ }
+ }
+
+ let impls = format!("{}", RendererStruct(cx, concrete, containing_item));
+ if !impls.is_empty() {
+ write!(w, "
+ <h2 id='implementations' class='small-section-header'>
+ Trait Implementations<a href='#implementations' class='anchor'></a>
+ </h2>
+ <div id='implementations-list'>{}</div>", impls)?;
+ }
if !synthetic.is_empty() {
write!(w, "
</h2>
<div id='synthetic-implementations-list'>
")?;
- render_impls(cx, w, synthetic, containing_item)?;
+ render_impls(cx, w, &synthetic, containing_item)?;
write!(w, "</div>")?;
}
}
if it.is_struct() || it.is_trait() || it.is_primitive() || it.is_union()
|| it.is_enum() || it.is_mod() || it.is_typedef() {
- write!(fmt, "<p class='location'>")?;
- match it.inner {
- clean::StructItem(..) => write!(fmt, "Struct ")?,
- clean::TraitItem(..) => write!(fmt, "Trait ")?,
- clean::PrimitiveItem(..) => write!(fmt, "Primitive Type ")?,
- clean::UnionItem(..) => write!(fmt, "Union ")?,
- clean::EnumItem(..) => write!(fmt, "Enum ")?,
- clean::TypedefItem(..) => write!(fmt, "Type Definition ")?,
- clean::ForeignTypeItem => write!(fmt, "Foreign Type ")?,
- clean::ModuleItem(..) => if it.is_crate() {
- write!(fmt, "Crate ")?;
- } else {
- write!(fmt, "Module ")?;
+ write!(fmt, "<p class='location'>{}{}</p>",
+ match it.inner {
+ clean::StructItem(..) => "Struct ",
+ clean::TraitItem(..) => "Trait ",
+ clean::PrimitiveItem(..) => "Primitive Type ",
+ clean::UnionItem(..) => "Union ",
+ clean::EnumItem(..) => "Enum ",
+ clean::TypedefItem(..) => "Type Definition ",
+ clean::ForeignTypeItem => "Foreign Type ",
+ clean::ModuleItem(..) => if it.is_crate() {
+ "Crate "
+ } else {
+ "Module "
+ },
+ _ => "",
},
- _ => (),
- }
- write!(fmt, "{}", it.name.as_ref().unwrap())?;
- write!(fmt, "</p>")?;
+ it.name.as_ref().unwrap())?;
}
if it.is_crate() {
write!(fmt,
"<div class='block version'>\
<p>Version {}</p>\
- </div>",
- version)?;
+ </div>
+ <a id='all-types' href='all.html'><p>See all {}'s items</p></a>",
+ version,
+ it.name.as_ref().unwrap())?;
}
}
};
}
+ function getPageId() {
+ var id = document.location.href.split('#')[1];
+ if (id) {
+ return id.split('?')[0].split('&')[0];
+ }
+ return null;
+ }
+
function hasClass(elem, className) {
if (elem && className && elem.className) {
var elemClass = elem.className;
}
}
- function toggleAllDocs() {
+ function toggleAllDocs(pageId) {
var toggle = document.getElementById("toggle-all-docs");
if (hasClass(toggle, "will-expand")) {
updateLocalStorage("rustdoc-collapse", "false");
toggle.title = "expand all docs";
onEach(document.getElementsByClassName("collapse-toggle"), function(e) {
- collapseDocs(e, "hide");
+ collapseDocs(e, "hide", pageId);
});
}
}
- function collapseDocs(toggle, mode) {
+ function collapseDocs(toggle, mode, pageId) {
if (!toggle || !toggle.parentNode) {
return;
}
}
}
- var relatedDoc = toggle.parentNode;
+ var parentElem = toggle.parentNode;
+ var relatedDoc = parentElem;
var docblock = relatedDoc.nextElementSibling;
while (!hasClass(relatedDoc, "impl-items")) {
relatedDoc = relatedDoc.nextElementSibling;
}
- if (!relatedDoc && !hasClass(docblock, "docblock")) {
+ if ((!relatedDoc && !hasClass(docblock, "docblock")) ||
+ (pageId && onEach(relatedDoc.childNodes, function(e) {
+ return e.id === pageId;
+ }) === true)) {
return;
}
}
}
- function autoCollapseAllImpls() {
+ function autoCollapseAllImpls(pageId) {
// Automatically minimize all non-inherent impls
onEach(document.getElementsByClassName('impl'), function(n) {
// inherent impl ids are like 'impl' or impl-<number>'
if (!inherent) {
onEach(n.childNodes, function(m) {
if (hasClass(m, "collapse-toggle")) {
- collapseDocs(m, "hide");
+ collapseDocs(m, "hide", pageId);
}
});
}
}
})
- autoCollapseAllImpls();
+ autoCollapseAllImpls(getPageId());
function createToggleWrapper() {
var span = document.createElement('span');
};
if (getCurrentValue("rustdoc-collapse") === "true") {
- toggleAllDocs();
+ toggleAllDocs(getPageId());
}
}());
left: -5px;
}
.small-section-header > .anchor {
+ left: -20px;
+}
+.small-section-header > .anchor:not(.field) {
left: -28px;
}
.anchor:before {
font-size: 19px;
display: block;
}
+
+#main > ul {
+ padding-left: 10px;
+}
+#main > ul > li {
+ list-style: none;
+}
+#all-types {
+ text-align: center;
+ border: 1px solid;
+ margin: 0 10px;
+ margin-bottom: 10px;
+ display: block;
+ border-radius: 7px;
+}
+#all-types > p {
+ margin: 5px 0;
+}
\ No newline at end of file
if (arr && arr.length > 0 && func) {
for (var i = 0; i < arr.length; i++) {
if (func(arr[i]) === true) {
- break;
+ return true;
}
}
}
+ return false;
}
function updateLocalStorage(name, value) {
background: #f0f0f0;
}
}
+
+#all-types {
+ background-color: #505050;
+}
+#all-types:hover {
+ background-color: #606060;
+}
background: #fff;
}
}
+
+#all-types {
+ background-color: #fff;
+}
+#all-types:hover {
+ background-color: #f9f9f9;
+}
\ No newline at end of file
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
html_playground_url = "https://play.rust-lang.org/")]
-#![deny(warnings)]
#![feature(ascii_ctype)]
#![feature(rustc_private)]
#![feature(box_syntax)]
#![feature(fs_read_write)]
#![feature(set_stdio)]
-#![cfg_attr(stage0, feature(slice_patterns))]
#![feature(test)]
#![feature(unicode)]
#![feature(vec_remove_item)]
let mut opts = TestOptions::default();
opts.no_crate_inject = true;
+ opts.display_warnings = display_warnings;
let mut collector = Collector::new(input.to_owned(), cfgs, libs, externs,
true, opts, maybe_sysroot, None,
Some(PathBuf::from(input)),
#[derive(Clone, Default)]
pub struct TestOptions {
+ /// Whether to disable the default `extern crate my_crate;` when creating doctests.
pub no_crate_inject: bool,
+ /// Whether to emit compilation warnings when compiling doctests. Setting this will suppress
+ /// the default `#![allow(unused)]`.
+ pub display_warnings: bool,
+ /// Additional crate-level attributes to add to doctests.
pub attrs: Vec<String>,
}
let crate_name = crate_name.unwrap_or_else(|| {
::rustc_trans_utils::link::find_crate_name(None, &hir_forest.krate().attrs, &input)
});
- let opts = scrape_test_config(hir_forest.krate());
+ let mut opts = scrape_test_config(hir_forest.krate());
+ opts.display_warnings |= display_warnings;
let mut collector = Collector::new(crate_name,
cfgs,
libs,
let mut opts = TestOptions {
no_crate_inject: false,
+ display_warnings: false,
attrs: Vec::new(),
};
let mut line_offset = 0;
let mut prog = String::new();
- if opts.attrs.is_empty() {
+ if opts.attrs.is_empty() && !opts.display_warnings {
// If there aren't any attributes supplied by #![doc(test(attr(...)))], then allow some
// lints that are commonly triggered in doctests. The crate-level test attributes are
// commonly used to make tests fail in case they trigger warnings, so having this there in
//adding it anyway
let opts = TestOptions {
no_crate_inject: true,
+ display_warnings: false,
attrs: vec![],
};
let input =
let output = make_test(input, None, true, &opts);
assert_eq!(output, (expected.clone(), 1));
}
+
+ #[test]
+ fn make_test_display_warnings() {
+ // If the user is asking to display doctest warnings, suppress the default allow(unused).
+ let mut opts = TestOptions::default();
+ opts.display_warnings = true;
+ let input =
+"assert_eq!(2+2, 4);";
+ let expected =
+"fn main() {
+assert_eq!(2+2, 4);
+}".to_string();
+ let output = make_test(input, None, false, &opts);
+ assert_eq!(output, (expected.clone(), 1));
+ }
}
html_root_url = "https://doc.rust-lang.org/nightly/",
html_playground_url = "https://play.rust-lang.org/",
test(attr(allow(unused_variables), deny(warnings))))]
-#![deny(warnings)]
#![feature(box_syntax)]
#![feature(core_intrinsics)]
-#![cfg_attr(stage0, feature(i128_type))]
#![feature(specialization)]
#![cfg_attr(test, feature(test))]
ArgsOs { inner: sys::args::args() }
}
-#[stable(feature = "env_unimpl_send_sync", since = "1.25.0")]
+#[stable(feature = "env_unimpl_send_sync", since = "1.26.0")]
impl !Send for Args {}
-#[stable(feature = "env_unimpl_send_sync", since = "1.25.0")]
+#[stable(feature = "env_unimpl_send_sync", since = "1.26.0")]
impl !Sync for Args {}
#[stable(feature = "env", since = "1.0.0")]
}
}
-#[stable(feature = "env_unimpl_send_sync", since = "1.25.0")]
+#[stable(feature = "env_unimpl_send_sync", since = "1.26.0")]
impl !Send for ArgsOs {}
-#[stable(feature = "env_unimpl_send_sync", since = "1.25.0")]
+#[stable(feature = "env_unimpl_send_sync", since = "1.26.0")]
impl !Sync for ArgsOs {}
#[stable(feature = "env", since = "1.0.0")]
// Tell the compiler to link to either panic_abort or panic_unwind
#![needs_panic_runtime]
-// Turn warnings into errors, but only after stage0, where it can be useful for
-// code to emit warnings during language transitions
-#![cfg_attr(not(stage0), deny(warnings))]
-
// std may use features in a platform-specific way
#![allow(unused_features)]
#![feature(float_from_str_radix)]
#![feature(fn_traits)]
#![feature(fnbox)]
-#![feature(generic_param_attrs)]
+#![cfg_attr(stage0, feature(generic_param_attrs))]
#![feature(hashmap_internals)]
#![feature(heap_api)]
-#![cfg_attr(stage0, feature(i128_type, i128))]
#![feature(int_error_internals)]
#![feature(integer_atomics)]
#![feature(into_cow)]
#![feature(doc_spotlight)]
#![cfg_attr(test, feature(update_panic_count))]
#![cfg_attr(windows, feature(used))]
-#![cfg_attr(stage0, feature(never_type))]
-#![cfg_attr(stage0, feature(termination_trait))]
#![default_lib_allocator]
// add a new crate name so we can attach the re-exports to it.
#[macro_reexport(assert_eq, assert_ne, debug_assert, debug_assert_eq,
debug_assert_ne, unreachable, unimplemented, write, writeln, try)]
-#[cfg_attr(stage0, macro_reexport(assert))]
extern crate core as __core;
#[macro_use]
// compiler-rt intrinsics
#[doc(masked)]
+#[cfg(stage0)]
extern crate compiler_builtins;
// During testing, this crate is not actually the "real" std library, but rather
// it links to the real std library, which was compiled from this same source
// code. So any lang items std defines are conditionally excluded (or else they
-// wolud generate duplicate lang item errors), and any globals it defines are
+// would generate duplicate lang item errors), and any globals it defines are
// _not_ the globals used by "real" std. So this import, defined only during
// testing gives test-std access to real-std lang items and globals. See #2912
#[cfg(test)] extern crate std as realstd;
// * By default everything is unwind safe
// * pointers T contains mutability of some form are not unwind safe
// * Unique, an owning pointer, lifts an implementation
-// * Types like Mutex/RwLock which are explicilty poisoned are unwind safe
+// * Types like Mutex/RwLock which are explicitly poisoned are unwind safe
// * Our custom AssertUnwindSafe wrapper is indeed unwind safe
#[stable(feature = "catch_unwind", since = "1.9.0")]
data: *mut u8,
data_ptr: *mut usize,
vtable_ptr: *mut usize) -> u32;
- #[cfg_attr(stage0, unwind)]
- #[cfg_attr(not(stage0), unwind(allowed))]
+ #[unwind(allowed)]
fn __rust_start_panic(data: usize, vtable: usize) -> u32;
}
/// Entry point of panic from the libcore crate.
#[cfg(not(test))]
#[lang = "panic_fmt"]
-#[cfg_attr(stage0, unwind)]
-#[cfg_attr(not(stage0), unwind(allowed))]
+#[unwind(allowed)]
pub extern fn rust_begin_panic(msg: fmt::Arguments,
file: &'static str,
line: u32,
#[unstable(feature = "termination_trait_lib", issue = "43301")]
impl Termination for () {
+ #[inline]
fn report(self) -> i32 { ExitCode::SUCCESS.report() }
}
#[unstable(feature = "termination_trait_lib", issue = "43301")]
impl Termination for ExitCode {
+ #[inline]
fn report(self) -> i32 {
self.0.as_i32()
}
pub const SUCCESS: ExitCode = ExitCode(EXIT_SUCCESS as _);
pub const FAILURE: ExitCode = ExitCode(EXIT_FAILURE as _);
+ #[inline]
pub fn as_i32(&self) -> i32 {
self.0 as i32
}
impl<'a> AsyncPipe<'a> {
fn new(pipe: Handle, dst: &'a mut Vec<u8>) -> io::Result<AsyncPipe<'a>> {
// Create an event which we'll use to coordinate our overlapped
- // opreations, this event will be used in WaitForMultipleObjects
+ // operations, this event will be used in WaitForMultipleObjects
// and passed as part of the OVERLAPPED handle.
//
// Note that we do a somewhat clever thing here by flagging the
pub const SUCCESS: ExitCode = ExitCode(EXIT_SUCCESS as _);
pub const FAILURE: ExitCode = ExitCode(EXIT_FAILURE as _);
+ #[inline]
pub fn as_i32(&self) -> i32 {
self.0 as i32
}
[dependencies]
core = { path = "../libcore" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
html_playground_url = "https://play.rust-lang.org/",
issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
test(no_crate_inject, attr(allow(unused_variables), deny(warnings))))]
-#![deny(warnings)]
#![deny(missing_debug_implementations)]
#![no_std]
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Label {
pub ident: Ident,
- pub span: Span,
}
impl fmt::Debug for Label {
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime {
pub id: NodeId,
- pub span: Span,
pub ident: Ident,
}
impl<'a> PartialEq<&'a str> for Path {
fn eq(&self, string: &&'a str) -> bool {
- self.segments.len() == 1 && self.segments[0].identifier.name == *string
+ self.segments.len() == 1 && self.segments[0].ident.name == *string
}
}
impl Path {
// convert a span and an identifier to the corresponding
// 1-segment path
- pub fn from_ident(s: Span, identifier: Ident) -> Path {
- Path {
- span: s,
- segments: vec![PathSegment::from_ident(identifier, s)],
- }
+ pub fn from_ident(ident: Ident) -> Path {
+ Path { segments: vec![PathSegment::from_ident(ident)], span: ident.span }
}
// Make a "crate root" segment for this path unless it already has it
// or starts with something like `self`/`super`/`$crate`/etc.
pub fn make_root(&self) -> Option<PathSegment> {
- if let Some(ident) = self.segments.get(0).map(|seg| seg.identifier) {
+ if let Some(ident) = self.segments.get(0).map(|seg| seg.ident) {
if ::parse::token::is_path_segment_keyword(ident) &&
ident.name != keywords::Crate.name() {
return None;
}
pub fn is_global(&self) -> bool {
- !self.segments.is_empty() && self.segments[0].identifier.name == keywords::CrateRoot.name()
+ !self.segments.is_empty() && self.segments[0].ident.name == keywords::CrateRoot.name()
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct PathSegment {
/// The identifier portion of this path segment.
- pub identifier: Ident,
- /// Span of the segment identifier.
- pub span: Span,
+ pub ident: Ident,
/// Type/lifetime parameters attached to this path. They come in
/// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`.
}
impl PathSegment {
- pub fn from_ident(ident: Ident, span: Span) -> Self {
- PathSegment { identifier: ident, span: span, parameters: None }
+ pub fn from_ident(ident: Ident) -> Self {
+ PathSegment { ident, parameters: None }
}
pub fn crate_root(span: Span) -> Self {
- PathSegment {
- identifier: Ident { ctxt: span.ctxt(), ..keywords::CrateRoot.ident() },
- span,
- parameters: None,
- }
+ PathSegment::from_ident(Ident::new(keywords::CrateRoot.name(), span))
}
}
pub fn span(&self) -> Span {
match self {
&TraitTyParamBound(ref t, ..) => t.span,
- &RegionTyParamBound(ref l) => l.span,
+ &RegionTyParamBound(ref l) => l.ident.span,
}
}
}
pub id: NodeId,
pub bounds: TyParamBounds,
pub default: Option<P<Ty>>,
- pub span: Span,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
for param in &self.params {
if let GenericParam::Type(ref t) = *param {
if t.ident.name == name {
- return Some(t.span);
+ return Some(t.ident.span);
}
}
}
/// E.g. `#[test]`, `#[derive(..)]` or `#[feature = "foo"]`
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct MetaItem {
- pub name: Name,
+ pub ident: Ident,
pub node: MetaItemKind,
pub span: Span,
}
let node = match &self.node {
PatKind::Wild => TyKind::Infer,
PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None) =>
- TyKind::Path(None, Path::from_ident(ident.span, ident.node)),
+ TyKind::Path(None, Path::from_ident(*ident)),
PatKind::Path(qself, path) => TyKind::Path(qself.clone(), path.clone()),
PatKind::Mac(mac) => TyKind::Mac(mac.clone()),
PatKind::Ref(pat, mutbl) =>
/// or a unit struct/variant pattern, or a const pattern (in the last two cases the third
/// field must be `None`). Disambiguation cannot be done with parser alone, so it happens
/// during name resolution.
- Ident(BindingMode, SpannedIdent, Option<P<Pat>>),
+ Ident(BindingMode, Ident, Option<P<Pat>>),
/// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`.
/// The `bool` is `true` in the presence of a `..`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Field {
- pub ident: SpannedIdent,
+ pub ident: Ident,
pub expr: P<Expr>,
pub span: Span,
pub is_shorthand: bool,
pub attrs: ThinVec<Attribute>,
}
-pub type SpannedIdent = Spanned<Ident>;
-
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum BlockCheckMode {
Default,
/// For example, `a += 1`.
AssignOp(BinOp, P<Expr>, P<Expr>),
/// Access of a named struct field (`obj.foo`)
- Field(P<Expr>, SpannedIdent),
+ Field(P<Expr>, Ident),
/// Access of an unnamed field of a struct or tuple-struct
///
/// For example, `foo.0`.
impl Arg {
pub fn to_self(&self) -> Option<ExplicitSelf> {
if let PatKind::Ident(BindingMode::ByValue(mutbl), ident, _) = self.pat.node {
- if ident.node.name == keywords::SelfValue.name() {
+ if ident.name == keywords::SelfValue.name() {
return match self.ty.node {
TyKind::ImplicitSelf => Some(respan(self.pat.span, SelfKind::Value(mutbl))),
TyKind::Rptr(lt, MutTy{ref ty, mutbl}) if ty.node == TyKind::ImplicitSelf => {
pub fn is_self(&self) -> bool {
if let PatKind::Ident(_, ident, _) = self.pat.node {
- ident.node.name == keywords::SelfValue.name()
+ ident.name == keywords::SelfValue.name()
} else {
false
}
}
- pub fn from_self(eself: ExplicitSelf, eself_ident: SpannedIdent) -> Arg {
+ pub fn from_self(eself: ExplicitSelf, eself_ident: Ident) -> Arg {
let span = eself.span.to(eself_ident.span);
let infer_ty = P(Ty {
id: DUMMY_NODE_ID,
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct Variant_ {
- pub name: Ident,
+ pub ident: Ident,
pub attrs: Vec<Attribute>,
pub data: VariantData,
/// Explicit discriminant, e.g. `Foo = 1`
match self.kind {
UseTreeKind::Simple(Some(rename)) => rename,
UseTreeKind::Simple(None) =>
- self.prefix.segments.last().expect("empty prefix in a simple import").identifier,
+ self.prefix.segments.last().expect("empty prefix in a simple import").ident,
_ => panic!("`UseTree::ident` can only be used on a simple import"),
}
}
Static(P<Ty>, bool),
/// A foreign type
Ty,
+ /// A macro invocation
+ Macro(Mac),
}
impl ForeignItemKind {
ForeignItemKind::Fn(..) => "foreign function",
ForeignItemKind::Static(..) => "foreign static item",
ForeignItemKind::Ty => "foreign type",
+ ForeignItemKind::Macro(..) => "macro in foreign module",
}
}
}
use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind};
use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind};
use codemap::{Spanned, respan, dummy_spanned};
-use syntax_pos::{Span, DUMMY_SP};
+use syntax_pos::Span;
use errors::Handler;
use feature_gate::{Features, GatedCfg};
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
/// Returns the name of the meta item, e.g. `foo` in `#[foo]`,
/// `#[foo="bar"]` and `#[foo(bar)]`, if self is a MetaItem
pub fn name(&self) -> Option<Name> {
- self.meta_item().and_then(|meta_item| Some(meta_item.name()))
+ self.meta_item().and_then(|meta_item| Some(meta_item.ident.name))
}
/// Gets the string value if self is a MetaItem and the MetaItem is a
if meta_item_list.len() == 1 {
let nested_item = &meta_item_list[0];
if nested_item.is_literal() {
- Some((meta_item.name(), nested_item.literal().unwrap()))
+ Some((meta_item.ident.name, nested_item.literal().unwrap()))
} else {
None
}
pub fn name(&self) -> Option<Name> {
match self.path.segments.len() {
- 1 => Some(self.path.segments[0].identifier.name),
+ 1 => Some(self.path.segments[0].ident.name),
_ => None,
}
}
}
impl MetaItem {
- pub fn name(&self) -> Name {
- self.name
- }
-
pub fn value_str(&self) -> Option<Symbol> {
match self.node {
MetaItemKind::NameValue(ref v) => {
pub fn span(&self) -> Span { self.span }
pub fn check_name(&self, name: &str) -> bool {
- self.name() == name
+ self.ident.name == name
}
pub fn is_value_str(&self) -> bool {
pub fn meta(&self) -> Option<MetaItem> {
let mut tokens = self.tokens.trees().peekable();
Some(MetaItem {
- name: match self.path.segments.len() {
- 1 => self.path.segments[0].identifier.name,
+ ident: match self.path.segments.len() {
+ 1 => self.path.segments[0].ident,
_ => return None,
},
node: if let Some(node) = MetaItemKind::from_tokens(&mut tokens) {
}
Ok(MetaItem {
- name: self.path.segments.last().unwrap().identifier.name,
+ ident: self.path.segments.last().unwrap().ident,
node: self.parse(sess, |parser| parser.parse_meta_item_kind())?,
span: self.span,
})
if self.is_sugared_doc {
let comment = self.value_str().unwrap();
let meta = mk_name_value_item_str(
- Symbol::intern("doc"),
- Symbol::intern(&strip_doc_comment_decoration(&comment.as_str())));
+ Ident::from_str("doc"),
+ dummy_spanned(Symbol::intern(&strip_doc_comment_decoration(&comment.as_str()))));
let mut attr = if self.style == ast::AttrStyle::Outer {
mk_attr_outer(self.span, self.id, meta)
} else {
/* Constructors */
-pub fn mk_name_value_item_str(name: Name, value: Symbol) -> MetaItem {
- let value_lit = dummy_spanned(LitKind::Str(value, ast::StrStyle::Cooked));
- mk_spanned_name_value_item(DUMMY_SP, name, value_lit)
+pub fn mk_name_value_item_str(ident: Ident, value: Spanned<Symbol>) -> MetaItem {
+ let value = respan(value.span, LitKind::Str(value.node, ast::StrStyle::Cooked));
+ mk_name_value_item(ident.span.to(value.span), ident, value)
}
-pub fn mk_name_value_item(name: Name, value: ast::Lit) -> MetaItem {
- mk_spanned_name_value_item(DUMMY_SP, name, value)
+pub fn mk_name_value_item(span: Span, ident: Ident, value: ast::Lit) -> MetaItem {
+ MetaItem { ident, span, node: MetaItemKind::NameValue(value) }
}
-pub fn mk_list_item(name: Name, items: Vec<NestedMetaItem>) -> MetaItem {
- mk_spanned_list_item(DUMMY_SP, name, items)
+pub fn mk_list_item(span: Span, ident: Ident, items: Vec<NestedMetaItem>) -> MetaItem {
+ MetaItem { ident, span, node: MetaItemKind::List(items) }
}
-pub fn mk_list_word_item(name: Name) -> ast::NestedMetaItem {
- dummy_spanned(NestedMetaItemKind::MetaItem(mk_spanned_word_item(DUMMY_SP, name)))
+pub fn mk_word_item(ident: Ident) -> MetaItem {
+ MetaItem { ident, span: ident.span, node: MetaItemKind::Word }
}
-
-pub fn mk_word_item(name: Name) -> MetaItem {
- mk_spanned_word_item(DUMMY_SP, name)
-}
-
-pub fn mk_spanned_name_value_item(sp: Span, name: Name, value: ast::Lit) -> MetaItem {
- MetaItem { span: sp, name: name, node: MetaItemKind::NameValue(value) }
-}
-
-pub fn mk_spanned_list_item(sp: Span, name: Name, items: Vec<NestedMetaItem>) -> MetaItem {
- MetaItem { span: sp, name: name, node: MetaItemKind::List(items) }
-}
-
-pub fn mk_spanned_word_item(sp: Span, name: Name) -> MetaItem {
- MetaItem { span: sp, name: name, node: MetaItemKind::Word }
+pub fn mk_nested_word_item(ident: Ident) -> NestedMetaItem {
+ respan(ident.span, NestedMetaItemKind::MetaItem(mk_word_item(ident)))
}
pub fn mk_attr_id() -> AttrId {
Attribute {
id,
style: ast::AttrStyle::Inner,
- path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)),
+ path: ast::Path::from_ident(item.ident),
tokens: item.node.tokens(item.span),
is_sugared_doc: false,
span: sp,
Attribute {
id,
style: ast::AttrStyle::Outer,
- path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)),
+ path: ast::Path::from_ident(item.ident),
tokens: item.node.tokens(item.span),
is_sugared_doc: false,
span: sp,
Attribute {
id,
style,
- path: ast::Path::from_ident(span, ast::Ident::from_str("doc")),
+ path: ast::Path::from_ident(Ident::from_str("doc").with_span_pos(span)),
tokens: MetaItemKind::NameValue(lit).tokens(span),
is_sugared_doc: true,
span,
item.check_name("feature") &&
item.meta_item_list().map(|list| {
list.iter().any(|mi| {
- mi.word().map(|w| w.name() == feature_name)
+ mi.word().map(|w| w.ident.name == feature_name)
.unwrap_or(false)
})
}).unwrap_or(false)
if let (Some(feats), Some(gated_cfg)) = (features, GatedCfg::gate(cfg)) {
gated_cfg.check_and_emit(sess, feats);
}
- sess.config.contains(&(cfg.name(), cfg.value_str()))
+ sess.config.contains(&(cfg.ident.name, cfg.value_str()))
})
}
// The unwraps below may look dangerous, but we've already asserted
// that they won't fail with the loop above.
- match &*cfg.name.as_str() {
+ match &*cfg.ident.name.as_str() {
"any" => mis.iter().any(|mi| {
eval_condition(mi.meta_item().unwrap(), sess, eval)
}),
let meta = meta.as_ref().unwrap();
let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
if item.is_some() {
- handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));
+ handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.ident.name));
return false
}
if let Some(v) = meta.value_str() {
)+
for meta in metas {
if let Some(mi) = meta.meta_item() {
- match &*mi.name().as_str() {
+ match &*mi.ident.name.as_str() {
$(
stringify!($name)
=> if !get(mi, &mut $name) { continue 'outer },
)+
_ => {
handle_errors(diagnostic, mi.span,
- AttrError::UnknownMetaItem(mi.name()));
+ AttrError::UnknownMetaItem(mi.ident.name));
continue 'outer
}
}
}
}
- match &*meta.name.as_str() {
+ match &*meta.ident.name.as_str() {
"rustc_deprecated" => {
if rustc_depr.is_some() {
span_err!(diagnostic, item_sp, E0540,
let mut issue = None;
for meta in metas {
if let Some(mi) = meta.meta_item() {
- match &*mi.name().as_str() {
+ match &*mi.ident.name.as_str() {
"feature" => if !get(mi, &mut feature) { continue 'outer },
"reason" => if !get(mi, &mut reason) { continue 'outer },
"issue" => if !get(mi, &mut issue) { continue 'outer },
_ => {
handle_errors(diagnostic, meta.span,
- AttrError::UnknownMetaItem(mi.name()));
+ AttrError::UnknownMetaItem(mi.ident.name));
continue 'outer
}
}
let mut since = None;
for meta in metas {
if let NestedMetaItemKind::MetaItem(ref mi) = meta.node {
- match &*mi.name().as_str() {
+ match &*mi.ident.name.as_str() {
"feature" => if !get(mi, &mut feature) { continue 'outer },
"since" => if !get(mi, &mut since) { continue 'outer },
_ => {
handle_errors(diagnostic, meta.span,
- AttrError::UnknownMetaItem(mi.name()));
+ AttrError::UnknownMetaItem(mi.ident.name));
continue 'outer
}
}
depr = if let Some(metas) = attr.meta_item_list() {
let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
if item.is_some() {
- handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name()));
+ handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.ident.name));
return false
}
if let Some(v) = meta.value_str() {
let mut note = None;
for meta in metas {
if let NestedMetaItemKind::MetaItem(ref mi) = meta.node {
- match &*mi.name().as_str() {
+ match &*mi.ident.name.as_str() {
"since" => if !get(mi, &mut since) { continue 'outer },
"note" => if !get(mi, &mut note) { continue 'outer },
_ => {
handle_errors(diagnostic, meta.span,
- AttrError::UnknownMetaItem(mi.name()));
+ AttrError::UnknownMetaItem(mi.ident.name));
continue 'outer
}
}
let mut recognised = false;
if let Some(mi) = item.word() {
- let word = &*mi.name().as_str();
+ let word = &*mi.ident.name.as_str();
let hint = match word {
"C" => Some(ReprC),
"packed" => Some(ReprPacked),
impl MetaItem {
fn tokens(&self) -> TokenStream {
- let ident = TokenTree::Token(self.span,
- Token::from_ast_ident(Ident::with_empty_ctxt(self.name)));
+ let ident = TokenTree::Token(self.span, Token::from_ast_ident(self.ident));
TokenStream::concat(vec![ident.into(), self.node.tokens(self.span)])
}
fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
where I: Iterator<Item = TokenTree>,
{
- let (span, name) = match tokens.next() {
- Some(TokenTree::Token(span, Token::Ident(ident, _))) => (span, ident.name),
+ let (span, ident) = match tokens.next() {
+ Some(TokenTree::Token(span, Token::Ident(ident, _))) => (span, ident),
Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 {
- token::Nonterminal::NtIdent(ident, _) => (ident.span, ident.node.name),
+ token::Nonterminal::NtIdent(ident, _) => (ident.span, ident),
token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
_ => return None,
},
MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(span.hi()),
_ => span.hi(),
};
- Some(MetaItem { name, node, span: span.with_hi(hi) })
+ Some(MetaItem { ident, node, span: span.with_hi(hi) })
}
}
self.configure(v).map(|v| {
Spanned {
node: ast::Variant_ {
- name: v.node.name,
+ ident: v.node.ident,
attrs: v.node.attrs,
data: self.configure_variant_data(v.node.data),
disr_expr: v.node.disr_expr,
use std::iter;
use std::path::PathBuf;
use std::rc::Rc;
-use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::sync::{self, Lrc};
use std::default::Default;
use tokenstream::{self, TokenStream};
Item(P<ast::Item>),
TraitItem(P<ast::TraitItem>),
ImplItem(P<ast::ImplItem>),
+ ForeignItem(P<ast::ForeignItem>),
Stmt(P<ast::Stmt>),
Expr(P<ast::Expr>),
}
Annotatable::Item(ref item) => &item.attrs,
Annotatable::TraitItem(ref trait_item) => &trait_item.attrs,
Annotatable::ImplItem(ref impl_item) => &impl_item.attrs,
+ Annotatable::ForeignItem(ref foreign_item) => &foreign_item.attrs,
Annotatable::Stmt(ref stmt) => stmt.attrs(),
Annotatable::Expr(ref expr) => &expr.attrs,
}
Annotatable::Item(item) => Annotatable::Item(item.map_attrs(f)),
Annotatable::TraitItem(trait_item) => Annotatable::TraitItem(trait_item.map_attrs(f)),
Annotatable::ImplItem(impl_item) => Annotatable::ImplItem(impl_item.map_attrs(f)),
+ Annotatable::ForeignItem(foreign_item) =>
+ Annotatable::ForeignItem(foreign_item.map_attrs(f)),
Annotatable::Stmt(stmt) => Annotatable::Stmt(stmt.map_attrs(f)),
Annotatable::Expr(expr) => Annotatable::Expr(expr.map_attrs(f)),
}
Annotatable::Item(ref item) => item.span,
Annotatable::TraitItem(ref trait_item) => trait_item.span,
Annotatable::ImplItem(ref impl_item) => impl_item.span,
+ Annotatable::ForeignItem(ref foreign_item) => foreign_item.span,
Annotatable::Stmt(ref stmt) => stmt.span,
Annotatable::Expr(ref expr) => expr.span,
}
}
}
+ pub fn expect_foreign_item(self) -> ast::ForeignItem {
+ match self {
+ Annotatable::ForeignItem(i) => i.into_inner(),
+ _ => panic!("expected foreign item")
+ }
+ }
+
pub fn derive_allowed(&self) -> bool {
match *self {
Annotatable::Item(ref item) => match item.node {
if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt {
if let token::NtIdent(ident, is_raw) = nt.0 {
return tokenstream::TokenTree::Token(ident.span,
- token::Ident(ident.node, is_raw));
+ token::Ident(ident, is_raw));
}
}
fold::noop_fold_tt(tt, self)
None
}
+ /// Create zero or more items in an `extern {}` block
+ fn make_foreign_items(self: Box<Self>) -> Option<SmallVector<ast::ForeignItem>> { None }
+
/// Create a pattern.
fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> {
None
items: SmallVector<P<ast::Item>>,
impl_items: SmallVector<ast::ImplItem>,
trait_items: SmallVector<ast::TraitItem>,
+ foreign_items: SmallVector<ast::ForeignItem>,
stmts: SmallVector<ast::Stmt>,
ty: P<ast::Ty>,
}
self.trait_items
}
+ fn make_foreign_items(self: Box<Self>) -> Option<SmallVector<ast::ForeignItem>> {
+ self.foreign_items
+ }
+
fn make_stmts(self: Box<Self>) -> Option<SmallVector<ast::Stmt>> {
match self.stmts.as_ref().map_or(0, |s| s.len()) {
0 => make_stmts_default!(self),
}
}
+ fn make_foreign_items(self: Box<Self>) -> Option<SmallVector<ast::ForeignItem>> {
+ if self.expr_only {
+ None
+ } else {
+ Some(SmallVector::new())
+ }
+ }
+
fn make_stmts(self: Box<DummyResult>) -> Option<SmallVector<ast::Stmt>> {
Some(SmallVector::one(ast::Stmt {
id: ast::DUMMY_NODE_ID,
/// `#[derive(...)]` is a `MultiItemDecorator`.
///
/// Prefer ProcMacro or MultiModifier since they are more flexible.
- MultiDecorator(Box<MultiItemDecorator>),
+ MultiDecorator(Box<MultiItemDecorator + sync::Sync + sync::Send>),
/// A syntax extension that is attached to an item and modifies it
/// in-place. Also allows decoration, i.e., creating new items.
- MultiModifier(Box<MultiItemModifier>),
+ MultiModifier(Box<MultiItemModifier + sync::Sync + sync::Send>),
/// A function-like procedural macro. TokenStream -> TokenStream.
- ProcMacro(Box<ProcMacro>),
+ ProcMacro(Box<ProcMacro + sync::Sync + sync::Send>),
/// An attribute-like procedural macro. TokenStream, TokenStream -> TokenStream.
/// The first TokenSteam is the attribute, the second is the annotated item.
/// Allows modification of the input items and adding new items, similar to
/// MultiModifier, but uses TokenStreams, rather than AST nodes.
- AttrProcMacro(Box<AttrProcMacro>),
+ AttrProcMacro(Box<AttrProcMacro + sync::Sync + sync::Send>),
/// A normal, function-like syntax extension.
///
/// `bytes!` is a `NormalTT`.
NormalTT {
- expander: Box<TTMacroExpander>,
+ expander: Box<TTMacroExpander + sync::Sync + sync::Send>,
def_info: Option<(ast::NodeId, Span)>,
/// Whether the contents of the macro can
/// directly use `#[unstable]` things (true == yes).
/// A function-like syntax extension that has an extra ident before
/// the block.
///
- IdentTT(Box<IdentMacroExpander>, Option<Span>, bool),
+ IdentTT(Box<IdentMacroExpander + sync::Sync + sync::Send>, Option<Span>, bool),
/// An attribute-like procedural macro. TokenStream -> TokenStream.
/// The input is the annotated item.
/// Allows generating code to implement a Trait for a given struct
/// or enum item.
- ProcMacroDerive(Box<MultiItemModifier>, Vec<Symbol> /* inert attribute names */),
+ ProcMacroDerive(Box<MultiItemModifier +
+ sync::Sync +
+ sync::Send>, Vec<Symbol> /* inert attribute names */),
/// An attribute-like procedural macro that derives a builtin trait.
BuiltinDerive(BuiltinDeriveFn),
/// A declarative macro, e.g. `macro m() {}`.
///
/// The second element is the definition site span.
- DeclMacro(Box<TTMacroExpander>, Option<(ast::NodeId, Span)>),
+ DeclMacro(Box<TTMacroExpander + sync::Sync + sync::Send>, Option<(ast::NodeId, Span)>),
}
impl SyntaxExtension {
ast::Ident::from_str(st)
}
pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> {
- let def_site = SyntaxContext::empty().apply_mark(self.current_expansion.mark);
- iter::once(Ident { ctxt: def_site, ..keywords::DollarCrate.ident() })
+ let def_site = DUMMY_SP.apply_mark(self.current_expansion.mark);
+ iter::once(Ident::new(keywords::DollarCrate.name(), def_site))
.chain(components.iter().map(|s| self.ident_of(s)))
.collect()
}
-> Option<Spanned<(Symbol, ast::StrStyle)>> {
// Update `expr.span`'s ctxt now in case expr is an `include!` macro invocation.
let expr = expr.map(|mut expr| {
- expr.span = expr.span.with_ctxt(expr.span.ctxt().apply_mark(cx.current_expansion.mark));
+ expr.span = expr.span.apply_mark(cx.current_expansion.mark);
expr
});
fn qpath(&self, self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::SpannedIdent)
+ ident: ast::Ident)
-> (ast::QSelf, ast::Path);
fn qpath_all(&self, self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::SpannedIdent,
+ ident: ast::Ident,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>>,
bindings: Vec<ast::TypeBinding>)
types: Vec<P<ast::Ty>>,
bindings: Vec<ast::TypeBinding> )
-> ast::Path {
- let last_identifier = idents.pop().unwrap();
+ let last_ident = idents.pop().unwrap();
let mut segments: Vec<ast::PathSegment> = Vec::new();
- segments.extend(idents.into_iter().map(|i| ast::PathSegment::from_ident(i, span)));
+ segments.extend(idents.into_iter().map(|ident| {
+ ast::PathSegment::from_ident(ident.with_span_pos(span))
+ }));
let parameters = if !lifetimes.is_empty() || !types.is_empty() || !bindings.is_empty() {
ast::AngleBracketedParameterData { lifetimes, types, bindings, span }.into()
} else {
None
};
- segments.push(ast::PathSegment { identifier: last_identifier, span, parameters });
+ segments.push(ast::PathSegment { ident: last_ident.with_span_pos(span), parameters });
let mut path = ast::Path { span, segments };
if global {
if let Some(seg) = path.make_root() {
fn qpath(&self,
self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::SpannedIdent)
+ ident: ast::Ident)
-> (ast::QSelf, ast::Path) {
self.qpath_all(self_type, trait_path, ident, vec![], vec![], vec![])
}
fn qpath_all(&self,
self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::SpannedIdent,
+ ident: ast::Ident,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>>,
bindings: Vec<ast::TypeBinding>)
} else {
None
};
- path.segments.push(ast::PathSegment {
- identifier: ident.node,
- span: ident.span,
- parameters,
- });
+ path.segments.push(ast::PathSegment { ident, parameters });
(ast::QSelf {
ty: self_type,
fn typaram(&self,
span: Span,
- id: ast::Ident,
+ ident: ast::Ident,
attrs: Vec<ast::Attribute>,
bounds: ast::TyParamBounds,
default: Option<P<ast::Ty>>) -> ast::TyParam {
ast::TyParam {
- ident: id,
+ ident: ident.with_span_pos(span),
id: ast::DUMMY_NODE_ID,
attrs: attrs.into(),
bounds,
default,
- span,
}
}
}
fn lifetime(&self, span: Span, ident: ast::Ident) -> ast::Lifetime {
- ast::Lifetime { id: ast::DUMMY_NODE_ID, span: span, ident: ident }
+ ast::Lifetime { id: ast::DUMMY_NODE_ID, ident: ident.with_span_pos(span) }
}
fn lifetime_def(&self,
}
fn expr_field_access(&self, sp: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> {
- let id = Spanned { node: ident, span: sp };
- self.expr(sp, ast::ExprKind::Field(expr, id))
+ self.expr(sp, ast::ExprKind::Field(expr, ident.with_span_pos(sp)))
}
fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: usize) -> P<ast::Expr> {
let id = Spanned { node: idx, span: sp };
ident: ast::Ident,
mut args: Vec<P<ast::Expr>> ) -> P<ast::Expr> {
args.insert(0, expr);
- self.expr(span, ast::ExprKind::MethodCall(ast::PathSegment::from_ident(ident, span), args))
+ let segment = ast::PathSegment::from_ident(ident.with_span_pos(span));
+ self.expr(span, ast::ExprKind::MethodCall(segment, args))
}
fn expr_block(&self, b: P<ast::Block>) -> P<ast::Expr> {
self.expr(b.span, ast::ExprKind::Block(b))
}
- fn field_imm(&self, span: Span, name: Ident, e: P<ast::Expr>) -> ast::Field {
+ fn field_imm(&self, span: Span, ident: Ident, e: P<ast::Expr>) -> ast::Field {
ast::Field {
- ident: respan(span, name),
+ ident: ident.with_span_pos(span),
expr: e,
span,
is_shorthand: false,
span: Span,
ident: ast::Ident,
bm: ast::BindingMode) -> P<ast::Pat> {
- let pat = PatKind::Ident(bm, Spanned{span: span, node: ident}, None);
+ let pat = PatKind::Ident(bm, ident.with_span_pos(span), None);
self.pat(span, pat)
}
fn pat_path(&self, span: Span, path: ast::Path) -> P<ast::Pat> {
body)
}
- fn variant(&self, span: Span, name: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant {
+ fn variant(&self, span: Span, ident: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant {
let fields: Vec<_> = tys.into_iter().map(|ty| {
ast::StructField {
span: ty.span,
respan(span,
ast::Variant_ {
- name,
+ ident,
attrs: Vec::new(),
data: vdata,
disr_expr: None,
}
fn meta_word(&self, sp: Span, w: ast::Name) -> ast::MetaItem {
- attr::mk_spanned_word_item(sp, w)
+ attr::mk_word_item(Ident::with_empty_ctxt(w).with_span_pos(sp))
}
fn meta_list_item_word(&self, sp: Span, w: ast::Name) -> ast::NestedMetaItem {
- respan(sp, ast::NestedMetaItemKind::MetaItem(attr::mk_spanned_word_item(sp, w)))
+ attr::mk_nested_word_item(Ident::with_empty_ctxt(w).with_span_pos(sp))
}
fn meta_list(&self, sp: Span, name: ast::Name, mis: Vec<ast::NestedMetaItem>)
-> ast::MetaItem {
- attr::mk_spanned_list_item(sp, name, mis)
+ attr::mk_list_item(sp, Ident::with_empty_ctxt(name).with_span_pos(sp), mis)
}
fn meta_name_value(&self, sp: Span, name: ast::Name, value: ast::LitKind)
-> ast::MetaItem {
- attr::mk_spanned_name_value_item(sp, name, respan(sp, value))
+ attr::mk_name_value_item(sp, Ident::with_empty_ctxt(name).with_span_pos(sp),
+ respan(sp, value))
}
fn item_use(&self, sp: Span,
pretty_name.push_str(", ");
}
pretty_name.push_str(&path.to_string());
- names.insert(unwrap_or!(path.segments.get(0), continue).identifier.name);
+ names.insert(unwrap_or!(path.segments.get(0), continue).ident.name);
}
pretty_name.push(')');
"trait item", .make_trait_items, lift .fold_trait_item, lift .visit_trait_item;
ImplItems: SmallVector<ast::ImplItem> [SmallVector, ast::ImplItem],
"impl item", .make_impl_items, lift .fold_impl_item, lift .visit_impl_item;
+ ForeignItems: SmallVector<ast::ForeignItem> [SmallVector, ast::ForeignItem],
+ "foreign item", .make_foreign_items, lift .fold_foreign_item, lift .visit_foreign_item;
}
impl ExpansionKind {
Expansion::ImplItems(items.map(Annotatable::expect_impl_item).collect()),
ExpansionKind::TraitItems =>
Expansion::TraitItems(items.map(Annotatable::expect_trait_item).collect()),
+ ExpansionKind::ForeignItems =>
+ Expansion::ForeignItems(items.map(Annotatable::expect_foreign_item).collect()),
_ => unreachable!(),
}
}
path_str.push_str("::");
}
- if segment.identifier.name != keywords::CrateRoot.name() &&
- segment.identifier.name != keywords::DollarCrate.name()
+ if segment.ident.name != keywords::CrateRoot.name() &&
+ segment.ident.name != keywords::DollarCrate.name()
{
- path_str.push_str(&segment.identifier.name.as_str())
+ path_str.push_str(&segment.ident.name.as_str())
}
}
Annotatable::ImplItem(item) => {
Annotatable::ImplItem(item.map(|item| cfg.fold_impl_item(item).pop().unwrap()))
}
+ Annotatable::ForeignItem(item) => {
+ Annotatable::ForeignItem(
+ item.map(|item| cfg.fold_foreign_item(item).pop().unwrap())
+ )
+ }
Annotatable::Stmt(stmt) => {
Annotatable::Stmt(stmt.map(|stmt| cfg.fold_stmt(stmt).pop().unwrap()))
}
Annotatable::Item(item) => token::NtItem(item),
Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
+ Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
Annotatable::Expr(expr) => token::NtExpr(expr),
})).into();
opt_expanded
} else {
let msg = format!("non-{kind} macro in {kind} position: {name}",
- name = path.segments[0].identifier.name, kind = kind.name());
+ name = path.segments[0].ident.name, kind = kind.name());
self.cx.span_err(path.span, &msg);
self.cx.trace_macros_diag();
kind.dummy(span)
invoc.expansion_data.mark.set_expn_info(expn_info);
let span = span.with_ctxt(self.cx.backtrace());
let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this
- name: keywords::Invalid.name(),
+ ident: keywords::Invalid.ident(),
span: DUMMY_SP,
node: ast::MetaItemKind::Word,
};
}
Expansion::ImplItems(items)
}
+ ExpansionKind::ForeignItems => {
+ let mut items = SmallVector::new();
+ while self.token != token::Eof {
+ if let Some(item) = self.parse_foreign_item()? {
+ items.push(item);
+ }
+ }
+ Expansion::ForeignItems(items)
+ }
ExpansionKind::Stmts => {
let mut stmts = SmallVector::new();
while self.token != token::Eof &&
noop_fold_foreign_mod(self.cfg.configure_foreign_mod(foreign_mod), self)
}
+ fn fold_foreign_item(&mut self,
+ foreign_item: ast::ForeignItem) -> SmallVector<ast::ForeignItem> {
+ let (attr, traits, foreign_item) = self.classify_item(foreign_item);
+
+ let explain = if self.cx.ecfg.proc_macro_enabled() {
+ feature_gate::EXPLAIN_PROC_MACROS_IN_EXTERN
+ } else {
+ feature_gate::EXPLAIN_MACROS_IN_EXTERN
+ };
+
+ if attr.is_some() || !traits.is_empty() {
+ if !self.cx.ecfg.macros_in_extern_enabled() {
+ if let Some(ref attr) = attr {
+ emit_feature_err(&self.cx.parse_sess, "macros_in_extern", attr.span,
+ GateIssue::Language, explain);
+ }
+ }
+
+ let item = Annotatable::ForeignItem(P(foreign_item));
+ return self.collect_attr(attr, traits, item, ExpansionKind::ForeignItems)
+ .make_foreign_items();
+ }
+
+ if let ast::ForeignItemKind::Macro(mac) = foreign_item.node {
+ self.check_attributes(&foreign_item.attrs);
+
+ if !self.cx.ecfg.macros_in_extern_enabled() {
+ emit_feature_err(&self.cx.parse_sess, "macros_in_extern", foreign_item.span,
+ GateIssue::Language, explain);
+ }
+
+ return self.collect_bang(mac, foreign_item.span, ExpansionKind::ForeignItems)
+ .make_foreign_items();
+ }
+
+ noop_fold_foreign_item(foreign_item, self)
+ }
+
fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind {
match item {
ast::ItemKind::MacroDef(..) => item,
let include_info = vec![
dummy_spanned(ast::NestedMetaItemKind::MetaItem(
- attr::mk_name_value_item_str("file".into(),
- file))),
+ attr::mk_name_value_item_str(Ident::from_str("file"),
+ dummy_spanned(file)))),
dummy_spanned(ast::NestedMetaItemKind::MetaItem(
- attr::mk_name_value_item_str("contents".into(),
- (&*src).into()))),
+ attr::mk_name_value_item_str(Ident::from_str("contents"),
+ dummy_spanned(Symbol::intern(&src))))),
];
- items.push(dummy_spanned(ast::NestedMetaItemKind::MetaItem(
- attr::mk_list_item("include".into(), include_info))));
+ let include_ident = Ident::from_str("include");
+ let item = attr::mk_list_item(DUMMY_SP, include_ident, include_info);
+ items.push(dummy_spanned(ast::NestedMetaItemKind::MetaItem(item)));
}
Err(_) => {
self.cx.span_err(at.span,
}
}
- let meta = attr::mk_list_item("doc".into(), items);
+ let meta = attr::mk_list_item(DUMMY_SP, Ident::from_str("doc"), items);
match at.style {
ast::AttrStyle::Inner =>
Some(attr::mk_spanned_attr_inner(at.span, at.id, meta)),
fn enable_allow_internal_unstable = allow_internal_unstable,
fn enable_custom_derive = custom_derive,
fn proc_macro_enabled = proc_macro,
+ fn macros_in_extern_enabled = macros_in_extern,
}
}
impl Folder for Marker {
fn fold_ident(&mut self, mut ident: Ident) -> Ident {
- ident.ctxt = ident.ctxt.apply_mark(self.0);
+ ident.span = ident.span.apply_mark(self.0);
ident
}
fn new_span(&mut self, span: Span) -> Span {
- span.with_ctxt(span.ctxt().apply_mark(self.0))
+ span.apply_mark(self.0)
}
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
defaultness: ast::Defaultness::Final,
tokens: None,
})),
+ ExpansionKind::ForeignItems => Expansion::ForeignItems(SmallVector::one(ast::ForeignItem {
+ id, span, ident, vis, attrs,
+ node: ast::ForeignItemKind::Macro(mac_placeholder()),
+ })),
ExpansionKind::Pat => Expansion::Pat(P(ast::Pat {
id, span, node: ast::PatKind::Mac(mac_placeholder()),
})),
}
}
+ fn fold_foreign_item(&mut self, item: ast::ForeignItem) -> SmallVector<ast::ForeignItem> {
+ match item.node {
+ ast::ForeignItemKind::Macro(_) => self.remove(item.id).make_foreign_items(),
+ _ => noop_fold_foreign_item(item, self),
+ }
+ }
+
fn fold_expr(&mut self, expr: P<ast::Expr>) -> P<ast::Expr> {
match expr.node {
ast::ExprKind::Mac(_) => self.remove(expr.id).make_expr(),
impl ToTokens for ast::Ident {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Token(DUMMY_SP, Token::from_ast_ident(*self))]
+ vec![TokenTree::Token(self.span, Token::from_ast_ident(*self))]
}
}
impl ToTokens for ast::Lifetime {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Token(DUMMY_SP, token::Lifetime(self.ident))]
+ vec![TokenTree::Token(self.ident.span, token::Lifetime(self.ident))]
}
}
inner.push(TokenTree::Token(self.span, token::Colon).into());
}
inner.push(TokenTree::Token(
- self.span, token::Token::from_ast_ident(segment.identifier)
+ self.span, token::Token::from_ast_ident(segment.ident)
).into());
}
inner.push(self.tokens.clone());
use ast::Ident;
use syntax_pos::{self, BytePos, Span};
-use codemap::respan;
use errors::FatalError;
use ext::tt::quoted::{self, TokenTree};
use parse::{Directory, ParseSess};
fn token_name_eq(t1: &Token, t2: &Token) -> bool {
if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
id1.name == id2.name && is_raw1 == is_raw2
- } else if let (&token::Lifetime(id1), &token::Lifetime(id2)) = (t1, t2) {
+ } else if let (Some(id1), Some(id2)) = (t1.lifetime(), t2.lifetime()) {
id1.name == id2.name
} else {
*t1 == *t2
"ty" => token::NtTy(panictry!(p.parse_ty())),
// this could be handled like a token, since it is one
"ident" => if let Some((ident, is_raw)) = get_macro_ident(&p.token) {
+ let span = p.span;
p.bump();
- token::NtIdent(respan(p.prev_span, ident), is_raw)
+ token::NtIdent(Ident::new(ident.name, span), is_raw)
} else {
let token_str = pprust::token_to_string(&p.token);
p.fatal(&format!("expected ident, found {}", &token_str)).emit();
"path" => token::NtPath(panictry!(p.parse_path_common(PathStyle::Type, false))),
"meta" => token::NtMeta(panictry!(p.parse_meta_item())),
"vis" => token::NtVis(panictry!(p.parse_visibility(true))),
- "lifetime" => token::NtLifetime(p.expect_lifetime()),
+ "lifetime" => token::NtLifetime(p.expect_lifetime().ident),
// this is not supposed to happen, since it has been checked
// when compiling the macro.
_ => p.span_bug(sp, "invalid fragment specifier"),
}
// Make sure we don't have any tokens left to parse so we don't silently drop anything.
- let path = ast::Path::from_ident(site_span, macro_ident);
+ let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
parser.ensure_complete_parse(&path, kind.name(), site_span);
expansion
}
// `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
// metavariable that names the crate of the invocation.
Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
- let (ident, _) = token.ident().unwrap();
+ let (ident, is_raw) = token.ident().unwrap();
let span = ident_span.with_lo(span.lo());
- if ident.name == keywords::Crate.name() {
- let ident = ast::Ident {
- name: keywords::DollarCrate.name(),
- ..ident
- };
- TokenTree::Token(span, token::Ident(ident, false))
+ if ident.name == keywords::Crate.name() && !is_raw {
+ let ident = ast::Ident::new(keywords::DollarCrate.name(), ident.span);
+ TokenTree::Token(span, token::Ident(ident, is_raw))
} else {
TokenTree::MetaVar(span, ident)
}
if let NtTT(ref tt) = **nt {
result.push(tt.clone().into());
} else {
- sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
+ sp = sp.apply_mark(cx.current_expansion.mark);
let token = TokenTree::Token(sp, Token::interpolated((**nt).clone()));
result.push(token.into());
}
}
} else {
let ident =
- Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident };
- sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
+ Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
+ sp = sp.apply_mark(cx.current_expansion.mark);
result.push(TokenTree::Token(sp, token::Dollar).into());
result.push(TokenTree::Token(sp, token::Token::from_ast_ident(ident)).into());
}
}
quoted::TokenTree::Delimited(mut span, delimited) => {
- span = span.with_ctxt(span.ctxt().apply_mark(cx.current_expansion.mark));
+ span = span.apply_mark(cx.current_expansion.mark);
stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
result_stack.push(mem::replace(&mut result, Vec::new()));
}
(active, rustc_attrs, "1.0.0", Some(29642), None),
// Allows the use of non lexical lifetimes; RFC 2094
- (active, nll, "1.0.0", Some(43234), None),
+ (active, nll, "1.0.0", Some(43234), Some(Edition::Edition2018)),
// Allows the use of #[allow_internal_unstable]. This is an
// attribute on macro_rules! and can't use the attribute handling
// rustc internal
(active, compiler_builtins, "1.13.0", None, None),
- // Allows attributes on lifetime/type formal parameters in generics (RFC 1327)
- (active, generic_param_attrs, "1.11.0", Some(34761), None),
-
// Allows #[link(..., cfg(..))]
(active, link_cfg, "1.14.0", Some(37406), None),
(active, dyn_trait, "1.22.0", Some(44662), Some(Edition::Edition2018)),
// `crate` as visibility modifier, synonymous to `pub(crate)`
- (active, crate_visibility_modifier, "1.23.0", Some(45388), None),
+ (active, crate_visibility_modifier, "1.23.0", Some(45388), Some(Edition::Edition2018)),
// extern types
(active, extern_types, "1.23.0", Some(43467), None),
(active, arbitrary_self_types, "1.23.0", Some(44874), None),
// `crate` in paths
- (active, crate_in_paths, "1.23.0", Some(45477), None),
+ (active, crate_in_paths, "1.23.0", Some(45477), Some(Edition::Edition2018)),
// In-band lifetime bindings (e.g. `fn foo(x: &'a u8) -> &'a u8`)
- (active, in_band_lifetimes, "1.23.0", Some(44524), None),
+ (active, in_band_lifetimes, "1.23.0", Some(44524), Some(Edition::Edition2018)),
// generic associated types (RFC 1598)
(active, generic_associated_types, "1.23.0", Some(44265), None),
(active, extern_absolute_paths, "1.24.0", Some(44660), None),
// `foo.rs` as an alternative to `foo/mod.rs`
- (active, non_modrs_mods, "1.24.0", Some(44660), None),
+ (active, non_modrs_mods, "1.24.0", Some(44660), Some(Edition::Edition2018)),
// Termination trait in tests (RFC 1937)
- (active, termination_trait_test, "1.24.0", Some(48854), None),
+ (active, termination_trait_test, "1.24.0", Some(48854), Some(Edition::Edition2018)),
// Allows use of the :lifetime macro fragment specifier
(active, macro_lifetime_matcher, "1.24.0", Some(46895), None),
// Allows keywords to be escaped for use as identifiers
(active, raw_identifiers, "1.26.0", Some(48589), None),
+
+ // Allows macro invocations in `extern {}` blocks
+ (active, macros_in_extern, "1.27.0", Some(49476), None),
);
declare_features! (
// allow empty structs and enum variants with braces
(accepted, braced_empty_structs, "1.8.0", Some(29720), None),
// Allows indexing into constant arrays.
- (accepted, const_indexing, "1.24.0", Some(29947), None),
+ (accepted, const_indexing, "1.26.0", Some(29947), None),
(accepted, default_type_params, "1.0.0", None, None),
(accepted, globs, "1.0.0", None, None),
(accepted, if_let, "1.0.0", None, None),
(accepted, match_default_bindings, "1.26.0", Some(42640), None),
// allow `'_` placeholder lifetimes
(accepted, underscore_lifetimes, "1.26.0", Some(44524), None),
+ // Allows attributes on lifetime/type formal parameters in generics (RFC 1327)
+ (accepted, generic_param_attrs, "1.26.0", Some(48848), None),
);
// If you change this, please modify src/doc/unstable-book as well. You must
impl GatedCfg {
pub fn gate(cfg: &ast::MetaItem) -> Option<GatedCfg> {
- let name = cfg.name().as_str();
+ let name = cfg.ident.name.as_str();
GATED_CFGS.iter()
.position(|info| info.0 == name)
.map(|idx| {
pub const EXPLAIN_MACRO_AT_MOST_ONCE_REP: &'static str =
"Using the `?` macro Kleene operator for \"at most one\" repetition is unstable";
+pub const EXPLAIN_MACROS_IN_EXTERN: &'static str =
+ "Macro invocations in `extern {}` blocks are experimental.";
+
+// mention proc-macros when enabled
+pub const EXPLAIN_PROC_MACROS_IN_EXTERN: &'static str =
+ "Macro and proc-macro invocations in `extern {}` blocks are experimental.";
+
struct PostExpansionVisitor<'a> {
context: &'a Context<'a>,
}
gate_feature_post!(&self, extern_types, i.span,
"extern types are experimental");
}
+ ast::ForeignItemKind::Macro(..) => {}
}
visit::walk_foreign_item(self, i)
fn visit_path(&mut self, path: &'a ast::Path, _id: NodeId) {
for segment in &path.segments {
- if segment.identifier.name == keywords::Crate.name() {
- gate_feature_post!(&self, crate_in_paths, segment.span,
+ // Identifiers we are going to check could come from a legacy macro (e.g. `#[test]`).
+ // For such macros, identifiers must have an empty context, because this context is
+ // used during name resolution, and produced names must be unhygienic for compatibility.
+ // On the other hand, we need the actual non-empty context for feature gate checking
+ // because it's hygienic even for legacy macros. As previously stated, such context
+ // cannot be kept in identifiers, so it's kept in paths instead and we take it from
+ // there while keeping location info from the ident span.
+ let span = segment.ident.span.with_ctxt(path.span.ctxt());
+ if segment.ident.name == keywords::Crate.name() {
+ gate_feature_post!(&self, crate_in_paths, span,
"`crate` in paths is experimental");
- } else if segment.identifier.name == keywords::Extern.name() {
- gate_feature_post!(&self, extern_in_paths, segment.span,
+ } else if segment.ident.name == keywords::Extern.name() {
+ gate_feature_post!(&self, extern_in_paths, span,
"`extern` in paths is experimental");
}
}
}
visit::walk_vis(self, vis);
}
-
- fn visit_generic_param(&mut self, param: &'a ast::GenericParam) {
- let (attrs, explain) = match *param {
- ast::GenericParam::Lifetime(ref ld) =>
- (&ld.attrs, "attributes on lifetime bindings are experimental"),
- ast::GenericParam::Type(ref t) =>
- (&t.attrs, "attributes on type parameter bindings are experimental"),
- };
-
- if !attrs.is_empty() {
- gate_feature_post!(&self, generic_param_attrs, attrs[0].span, explain);
- }
-
- visit::walk_generic_param(self, param)
- }
}
pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute],
for mi in list {
let name = if let Some(word) = mi.word() {
- word.name()
+ word.ident.name
} else {
span_err!(span_handler, mi.span, E0556,
"malformed feature, expected just one word");
noop_fold_use_tree(use_tree, self)
}
- fn fold_foreign_item(&mut self, ni: ForeignItem) -> ForeignItem {
+ fn fold_foreign_item(&mut self, ni: ForeignItem) -> SmallVector<ForeignItem> {
noop_fold_foreign_item(ni, self)
}
+ fn fold_foreign_item_simple(&mut self, ni: ForeignItem) -> ForeignItem {
+ noop_fold_foreign_item_simple(ni, self)
+ }
+
fn fold_item(&mut self, i: P<Item>) -> SmallVector<P<Item>> {
noop_fold_item(i, self)
}
fld: &mut T) -> ForeignMod {
ForeignMod {
abi,
- items: items.move_map(|x| fld.fold_foreign_item(x)),
+ items: items.move_flat_map(|x| fld.fold_foreign_item(x)),
}
}
pub fn noop_fold_variant<T: Folder>(v: Variant, fld: &mut T) -> Variant {
Spanned {
node: Variant_ {
- name: fld.fold_ident(v.node.name),
+ ident: fld.fold_ident(v.node.ident),
attrs: fold_attrs(v.node.attrs, fld),
data: fld.fold_variant_data(v.node.data),
disr_expr: v.node.disr_expr.map(|e| fld.fold_expr(e)),
}
}
-pub fn noop_fold_ident<T: Folder>(i: Ident, _: &mut T) -> Ident {
- i
+pub fn noop_fold_ident<T: Folder>(ident: Ident, fld: &mut T) -> Ident {
+ Ident::new(ident.name, fld.new_span(ident.span))
}
pub fn noop_fold_usize<T: Folder>(i: usize, _: &mut T) -> usize {
pub fn noop_fold_path<T: Folder>(Path { segments, span }: Path, fld: &mut T) -> Path {
Path {
- segments: segments.move_map(|PathSegment {identifier, span, parameters}| PathSegment {
- identifier: fld.fold_ident(identifier),
- span: fld.new_span(span),
+ segments: segments.move_map(|PathSegment {ident, parameters}| PathSegment {
+ ident: fld.fold_ident(ident),
parameters: parameters.map(|ps| ps.map(|ps| fld.fold_path_parameters(ps))),
}),
span: fld.new_span(span)
pub fn noop_fold_meta_item<T: Folder>(mi: MetaItem, fld: &mut T) -> MetaItem {
MetaItem {
- name: mi.name,
+ ident: mi.ident,
node: match mi.node {
MetaItemKind::Word => MetaItemKind::Word,
MetaItemKind::List(mis) => {
token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)),
token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)),
token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)),
- token::NtIdent(id, is_raw) =>
- token::NtIdent(Spanned::<Ident>{node: fld.fold_ident(id.node), ..id}, is_raw),
+ token::NtIdent(ident, is_raw) => token::NtIdent(fld.fold_ident(ident), is_raw),
+ token::NtLifetime(ident) => token::NtLifetime(fld.fold_ident(ident)),
token::NtMeta(meta) => token::NtMeta(fld.fold_meta_item(meta)),
token::NtPath(path) => token::NtPath(fld.fold_path(path)),
token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)),
token::NtWhereClause(fld.fold_where_clause(where_clause)),
token::NtArg(arg) => token::NtArg(fld.fold_arg(arg)),
token::NtVis(vis) => token::NtVis(fld.fold_vis(vis)),
- token::NtLifetime(lifetime) => token::NtLifetime(fld.fold_lifetime(lifetime)),
+ token::NtForeignItem(ni) =>
+ token::NtForeignItem(fld.fold_foreign_item(ni)
+ // see reasoning above
+ .expect_one("expected fold to produce exactly one item")),
}
}
}
pub fn noop_fold_ty_param<T: Folder>(tp: TyParam, fld: &mut T) -> TyParam {
- let TyParam {attrs, id, ident, bounds, default, span} = tp;
+ let TyParam {attrs, id, ident, bounds, default} = tp;
let attrs: Vec<_> = attrs.into();
TyParam {
attrs: attrs.into_iter()
ident: fld.fold_ident(ident),
bounds: fld.fold_bounds(bounds),
default: default.map(|x| fld.fold_ty(x)),
- span: fld.new_span(span),
}
}
pub fn noop_fold_label<T: Folder>(label: Label, fld: &mut T) -> Label {
Label {
ident: fld.fold_ident(label.ident),
- span: fld.new_span(label.span),
}
}
Lifetime {
id: fld.new_id(l.id),
ident: fld.fold_ident(l.ident),
- span: fld.new_span(l.span)
}
}
pub fn noop_fold_field<T: Folder>(f: Field, folder: &mut T) -> Field {
Field {
- ident: respan(f.ident.span, folder.fold_ident(f.ident.node)),
+ ident: folder.fold_ident(f.ident),
expr: folder.fold_expr(f.expr),
span: folder.new_span(f.span),
is_shorthand: f.is_shorthand,
}
}
-pub fn noop_fold_foreign_item<T: Folder>(ni: ForeignItem, folder: &mut T) -> ForeignItem {
+pub fn noop_fold_foreign_item<T: Folder>(ni: ForeignItem, folder: &mut T)
+-> SmallVector<ForeignItem> {
+ SmallVector::one(folder.fold_foreign_item_simple(ni))
+}
+
+pub fn noop_fold_foreign_item_simple<T: Folder>(ni: ForeignItem, folder: &mut T) -> ForeignItem {
ForeignItem {
id: folder.new_id(ni.id),
vis: folder.fold_vis(ni.vis),
ForeignItemKind::Static(folder.fold_ty(t), m)
}
ForeignItemKind::Ty => ForeignItemKind::Ty,
+ ForeignItemKind::Macro(mac) => ForeignItemKind::Macro(folder.fold_mac(mac)),
},
span: folder.new_span(ni.span)
}
id: folder.new_id(id),
node: match node {
PatKind::Wild => PatKind::Wild,
- PatKind::Ident(binding_mode, pth1, sub) => {
+ PatKind::Ident(binding_mode, ident, sub) => {
PatKind::Ident(binding_mode,
- Spanned{span: folder.new_span(pth1.span),
- node: folder.fold_ident(pth1.node)},
- sub.map(|x| folder.fold_pat(x)))
+ folder.fold_ident(ident),
+ sub.map(|x| folder.fold_pat(x)))
}
PatKind::Lit(e) => PatKind::Lit(folder.fold_expr(e)),
PatKind::TupleStruct(pth, pats, ddpos) => {
ExprKind::MethodCall(seg, args) => {
ExprKind::MethodCall(
PathSegment {
- identifier: folder.fold_ident(seg.identifier),
- span: folder.new_span(seg.span),
+ ident: folder.fold_ident(seg.ident),
parameters: seg.parameters.map(|ps| {
ps.map(|ps| folder.fold_path_parameters(ps))
}),
folder.fold_expr(er))
}
ExprKind::Field(el, ident) => {
- ExprKind::Field(folder.fold_expr(el),
- respan(folder.new_span(ident.span),
- folder.fold_ident(ident.node)))
+ ExprKind::Field(folder.fold_expr(el), folder.fold_ident(ident))
}
- ExprKind::TupField(el, ident) => {
+ ExprKind::TupField(el, index) => {
ExprKind::TupField(folder.fold_expr(el),
- respan(folder.new_span(ident.span),
- folder.fold_usize(ident.node)))
+ respan(folder.new_span(index.span),
+ folder.fold_usize(index.node)))
}
ExprKind::Index(el, er) => {
ExprKind::Index(folder.fold_expr(el), folder.fold_expr(er))
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
test(attr(deny(warnings))))]
-#![deny(warnings)]
#![feature(unicode)]
#![feature(rustc_diagnostic_macros)]
-#![cfg_attr(stage0, feature(match_default_bindings))]
#![feature(non_exhaustive)]
-#![cfg_attr(stage0, feature(i128_type))]
#![feature(const_atomic_usize_new)]
#![feature(rustc_attrs)]
+#![recursion_limit="256"]
+
// See librustc_cratesio_shim/Cargo.toml for a comment explaining this.
#[allow(unused_extern_crates)]
extern crate rustc_cratesio_shim;
};
Ok(if let Some(meta) = meta {
self.bump();
- (ast::Path::from_ident(meta.span, ast::Ident::with_empty_ctxt(meta.name)),
- meta.node.tokens(meta.span))
+ (ast::Path::from_ident(meta.ident), meta.node.tokens(meta.span))
} else {
(self.parse_path(PathStyle::Mod)?, self.parse_tokens())
})
let lo = self.span;
let ident = self.parse_ident()?;
let node = self.parse_meta_item_kind()?;
- Ok(ast::MetaItem { name: ident.name, node: node, span: lo.to(self.prev_span) })
+ Ok(ast::MetaItem { ident, node: node, span: lo.to(self.prev_span) })
}
pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
fn mk_ident(&self, string: &str) -> Ident {
let mut ident = Ident::from_str(string);
if let Some(span) = self.override_span {
- ident.ctxt = span.ctxt();
+ ident.span = span;
}
ident
}
use errors;
use feature_gate::UnstableFeatures;
use parse::token;
- use std::cell::RefCell;
use std::collections::HashSet;
use std::io;
use std::path::PathBuf;
span_diagnostic: errors::Handler::with_emitter(true, false, Box::new(emitter)),
unstable_features: UnstableFeatures::from_environment(),
config: CrateConfig::new(),
- included_mod_stack: RefCell::new(Vec::new()),
+ included_mod_stack: Lock::new(Vec::new()),
code_map: cm,
- missing_fragment_specifiers: RefCell::new(HashSet::new()),
- raw_identifier_spans: RefCell::new(Vec::new()),
+ missing_fragment_specifiers: Lock::new(HashSet::new()),
+ raw_identifier_spans: Lock::new(Vec::new()),
registered_diagnostics: Lock::new(ErrorMap::new()),
- non_modrs_mods: RefCell::new(vec![]),
+ non_modrs_mods: Lock::new(vec![]),
}
}
use tokenstream::{TokenStream, TokenTree};
use diagnostics::plugin::ErrorMap;
-use std::cell::RefCell;
use std::collections::HashSet;
use std::iter;
use std::path::{Path, PathBuf};
pub span_diagnostic: Handler,
pub unstable_features: UnstableFeatures,
pub config: CrateConfig,
- pub missing_fragment_specifiers: RefCell<HashSet<Span>>,
+ pub missing_fragment_specifiers: Lock<HashSet<Span>>,
/// Places where raw identifiers were used. This is used for feature gating
/// raw identifiers
- pub raw_identifier_spans: RefCell<Vec<Span>>,
+ pub raw_identifier_spans: Lock<Vec<Span>>,
/// The registered diagnostics codes
pub registered_diagnostics: Lock<ErrorMap>,
// Spans where a `mod foo;` statement was included in a non-mod.rs file.
// These are used to issue errors if the non_modrs_mods feature is not enabled.
- pub non_modrs_mods: RefCell<Vec<(ast::Ident, Span)>>,
+ pub non_modrs_mods: Lock<Vec<(ast::Ident, Span)>>,
/// Used to determine and report recursive mod inclusions
- included_mod_stack: RefCell<Vec<PathBuf>>,
+ included_mod_stack: Lock<Vec<PathBuf>>,
code_map: Lrc<CodeMap>,
}
span_diagnostic: handler,
unstable_features: UnstableFeatures::from_environment(),
config: HashSet::new(),
- missing_fragment_specifiers: RefCell::new(HashSet::new()),
- raw_identifier_spans: RefCell::new(Vec::new()),
+ missing_fragment_specifiers: Lock::new(HashSet::new()),
+ raw_identifier_spans: Lock::new(Vec::new()),
registered_diagnostics: Lock::new(ErrorMap::new()),
- included_mod_stack: RefCell::new(vec![]),
+ included_mod_stack: Lock::new(vec![]),
code_map,
- non_modrs_mods: RefCell::new(vec![]),
+ non_modrs_mods: Lock::new(vec![]),
}
}
}
fn str2seg(s: &str, lo: u32, hi: u32) -> ast::PathSegment {
- ast::PathSegment::from_ident(Ident::from_str(s), sp(lo, hi))
+ ast::PathSegment::from_ident(Ident::new(Symbol::intern(s), sp(lo, hi)))
}
#[test] fn path_exprs_1() {
== P(ast::Pat{
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable),
- Spanned{ span:sp(0, 1),
- node: Ident::from_str("b")
- },
- None),
+ Ident::new(Symbol::intern("b"), sp(0, 1)),
+ None),
span: sp(0,1)}));
parser_done(parser);
})
node: PatKind::Ident(
ast::BindingMode::ByValue(
ast::Mutability::Immutable),
- Spanned{
- span: sp(6,7),
- node: Ident::from_str("b")},
+ Ident::new(Symbol::intern("b"), sp(6, 7)),
None
),
span: sp(6,7)
use ast::{Label, Lifetime, LifetimeDef, Lit, LitKind, UintTy};
use ast::Local;
use ast::MacStmtStyle;
-use ast::Mac_;
+use ast::{Mac, Mac_};
use ast::{MutTy, Mutability};
use ast::{Pat, PatKind, PathSegment};
use ast::{PolyTraitRef, QSelf};
/// Create a placeholder argument.
fn dummy_arg(span: Span) -> Arg {
- let spanned = Spanned {
- span,
- node: keywords::Invalid.ident()
- };
+ let ident = Ident::new(keywords::Invalid.name(), span);
let pat = P(Pat {
id: ast::DUMMY_NODE_ID,
- node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), spanned, None),
+ node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
span,
});
let ty = Ty {
fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, ast::Ident> {
match self.token {
- token::Ident(i, _) => {
+ token::Ident(ident, _) => {
if self.token.is_reserved_ident() {
let mut err = self.expected_ident_found();
if recover {
return Err(err);
}
}
+ let span = self.span;
self.bump();
- Ok(i)
+ Ok(Ident::new(ident.name, span))
}
_ => {
Err(if self.prev_token_kind == PrevTokenKind::DocComment {
self.check_keyword(keywords::Extern) && self.is_extern_non_path()
}
- fn eat_label(&mut self) -> Option<Label> {
- let ident = match self.token {
- token::Lifetime(ref ident) => *ident,
- token::Interpolated(ref nt) => match nt.0 {
- token::NtLifetime(lifetime) => lifetime.ident,
- _ => return None,
- },
- _ => return None,
- };
- self.bump();
- Some(Label { ident, span: self.prev_span })
- }
-
/// parse a TyKind::BareFn type:
pub fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>)
-> PResult<'a, TyKind> {
None
};
(ident, TraitItemKind::Const(ty, default), ast::Generics::default())
- } else if self.token.is_path_start() && !self.is_extern_non_path() {
+ } else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
// trait item macro.
- // code copied from parse_macro_use_or_failure... abstraction!
- let prev_span = self.prev_span;
- let lo = self.span;
- let pth = self.parse_path(PathStyle::Mod)?;
-
- if pth.segments.len() == 1 {
- if !self.eat(&token::Not) {
- return Err(self.missing_assoc_item_kind_err("trait", prev_span));
- }
- } else {
- self.expect(&token::Not)?;
- }
-
- // eat a matched-delimiter token tree:
- let (delim, tts) = self.expect_delimited_token_tree()?;
- if delim != token::Brace {
- self.expect(&token::Semi)?
- }
-
- let mac = respan(lo.to(self.prev_span), Mac_ { path: pth, tts: tts });
(keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
} else {
let (constness, unsafety, abi) = self.parse_fn_front_matter()?;
pat
} else {
debug!("parse_arg_general ident_to_pat");
- let sp = self.prev_span;
- let spanned = Spanned { span: sp, node: keywords::Invalid.ident() };
+ let ident = Ident::new(keywords::Invalid.name(), self.prev_span);
P(Pat {
id: ast::DUMMY_NODE_ID,
- node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable),
- spanned, None),
- span: sp
+ node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
+ span: ident.span,
})
};
pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> {
match self.token {
- token::Ident(sid, _) if self.token.is_path_segment_keyword() => {
+ token::Ident(ident, _) if self.token.is_path_segment_keyword() => {
+ let span = self.span;
self.bump();
- Ok(sid)
+ Ok(Ident::new(ident.name, span))
}
_ => self.parse_ident(),
- }
- }
+ }
+ }
/// Parses qualified path.
/// Assumes that the leading `<` has been parsed already.
let meta_ident = match self.token {
token::Interpolated(ref nt) => match nt.0 {
token::NtMeta(ref meta) => match meta.node {
- ast::MetaItemKind::Word => Some(ast::Ident::with_empty_ctxt(meta.name)),
+ ast::MetaItemKind::Word => Some(meta.ident),
_ => None,
},
_ => None,
};
if let Some(ident) = meta_ident {
self.bump();
- return Ok(ast::Path::from_ident(self.prev_span, ident));
+ return Ok(ast::Path::from_ident(ident));
}
self.parse_path(style)
}
fn parse_path_segment(&mut self, style: PathStyle, enable_warning: bool)
-> PResult<'a, PathSegment> {
- let ident_span = self.span;
let ident = self.parse_path_segment_ident()?;
let is_args_start = |token: &token::Token| match *token {
ParenthesizedParameterData { inputs, output, span }.into()
};
- PathSegment { identifier: ident, span: ident_span, parameters }
+ PathSegment { ident, parameters }
} else {
// Generic arguments are not found.
- PathSegment::from_ident(ident, ident_span)
+ PathSegment::from_ident(ident)
})
}
/// Parse single lifetime 'a or panic.
pub fn expect_lifetime(&mut self) -> Lifetime {
- if let Some(lifetime) = self.token.lifetime(self.span) {
+ if let Some(ident) = self.token.lifetime() {
+ let span = self.span;
self.bump();
- lifetime
+ Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID }
} else {
self.span_bug(self.span, "not a lifetime")
}
}
+ fn eat_label(&mut self) -> Option<Label> {
+ if let Some(ident) = self.token.lifetime() {
+ let span = self.span;
+ self.bump();
+ Some(Label { ident: Ident::new(ident.name, span) })
+ } else {
+ None
+ }
+ }
+
/// Parse mutability (`mut` or nothing).
fn parse_mutability(&mut self) -> Mutability {
if self.eat_keyword(keywords::Mut) {
pub fn parse_field_name(&mut self) -> PResult<'a, Ident> {
if let token::Literal(token::Integer(name), None) = self.token {
self.bump();
- Ok(Ident::with_empty_ctxt(name))
+ Ok(Ident::new(name, self.prev_span))
} else {
self.parse_ident_common(false)
}
pub fn parse_field(&mut self) -> PResult<'a, Field> {
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
- let hi;
// Check if a colon exists one ahead. This means we're parsing a fieldname.
let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
let fieldname = self.parse_field_name()?;
- hi = self.prev_span;
- self.bump();
+ self.bump(); // `:`
(fieldname, self.parse_expr()?, false)
} else {
let fieldname = self.parse_ident_common(false)?;
- hi = self.prev_span;
// Mimic `x: x` for the `x` field shorthand.
- let path = ast::Path::from_ident(lo.to(hi), fieldname);
- (fieldname, self.mk_expr(lo.to(hi), ExprKind::Path(None, path), ThinVec::new()), true)
+ let path = ast::Path::from_ident(fieldname);
+ let expr = self.mk_expr(fieldname.span, ExprKind::Path(None, path), ThinVec::new());
+ (fieldname, expr, true)
};
Ok(ast::Field {
- ident: respan(lo.to(hi), fieldname),
+ ident: fieldname,
span: lo.to(expr.span),
expr,
is_shorthand,
return self.parse_while_expr(None, lo, attrs);
}
if let Some(label) = self.eat_label() {
- let lo = label.span;
+ let lo = label.ident.span;
self.expect(&token::Colon)?;
if self.eat_keyword(keywords::While) {
return self.parse_while_expr(Some(label), lo, attrs)
}
let span = lo.to(self.prev_span);
- let ident = respan(segment.span, segment.identifier);
- self.mk_expr(span, ExprKind::Field(self_arg, ident), ThinVec::new())
+ self.mk_expr(span, ExprKind::Field(self_arg, segment.ident), ThinVec::new())
}
})
}
}
pub fn process_potential_macro_variable(&mut self) {
- let (ident, is_raw) = match self.token {
+ let (token, span) = match self.token {
token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
self.look_ahead(1, |t| t.is_ident()) => {
self.bump();
}
token::Interpolated(ref nt) => {
self.meta_var_span = Some(self.span);
+ // Interpolated identifier and lifetime tokens are replaced with usual identifier
+ // and lifetime tokens, so the former are never encountered during normal parsing.
match nt.0 {
- token::NtIdent(ident, is_raw) => (ident, is_raw),
+ token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
+ token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
_ => return,
}
}
_ => return,
};
- self.token = token::Ident(ident.node, is_raw);
- self.span = ident.span;
+ self.token = token;
+ self.span = span;
}
/// parse a single token tree from the input.
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), ExprKind::Box(e))
}
- _ => return self.parse_dot_or_call_expr(Some(attrs))
+ token::Ident(..) if self.token.is_ident_named("not") => {
+ // `not` is just an ordinary identifier in Rust-the-language,
+ // but as `rustc`-the-compiler, we can issue clever diagnostics
+ // for confused users who really want to say `!`
+ let token_cannot_continue_expr = |t: &token::Token| match *t {
+ // These tokens can start an expression after `!`, but
+ // can't continue an expression after an ident
+ token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
+ token::Literal(..) | token::Pound => true,
+ token::Interpolated(ref nt) => match nt.0 {
+ token::NtIdent(..) | token::NtExpr(..) |
+ token::NtBlock(..) | token::NtPath(..) => true,
+ _ => false,
+ },
+ _ => false
+ };
+ let cannot_continue_expr = self.look_ahead(1, token_cannot_continue_expr);
+ if cannot_continue_expr {
+ self.bump();
+ // Emit the error ...
+ let mut err = self.diagnostic()
+ .struct_span_err(self.span,
+ &format!("unexpected {} after identifier",
+ self.this_token_descr()));
+ // span the `not` plus trailing whitespace to avoid
+ // trailing whitespace after the `!` in our suggestion
+ let to_replace = self.sess.codemap()
+ .span_until_non_whitespace(lo.to(self.span));
+ err.span_suggestion_short(to_replace,
+ "use `!` to perform logical negation",
+ "!".to_owned());
+ err.emit();
+ // —and recover! (just as if we were in the block
+ // for the `token::Not` arm)
+ let e = self.parse_prefix_expr(None);
+ let (span, e) = self.interpolated_or_expr_span(e)?;
+ (lo.to(span), self.mk_unary(UnOp::Not, e))
+ } else {
+ return self.parse_dot_or_call_expr(Some(attrs));
+ }
+ }
+ _ => { return self.parse_dot_or_call_expr(Some(attrs)); }
};
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
(false, true) => BindingMode::ByValue(Mutability::Mutable),
(false, false) => BindingMode::ByValue(Mutability::Immutable),
};
- let fieldpath = codemap::Spanned{span:self.prev_span, node:fieldname};
let fieldpat = P(Pat {
id: ast::DUMMY_NODE_ID,
- node: PatKind::Ident(bind_type, fieldpath, None),
+ node: PatKind::Ident(bind_type, fieldname, None),
span: boxed_span.to(hi),
});
fn parse_pat_ident(&mut self,
binding_mode: ast::BindingMode)
-> PResult<'a, PatKind> {
- let ident_span = self.span;
let ident = self.parse_ident()?;
- let name = codemap::Spanned{span: ident_span, node: ident};
let sub = if self.eat(&token::At) {
Some(self.parse_pat()?)
} else {
"expected identifier, found enum pattern"))
}
- Ok(PatKind::Ident(binding_mode, name, sub))
+ Ok(PatKind::Ident(binding_mode, ident, sub))
}
/// Parse a local variable declaration
// Which is valid in other languages, but not Rust.
match self.parse_stmt_without_recovery(false) {
Ok(Some(stmt)) => {
+ if self.look_ahead(1, |t| t == &token::OpenDelim(token::Brace)) {
+ // if the next token is an open brace (e.g., `if a b {`), the place-
+ // inside-a-block suggestion would be more likely wrong than right
+ return Err(e);
+ }
let mut stmt_span = stmt.span;
// expand the span to include the semicolon, if it exists
if self.eat(&token::Semi) {
/// Matches typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?
fn parse_ty_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, TyParam> {
- let span = self.span;
let ident = self.parse_ident()?;
// Parse optional colon and param bounds.
id: ast::DUMMY_NODE_ID,
bounds,
default,
- span,
})
}
/// TraitItemAssocTy = Ident ["<"...">"] [":" [TyParamBounds]] ["where" ...] ["=" Ty]
fn parse_trait_item_assoc_ty(&mut self, preceding_attrs: Vec<Attribute>)
-> PResult<'a, (ast::Generics, TyParam)> {
- let span = self.span;
let ident = self.parse_ident()?;
let mut generics = self.parse_generics()?;
id: ast::DUMMY_NODE_ID,
bounds,
default,
- span,
}))
}
fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
let expect_ident = |this: &mut Self| match this.token {
// Preserve hygienic context.
- token::Ident(ident, _) => {
- let sp = this.span; this.bump(); codemap::respan(sp, ident)
- }
+ token::Ident(ident, _) =>
+ { let span = this.span; this.bump(); Ident::new(ident.name, span) }
_ => unreachable!()
};
let isolated_self = |this: &mut Self, n| {
fn missing_assoc_item_kind_err(&mut self, item_type: &str, prev_span: Span)
-> DiagnosticBuilder<'a>
{
+ let expected_kinds = if item_type == "extern" {
+ "missing `fn`, `type`, or `static`"
+ } else {
+ "missing `fn`, `type`, or `const`"
+ };
+
// Given this code `path(`, it seems like this is not
// setting the visibility of a macro invocation, but rather
// a mistyped method declaration.
let sp = prev_span.between(self.prev_span);
let mut err = self.diagnostic().struct_span_err(
sp,
- &format!("missing `fn`, `type`, or `const` for {}-item declaration",
- item_type));
- err.span_label(sp, "missing `fn`, `type`, or `const`");
+ &format!("{} for {}-item declaration",
+ expected_kinds, item_type));
+ err.span_label(sp, expected_kinds);
err
}
-> PResult<'a, (Ident, Vec<Attribute>, ast::Generics,
ast::ImplItemKind)> {
// code copied from parse_macro_use_or_failure... abstraction!
- if self.token.is_path_start() && !self.is_extern_non_path() {
+ if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
// Method macro.
-
- let prev_span = self.prev_span;
-
- let lo = self.span;
- let pth = self.parse_path(PathStyle::Mod)?;
- if pth.segments.len() == 1 {
- if !self.eat(&token::Not) {
- return Err(self.missing_assoc_item_kind_err("impl", prev_span));
- }
- } else {
- self.expect(&token::Not)?;
- }
-
- self.complain_if_pub_macro(&vis.node, prev_span);
-
- // eat a matched-delimiter token tree:
- *at_end = true;
- let (delim, tts) = self.expect_delimited_token_tree()?;
- if delim != token::Brace {
- self.expect(&token::Semi)?
- }
-
- let mac = respan(lo.to(self.prev_span), Mac_ { path: pth, tts: tts });
Ok((keywords::Invalid.ident(), vec![], ast::Generics::default(),
ast::ImplItemKind::Macro(mac)))
} else {
TyKind::Path(None, path) => path,
_ => {
self.span_err(ty_first.span, "expected a trait, found type");
- ast::Path::from_ident(ty_first.span, keywords::Invalid.ident())
+ ast::Path::from_ident(Ident::new(keywords::Invalid.name(), ty_first.span))
}
};
let trait_ref = TraitRef { path, ref_id: ty_first.id };
let attr = Attribute {
id: attr::mk_attr_id(),
style: ast::AttrStyle::Outer,
- path: ast::Path::from_ident(syntax_pos::DUMMY_SP,
- Ident::from_str("warn_directory_ownership")),
+ path: ast::Path::from_ident(Ident::from_str("warn_directory_ownership")),
tokens: TokenStream::empty(),
is_sugared_doc: false,
span: syntax_pos::DUMMY_SP,
}
let vr = ast::Variant_ {
- name: ident,
+ ident,
attrs: variant_attrs,
data: struct_def,
disr_expr,
}
/// Parse a foreign item.
- fn parse_foreign_item(&mut self) -> PResult<'a, Option<ForeignItem>> {
+ pub fn parse_foreign_item(&mut self) -> PResult<'a, Option<ForeignItem>> {
+ maybe_whole!(self, NtForeignItem, |ni| Some(ni));
+
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
let visibility = self.parse_visibility(false)?;
return Ok(Some(self.parse_item_foreign_type(visibility, lo, attrs)?));
}
- // FIXME #5668: this will occur for a macro invocation:
- match self.parse_macro_use_or_failure(attrs, true, false, lo, visibility)? {
- Some(item) => {
- return Err(self.span_fatal(item.span, "macros cannot expand to foreign items"));
+ match self.parse_assoc_macro_invoc("extern", Some(&visibility), &mut false)? {
+ Some(mac) => {
+ Ok(Some(
+ ForeignItem {
+ ident: keywords::Invalid.ident(),
+ span: lo.to(self.prev_span),
+ id: ast::DUMMY_NODE_ID,
+ attrs,
+ vis: visibility,
+ node: ForeignItemKind::Macro(mac),
+ }
+ ))
+ }
+ None => {
+ if !attrs.is_empty() {
+ self.expected_item_err(&attrs);
+ }
+
+ Ok(None)
}
- None => Ok(None)
}
}
Ok(None)
}
+ /// Parse a macro invocation inside a `trait`, `impl` or `extern` block
+ fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>,
+ at_end: &mut bool) -> PResult<'a, Option<Mac>>
+ {
+ if self.token.is_path_start() && !self.is_extern_non_path() {
+ let prev_span = self.prev_span;
+ let lo = self.span;
+ let pth = self.parse_path(PathStyle::Mod)?;
+
+ if pth.segments.len() == 1 {
+ if !self.eat(&token::Not) {
+ return Err(self.missing_assoc_item_kind_err(item_kind, prev_span));
+ }
+ } else {
+ self.expect(&token::Not)?;
+ }
+
+ if let Some(vis) = vis {
+ self.complain_if_pub_macro(&vis.node, prev_span);
+ }
+
+ *at_end = true;
+
+ // eat a matched-delimiter token tree:
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ if delim != token::Brace {
+ self.expect(&token::Semi)?
+ }
+
+ Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts: tts })))
+ } else {
+ Ok(None)
+ }
+ }
+
fn collect_tokens<F, R>(&mut self, f: F) -> PResult<'a, (R, TokenStream)>
where F: FnOnce(&mut Self) -> PResult<'a, R>
{
use tokenstream::{TokenStream, TokenTree};
use tokenstream;
-use std::cell::Cell;
use std::{cmp, fmt};
-use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::sync::{Lrc, Lock};
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
pub enum BinOpToken {
}
}
-fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
+pub(crate) fn ident_can_begin_expr(ident: ast::Ident, is_raw: bool) -> bool {
let ident_token: Token = Ident(ident, is_raw);
!ident_token.is_reserved_ident() ||
}
}
- pub fn ident(&self) -> Option<(ast::Ident, bool)> {
+ /// Returns an identifier if this token is an identifier.
+ pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
match *self {
Ident(ident, is_raw) => Some((ident, is_raw)),
Interpolated(ref nt) => match nt.0 {
- NtIdent(ident, is_raw) => Some((ident.node, is_raw)),
+ NtIdent(ident, is_raw) => Some((ident, is_raw)),
+ _ => None,
+ },
+ _ => None,
+ }
+ }
+ /// Returns a lifetime identifier if this token is a lifetime.
+ pub fn lifetime(&self) -> Option<ast::Ident> {
+ match *self {
+ Lifetime(ident) => Some(ident),
+ Interpolated(ref nt) => match nt.0 {
+ NtLifetime(ident) => Some(ident),
_ => None,
},
_ => None,
}
}
-
/// Returns `true` if the token is an identifier.
pub fn is_ident(&self) -> bool {
self.ident().is_some()
}
+ /// Returns `true` if the token is a lifetime.
+ pub fn is_lifetime(&self) -> bool {
+ self.lifetime().is_some()
+ }
+
+ /// Returns `true` if the token is an identifier whose name is the given
+ /// string slice.
+ pub fn is_ident_named(&self, name: &str) -> bool {
+ match self.ident() {
+ Some((ident, _)) => ident.name.as_str() == name,
+ None => false
+ }
+ }
/// Returns `true` if the token is a documentation comment.
pub fn is_doc_comment(&self) -> bool {
false
}
- /// Returns a lifetime with the span and a dummy id if it is a lifetime,
- /// or the original lifetime if it is an interpolated lifetime, ignoring
- /// the span.
- pub fn lifetime(&self, span: Span) -> Option<ast::Lifetime> {
- match *self {
- Lifetime(ident) =>
- Some(ast::Lifetime { ident: ident, span: span, id: ast::DUMMY_NODE_ID }),
- Interpolated(ref nt) => match nt.0 {
- NtLifetime(lifetime) => Some(lifetime),
- _ => None,
- },
- _ => None,
- }
- }
-
- /// Returns `true` if the token is a lifetime.
- pub fn is_lifetime(&self) -> bool {
- self.lifetime(syntax_pos::DUMMY_SP).is_some()
- }
-
/// Returns `true` if the token is either the `mut` or `const` keyword.
pub fn is_mutability(&self) -> bool {
self.is_keyword(keywords::Mut) ||
}
}
+ /// Returns `true` if the token is either a special identifier or a keyword.
+ pub fn is_reserved_ident(&self) -> bool {
+ match self.ident() {
+ Some((id, false)) => is_reserved_ident(id),
+ _ => false,
+ }
+ }
+
pub fn glue(self, joint: Token) -> Option<Token> {
Some(match self {
Eq => match joint {
}
}
- /// Returns `true` if the token is either a special identifier or a keyword.
- pub fn is_reserved_ident(&self) -> bool {
- match self.ident() {
- Some((id, false)) => is_reserved_ident(id),
- _ => false,
- }
- }
-
pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span)
-> TokenStream
{
tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
}
Nonterminal::NtIdent(ident, is_raw) => {
- let token = Token::Ident(ident.node, is_raw);
+ let token = Token::Ident(ident, is_raw);
tokens = Some(TokenTree::Token(ident.span, token).into());
}
- Nonterminal::NtLifetime(lifetime) => {
- let token = Token::Lifetime(lifetime.ident);
- tokens = Some(TokenTree::Token(lifetime.span, token).into());
+ Nonterminal::NtLifetime(ident) => {
+ let token = Token::Lifetime(ident);
+ tokens = Some(TokenTree::Token(ident.span, token).into());
}
Nonterminal::NtTT(ref tt) => {
tokens = Some(tt.clone().into());
NtPat(P<ast::Pat>),
NtExpr(P<ast::Expr>),
NtTy(P<ast::Ty>),
- NtIdent(ast::SpannedIdent, /* is_raw */ bool),
+ NtIdent(ast::Ident, /* is_raw */ bool),
+ NtLifetime(ast::Ident),
/// Stuff inside brackets for attributes
NtMeta(ast::MetaItem),
NtPath(ast::Path),
NtArm(ast::Arm),
NtImplItem(ast::ImplItem),
NtTraitItem(ast::TraitItem),
+ NtForeignItem(ast::ForeignItem),
NtGenerics(ast::Generics),
NtWhereClause(ast::WhereClause),
NtArg(ast::Arg),
- NtLifetime(ast::Lifetime),
}
impl fmt::Debug for Nonterminal {
NtArm(..) => f.pad("NtArm(..)"),
NtImplItem(..) => f.pad("NtImplItem(..)"),
NtTraitItem(..) => f.pad("NtTraitItem(..)"),
+ NtForeignItem(..) => f.pad("NtForeignItem(..)"),
NtGenerics(..) => f.pad("NtGenerics(..)"),
NtWhereClause(..) => f.pad("NtWhereClause(..)"),
NtArg(..) => f.pad("NtArg(..)"),
}
}
-pub struct LazyTokenStream(Cell<Option<TokenStream>>);
-
-impl Clone for LazyTokenStream {
- fn clone(&self) -> Self {
- let opt_stream = self.0.take();
- self.0.set(opt_stream.clone());
- LazyTokenStream(Cell::new(opt_stream))
- }
-}
+#[derive(Clone)]
+pub struct LazyTokenStream(Lock<Option<TokenStream>>);
impl cmp::Eq for LazyTokenStream {}
impl PartialEq for LazyTokenStream {
impl LazyTokenStream {
pub fn new() -> Self {
- LazyTokenStream(Cell::new(None))
+ LazyTokenStream(Lock::new(None))
}
pub fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream {
- let mut opt_stream = self.0.take();
+ let mut opt_stream = self.0.lock();
if opt_stream.is_none() {
- opt_stream = Some(f());
+ *opt_stream = Some(f());
}
- self.0.set(opt_stream.clone());
opt_stream.clone().unwrap()
}
}
use print::pp::Breaks::{Consistent, Inconsistent};
use ptr::P;
use std_inject;
-use symbol::{Symbol, keywords};
+use symbol::keywords;
use syntax_pos::{DUMMY_SP, FileName};
use tokenstream::{self, TokenStream, TokenTree};
is_expanded: bool) -> io::Result<()> {
let mut s = State::new_from_input(cm, sess, filename, input, out, ann, is_expanded);
- if is_expanded && !std_inject::injected_crate_name().is_none() {
+ if is_expanded && std_inject::injected_crate_name().is_some() {
// We need to print `#![no_std]` (and its feature gate) so that
// compiling pretty-printed source won't inject libstd again.
// However we don't want these attributes in the AST because
// of the feature gate, so we fake them up here.
// #![feature(prelude_import)]
- let prelude_import_meta = attr::mk_list_word_item(Symbol::intern("prelude_import"));
- let list = attr::mk_list_item(Symbol::intern("feature"), vec![prelude_import_meta]);
+ let pi_nested = attr::mk_nested_word_item(ast::Ident::from_str("prelude_import"));
+ let list = attr::mk_list_item(DUMMY_SP, ast::Ident::from_str("feature"), vec![pi_nested]);
let fake_attr = attr::mk_attr_inner(DUMMY_SP, attr::mk_attr_id(), list);
s.print_attribute(&fake_attr)?;
// #![no_std]
- let no_std_meta = attr::mk_word_item(Symbol::intern("no_std"));
+ let no_std_meta = attr::mk_word_item(ast::Ident::from_str("no_std"));
let fake_attr = attr::mk_attr_inner(DUMMY_SP, attr::mk_attr_id(), no_std_meta);
s.print_attribute(&fake_attr)?;
}
token::Shebang(s) => format!("/* shebang: {}*/", s),
token::Interpolated(ref nt) => match nt.0 {
- token::NtExpr(ref e) => expr_to_string(e),
- token::NtMeta(ref e) => meta_item_to_string(e),
- token::NtTy(ref e) => ty_to_string(e),
- token::NtPath(ref e) => path_to_string(e),
- token::NtItem(ref e) => item_to_string(e),
- token::NtBlock(ref e) => block_to_string(e),
- token::NtStmt(ref e) => stmt_to_string(e),
- token::NtPat(ref e) => pat_to_string(e),
- token::NtIdent(ref e, false) => ident_to_string(e.node),
- token::NtIdent(ref e, true) => format!("r#{}", ident_to_string(e.node)),
- token::NtTT(ref tree) => tt_to_string(tree.clone()),
- token::NtArm(ref e) => arm_to_string(e),
- token::NtImplItem(ref e) => impl_item_to_string(e),
- token::NtTraitItem(ref e) => trait_item_to_string(e),
- token::NtGenerics(ref e) => generic_params_to_string(&e.params),
- token::NtWhereClause(ref e) => where_clause_to_string(e),
- token::NtArg(ref e) => arg_to_string(e),
- token::NtVis(ref e) => vis_to_string(e),
- token::NtLifetime(ref e) => lifetime_to_string(e),
+ token::NtExpr(ref e) => expr_to_string(e),
+ token::NtMeta(ref e) => meta_item_to_string(e),
+ token::NtTy(ref e) => ty_to_string(e),
+ token::NtPath(ref e) => path_to_string(e),
+ token::NtItem(ref e) => item_to_string(e),
+ token::NtBlock(ref e) => block_to_string(e),
+ token::NtStmt(ref e) => stmt_to_string(e),
+ token::NtPat(ref e) => pat_to_string(e),
+ token::NtIdent(e, false) => ident_to_string(e),
+ token::NtIdent(e, true) => format!("r#{}", ident_to_string(e)),
+ token::NtLifetime(e) => ident_to_string(e),
+ token::NtTT(ref tree) => tt_to_string(tree.clone()),
+ token::NtArm(ref e) => arm_to_string(e),
+ token::NtImplItem(ref e) => impl_item_to_string(e),
+ token::NtTraitItem(ref e) => trait_item_to_string(e),
+ token::NtGenerics(ref e) => generic_params_to_string(&e.params),
+ token::NtWhereClause(ref e) => where_clause_to_string(e),
+ token::NtArg(ref e) => arg_to_string(e),
+ token::NtVis(ref e) => vis_to_string(e),
+ token::NtForeignItem(ref e) => foreign_item_to_string(e),
}
}
}
to_string(|s| s.print_mac(arg, ::parse::token::Paren))
}
+pub fn foreign_item_to_string(arg: &ast::ForeignItem) -> String {
+ to_string(|s| s.print_foreign_item(arg))
+}
+
pub fn visibility_qualified(vis: &ast::Visibility, s: &str) -> String {
format!("{}{}", to_string(|s| s.print_visibility(vis)), s)
}
if i > 0 {
self.writer().word("::")?
}
- if segment.identifier.name != keywords::CrateRoot.name() &&
- segment.identifier.name != keywords::DollarCrate.name() {
- self.writer().word(&segment.identifier.name.as_str())?;
- } else if segment.identifier.name == keywords::DollarCrate.name() {
- self.print_dollar_crate(segment.identifier.ctxt)?;
+ if segment.ident.name != keywords::CrateRoot.name() &&
+ segment.ident.name != keywords::DollarCrate.name() {
+ self.writer().word(&segment.ident.name.as_str())?;
+ } else if segment.ident.name == keywords::DollarCrate.name() {
+ self.print_dollar_crate(segment.ident.span.ctxt())?;
}
}
self.writer().space()?;
self.ibox(INDENT_UNIT)?;
match item.node {
ast::MetaItemKind::Word => {
- self.writer().word(&item.name.as_str())?;
+ self.writer().word(&item.ident.name.as_str())?;
}
ast::MetaItemKind::NameValue(ref value) => {
- self.word_space(&item.name.as_str())?;
+ self.word_space(&item.ident.name.as_str())?;
self.word_space("=")?;
self.print_literal(value)?;
}
ast::MetaItemKind::List(ref items) => {
- self.writer().word(&item.name.as_str())?;
+ self.writer().word(&item.ident.name.as_str())?;
self.popen()?;
self.commasep(Consistent,
&items[..],
self.end()?; // end the head-ibox
self.end() // end the outer cbox
}
+ ast::ForeignItemKind::Macro(ref m) => {
+ self.print_mac(m, token::Paren)?;
+ self.s.word(";")
+ }
}
}
pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> {
self.head("")?;
let generics = ast::Generics::default();
- self.print_struct(&v.node.data, &generics, v.node.name, v.span, false)?;
+ self.print_struct(&v.node.data, &generics, v.node.ident, v.span, false)?;
match v.node.disr_expr {
Some(ref d) => {
self.s.space()?;
|s, field| {
s.ibox(INDENT_UNIT)?;
if !field.is_shorthand {
- s.print_ident(field.ident.node)?;
+ s.print_ident(field.ident)?;
s.word_space(":")?;
}
s.print_expr(&field.expr)?;
let base_args = &args[1..];
self.print_expr_maybe_paren(&args[0], parser::PREC_POSTFIX)?;
self.s.word(".")?;
- self.print_ident(segment.identifier)?;
+ self.print_ident(segment.ident)?;
if let Some(ref parameters) = segment.parameters {
self.print_path_parameters(parameters, true)?;
}
self.word_space("=")?;
self.print_expr_maybe_paren(rhs, prec)?;
}
- ast::ExprKind::Field(ref expr, id) => {
+ ast::ExprKind::Field(ref expr, ident) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX)?;
self.s.word(".")?;
- self.print_ident(id.node)?;
+ self.print_ident(ident)?;
}
ast::ExprKind::TupField(ref expr, id) => {
self.print_expr_maybe_paren(expr, parser::PREC_POSTFIX)?;
colons_before_params: bool)
-> io::Result<()>
{
- if segment.identifier.name != keywords::CrateRoot.name() &&
- segment.identifier.name != keywords::DollarCrate.name() {
- self.print_ident(segment.identifier)?;
+ if segment.ident.name != keywords::CrateRoot.name() &&
+ segment.ident.name != keywords::DollarCrate.name() {
+ self.print_ident(segment.ident)?;
if let Some(ref parameters) = segment.parameters {
self.print_path_parameters(parameters, colons_before_params)?;
}
- } else if segment.identifier.name == keywords::DollarCrate.name() {
- self.print_dollar_crate(segment.identifier.ctxt)?;
+ } else if segment.ident.name == keywords::DollarCrate.name() {
+ self.print_dollar_crate(segment.ident.span.ctxt())?;
}
Ok(())
}
self.s.word(">")?;
self.s.word("::")?;
let item_segment = path.segments.last().unwrap();
- self.print_ident(item_segment.identifier)?;
+ self.print_ident(item_segment.ident)?;
match item_segment.parameters {
Some(ref parameters) => self.print_path_parameters(parameters, colons_before_params),
None => Ok(()),
is that it doesn't matter */
match pat.node {
PatKind::Wild => self.s.word("_")?,
- PatKind::Ident(binding_mode, ref path1, ref sub) => {
+ PatKind::Ident(binding_mode, ident, ref sub) => {
match binding_mode {
ast::BindingMode::ByRef(mutbl) => {
self.word_nbsp("ref")?;
self.word_nbsp("mut")?;
}
}
- self.print_ident(path1.node)?;
+ self.print_ident(ident)?;
if let Some(ref p) = *sub {
self.s.word("@")?;
self.print_pat(p)?;
self.print_explicit_self(&eself)?;
} else {
let invalid = if let PatKind::Ident(_, ident, _) = input.pat.node {
- ident.node.name == keywords::Invalid.name()
+ ident.name == keywords::Invalid.name()
} else {
false
};
let ident = ast::Ident::from_str("principal_skinner");
let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ {
- name: ident,
+ ident,
attrs: Vec::new(),
// making this up as I go.... ?
data: ast::VariantData::Unit(ast::DUMMY_NODE_ID),
}
pub fn maybe_inject_crates_ref(mut krate: ast::Crate, alt_std_name: Option<&str>) -> ast::Crate {
- let name = if attr::contains_name(&krate.attrs, "no_core") {
+ // the first name in this list is the crate name of the crate with the prelude
+ let names: &[&str] = if attr::contains_name(&krate.attrs, "no_core") {
return krate;
} else if attr::contains_name(&krate.attrs, "no_std") {
- "core"
+ if attr::contains_name(&krate.attrs, "compiler_builtins") {
+ &["core"]
+ } else {
+ &["core", "compiler_builtins"]
+ }
} else {
- "std"
+ &["std"]
};
- INJECTED_CRATE_NAME.with(|opt_name| opt_name.set(Some(name)));
+ for name in names {
+ krate.module.items.insert(0, P(ast::Item {
+ attrs: vec![attr::mk_attr_outer(DUMMY_SP,
+ attr::mk_attr_id(),
+ attr::mk_word_item(ast::Ident::from_str("macro_use")))],
+ vis: dummy_spanned(ast::VisibilityKind::Inherited),
+ node: ast::ItemKind::ExternCrate(alt_std_name.map(Symbol::intern)),
+ ident: ast::Ident::from_str(name),
+ id: ast::DUMMY_NODE_ID,
+ span: DUMMY_SP,
+ tokens: None,
+ }));
+ }
- krate.module.items.insert(0, P(ast::Item {
- attrs: vec![attr::mk_attr_outer(DUMMY_SP,
- attr::mk_attr_id(),
- attr::mk_word_item(Symbol::intern("macro_use")))],
- vis: dummy_spanned(ast::VisibilityKind::Inherited),
- node: ast::ItemKind::ExternCrate(alt_std_name.map(Symbol::intern)),
- ident: ast::Ident::from_str(name),
- id: ast::DUMMY_NODE_ID,
- span: DUMMY_SP,
- tokens: None,
- }));
+ // the crates have been injected, the assumption is that the first one is the one with
+ // the prelude.
+ let name = names[0];
+
+ INJECTED_CRATE_NAME.with(|opt_name| opt_name.set(Some(name)));
let span = ignored_span(DUMMY_SP);
krate.module.items.insert(0, P(ast::Item {
attrs: vec![ast::Attribute {
style: ast::AttrStyle::Outer,
- path: ast::Path::from_ident(span, ast::Ident::from_str("prelude_import")),
+ path: ast::Path::from_ident(ast::Ident::new(Symbol::intern("prelude_import"), span)),
tokens: TokenStream::empty(),
id: attr::mk_attr_id(),
is_sugared_doc: false,
node: ast::ItemKind::Use(P(ast::UseTree {
prefix: ast::Path {
segments: [name, "prelude", "v1"].into_iter().map(|name| {
- ast::PathSegment::from_ident(ast::Ident::from_str(name), DUMMY_SP)
+ ast::PathSegment::from_ident(ast::Ident::from_str(name))
}).collect(),
span,
},
EntryPointType::MainAttr |
EntryPointType::Start =>
folded.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| {
- let allow_str = Symbol::intern("allow");
- let dead_code_str = Symbol::intern("dead_code");
- let word_vec = vec![attr::mk_list_word_item(dead_code_str)];
- let allow_dead_code_item = attr::mk_list_item(allow_str, word_vec);
+ let allow_ident = Ident::from_str("allow");
+ let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code"));
+ let allow_dead_code_item = attr::mk_list_item(DUMMY_SP, allow_ident,
+ vec![dc_nested]);
let allow_dead_code = attr::mk_attr_outer(DUMMY_SP,
attr::mk_attr_id(),
allow_dead_code_item);
fn path_node(ids: Vec<Ident>) -> ast::Path {
ast::Path {
span: DUMMY_SP,
- segments: ids.into_iter().map(|id| ast::PathSegment::from_ident(id, DUMMY_SP)).collect(),
+ segments: ids.into_iter().map(|id| ast::PathSegment::from_ident(id)).collect(),
}
}
}
impl<'ast> Visitor<'ast> for NodeCounter {
- fn visit_ident(&mut self, span: Span, ident: Ident) {
+ fn visit_ident(&mut self, ident: Ident) {
self.count += 1;
- walk_ident(self, span, ident);
+ walk_ident(self, ident);
}
fn visit_mod(&mut self, m: &Mod, _s: Span, _a: &[Attribute], _n: NodeId) {
self.count += 1;
fn visit_name(&mut self, _span: Span, _name: Name) {
// Nothing to do.
}
- fn visit_ident(&mut self, span: Span, ident: Ident) {
- walk_ident(self, span, ident);
+ fn visit_ident(&mut self, ident: Ident) {
+ walk_ident(self, ident);
}
fn visit_mod(&mut self, m: &'ast Mod, _s: Span, _attrs: &[Attribute], _n: NodeId) {
walk_mod(self, m);
}
}
-pub fn walk_ident<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, ident: Ident) {
- visitor.visit_name(span, ident.name);
+pub fn walk_ident<'a, V: Visitor<'a>>(visitor: &mut V, ident: Ident) {
+ visitor.visit_name(ident.span, ident.name);
}
pub fn walk_crate<'a, V: Visitor<'a>>(visitor: &mut V, krate: &'a Crate) {
}
pub fn walk_label<'a, V: Visitor<'a>>(visitor: &mut V, label: &'a Label) {
- visitor.visit_ident(label.span, label.ident);
+ visitor.visit_ident(label.ident);
}
pub fn walk_lifetime<'a, V: Visitor<'a>>(visitor: &mut V, lifetime: &'a Lifetime) {
- visitor.visit_ident(lifetime.span, lifetime.ident);
+ visitor.visit_ident(lifetime.ident);
}
pub fn walk_poly_trait_ref<'a, V>(visitor: &mut V,
pub fn walk_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a Item) {
visitor.visit_vis(&item.vis);
- visitor.visit_ident(item.span, item.ident);
+ visitor.visit_ident(item.ident);
match item.node {
ItemKind::ExternCrate(orig_name) => {
if let Some(orig_name) = orig_name {
item_id: NodeId)
where V: Visitor<'a>,
{
- visitor.visit_ident(variant.span, variant.node.name);
- visitor.visit_variant_data(&variant.node.data, variant.node.name,
+ visitor.visit_ident(variant.node.ident);
+ visitor.visit_variant_data(&variant.node.data, variant.node.ident,
generics, item_id, variant.span);
walk_list!(visitor, visit_expr, &variant.node.disr_expr);
walk_list!(visitor, visit_attribute, &variant.node.attrs);
match use_tree.kind {
UseTreeKind::Simple(rename) => {
if let Some(rename) = rename {
- visitor.visit_ident(use_tree.span, rename);
+ visitor.visit_ident(rename);
}
}
UseTreeKind::Glob => {},
pub fn walk_path_segment<'a, V: Visitor<'a>>(visitor: &mut V,
path_span: Span,
segment: &'a PathSegment) {
- visitor.visit_ident(path_span, segment.identifier);
+ visitor.visit_ident(segment.ident);
if let Some(ref parameters) = segment.parameters {
visitor.visit_path_parameters(path_span, parameters);
}
pub fn walk_assoc_type_binding<'a, V: Visitor<'a>>(visitor: &mut V,
type_binding: &'a TypeBinding) {
- visitor.visit_ident(type_binding.span, type_binding.ident);
+ visitor.visit_ident(type_binding.ident);
visitor.visit_ty(&type_binding.ty);
}
visitor.visit_path(path, pattern.id);
for field in fields {
walk_list!(visitor, visit_attribute, field.node.attrs.iter());
- visitor.visit_ident(field.span, field.node.ident);
+ visitor.visit_ident(field.node.ident);
visitor.visit_pat(&field.node.pat)
}
}
PatKind::Paren(ref subpattern) => {
visitor.visit_pat(subpattern)
}
- PatKind::Ident(_, ref pth1, ref optional_subpattern) => {
- visitor.visit_ident(pth1.span, pth1.node);
+ PatKind::Ident(_, ident, ref optional_subpattern) => {
+ visitor.visit_ident(ident);
walk_list!(visitor, visit_pat, optional_subpattern);
}
PatKind::Lit(ref expression) => visitor.visit_expr(expression),
pub fn walk_foreign_item<'a, V: Visitor<'a>>(visitor: &mut V, foreign_item: &'a ForeignItem) {
visitor.visit_vis(&foreign_item.vis);
- visitor.visit_ident(foreign_item.span, foreign_item.ident);
+ visitor.visit_ident(foreign_item.ident);
match foreign_item.node {
ForeignItemKind::Fn(ref function_declaration, ref generics) => {
}
ForeignItemKind::Static(ref typ, _) => visitor.visit_ty(typ),
ForeignItemKind::Ty => (),
+ ForeignItemKind::Macro(ref mac) => visitor.visit_mac(mac),
}
walk_list!(visitor, visit_attribute, &foreign_item.attrs);
walk_list!(visitor, visit_attribute, &*l.attrs);
}
GenericParam::Type(ref t) => {
- visitor.visit_ident(t.span, t.ident);
+ visitor.visit_ident(t.ident);
walk_list!(visitor, visit_ty_param_bound, &t.bounds);
walk_list!(visitor, visit_ty, &t.default);
walk_list!(visitor, visit_attribute, &*t.attrs);
}
pub fn walk_trait_item<'a, V: Visitor<'a>>(visitor: &mut V, trait_item: &'a TraitItem) {
- visitor.visit_ident(trait_item.span, trait_item.ident);
+ visitor.visit_ident(trait_item.ident);
walk_list!(visitor, visit_attribute, &trait_item.attrs);
visitor.visit_generics(&trait_item.generics);
match trait_item.node {
pub fn walk_impl_item<'a, V: Visitor<'a>>(visitor: &mut V, impl_item: &'a ImplItem) {
visitor.visit_vis(&impl_item.vis);
- visitor.visit_ident(impl_item.span, impl_item.ident);
+ visitor.visit_ident(impl_item.ident);
walk_list!(visitor, visit_attribute, &impl_item.attrs);
visitor.visit_generics(&impl_item.generics);
match impl_item.node {
pub fn walk_struct_field<'a, V: Visitor<'a>>(visitor: &mut V, struct_field: &'a StructField) {
visitor.visit_vis(&struct_field.vis);
if let Some(ident) = struct_field.ident {
- visitor.visit_ident(struct_field.span, ident);
+ visitor.visit_ident(ident);
}
visitor.visit_ty(&struct_field.ty);
walk_list!(visitor, visit_attribute, &struct_field.attrs);
visitor.visit_path(path, expression.id);
for field in fields {
walk_list!(visitor, visit_attribute, field.attrs.iter());
- visitor.visit_ident(field.ident.span, field.ident.node);
+ visitor.visit_ident(field.ident);
visitor.visit_expr(&field.expr)
}
walk_list!(visitor, visit_expr, optional_base);
visitor.visit_expr(left_expression);
visitor.visit_expr(right_expression);
}
- ExprKind::Field(ref subexpression, ref ident) => {
+ ExprKind::Field(ref subexpression, ident) => {
visitor.visit_expr(subexpression);
- visitor.visit_ident(ident.span, ident.node);
+ visitor.visit_ident(ident);
}
ExprKind::TupField(ref subexpression, _) => {
visitor.visit_expr(subexpression);
use syntax::ext::build::AstBuilder;
use syntax::parse::token;
use syntax::print::pprust;
+use syntax::symbol::Symbol;
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax_pos::{Span, DUMMY_SP};
None
};
- let sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
+ let sp = sp.apply_mark(cx.current_expansion.mark);
let panic_call = Mac_ {
- path: Path::from_ident(sp, Ident::from_str("panic")),
+ path: Path::from_ident(Ident::new(Symbol::intern("panic"), sp)),
tts: if let Some(ts) = custom_msg_args {
ts.into()
} else {
sp: Span,
tts: &[tokenstream::TokenTree])
-> Box<base::MacResult + 'static> {
- let sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
+ let sp = sp.apply_mark(cx.current_expansion.mark);
let mut p = cx.new_parser_from_tts(tts);
let cfg = panictry!(p.parse_meta_item());
}
}
}
- let sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
+ let sp = sp.apply_mark(cx.current_expansion.mark);
base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator)))
}
use syntax::ptr::P;
use syntax_pos::Span;
use syntax_pos::symbol::Symbol;
-use syntax_pos::hygiene::SyntaxContext;
use syntax::tokenstream::TokenTree;
pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
}
}
}
- let res = ast::Ident {
- name: Symbol::intern(&res_str),
- ctxt: SyntaxContext::empty().apply_mark(cx.current_expansion.mark),
- };
- struct Result {
- ident: ast::Ident,
- span: Span,
- };
+ let ident = ast::Ident::new(Symbol::intern(&res_str), sp.apply_mark(cx.current_expansion.mark));
- impl Result {
- fn path(&self) -> ast::Path {
- ast::Path {
- span: self.span,
- segments: vec![ast::PathSegment::from_ident(self.ident, self.span)],
- }
- }
- }
+ struct ConcatIdentsResult { ident: ast::Ident }
- impl base::MacResult for Result {
+ impl base::MacResult for ConcatIdentsResult {
fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
Some(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
- node: ast::ExprKind::Path(None, self.path()),
- span: self.span,
+ node: ast::ExprKind::Path(None, ast::Path::from_ident(self.ident)),
+ span: self.ident.span,
attrs: ast::ThinVec::new(),
}))
}
fn make_ty(self: Box<Self>) -> Option<P<ast::Ty>> {
Some(P(ast::Ty {
id: ast::DUMMY_NODE_ID,
- node: ast::TyKind::Path(None, self.path()),
- span: self.span,
+ node: ast::TyKind::Path(None, ast::Path::from_ident(self.ident)),
+ span: self.ident.span,
}))
}
}
- Box::new(Result {
- ident: res,
- span: sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark)),
- })
+ Box::new(ConcatIdentsResult { ident })
}
vdata = vdata_;
}
EnumMatching(.., variant, ref af) => {
- ctor_path = cx.path(trait_span, vec![substr.type_ident, variant.node.name]);
+ ctor_path = cx.path(trait_span, vec![substr.type_ident, variant.node.ident]);
all_fields = af;
vdata = &variant.node.data;
}
Annotatable::Item(item) => item,
Annotatable::ImplItem(_) |
Annotatable::TraitItem(_) |
+ Annotatable::ForeignItem(_) |
Annotatable::Stmt(_) |
Annotatable::Expr(_) => {
ecx.span_err(span, "proc-macro derives may only be \
// based on the "shape".
let (ident, is_struct) = match *substr.fields {
Struct(vdata, _) => (substr.type_ident, vdata.is_struct()),
- EnumMatching(_, _, v, _) => (v.node.name, v.node.data.is_struct()),
+ EnumMatching(_, _, v, _) => (v.node.ident, v.node.data.is_struct()),
EnumNonMatchingCollapsed(..) |
StaticStruct(..) |
StaticEnum(..) => cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`"),
}
let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg);
- let name = cx.expr_str(trait_span, variant.node.name.name);
+ let name = cx.expr_str(trait_span, variant.node.ident.name);
let call = cx.expr_method_call(trait_span,
blkencoder,
cx.ident_of("emit_enum_variant"),
use syntax::ast::{
self, BinOpKind, EnumDef, Expr, GenericParam, Generics, Ident, PatKind, VariantData
};
+
use syntax::attr;
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder;
fn visit_ty(&mut self, ty: &'a ast::Ty) {
if let ast::TyKind::Path(_, ref path) = ty.node {
if let Some(segment) = path.segments.first() {
- if self.ty_param_names.contains(&segment.identifier.name) {
+ if self.ty_param_names.contains(&segment.ident.name) {
self.types.push(P(ty.clone()));
}
}
// if we have already handled this type, skip it
if let ast::TyKind::Path(_, ref p) = ty.node {
if p.segments.len() == 1 &&
- ty_param_names.contains(&p.segments[0].identifier.name) ||
+ ty_param_names.contains(&p.segments[0].ident.name) ||
processed_field_types.contains(&p.segments) {
continue;
};
let args = {
let self_args = explicit_self.map(|explicit_self| {
ast::Arg::from_self(explicit_self,
- respan(trait_.span, keywords::SelfValue.ident()))
+ keywords::SelfValue.ident().with_span_pos(trait_.span))
});
let nonself_args = arg_types.into_iter()
.map(|(name, ty)| cx.arg(trait_.span, name, ty));
let summary = enum_def.variants
.iter()
.map(|v| {
- let ident = v.node.name;
let sp = v.span.with_ctxt(trait_.span.ctxt());
let summary = trait_.summarise_struct(cx, &v.node.data);
- (ident, sp, summary)
+ (v.node.ident, sp, summary)
})
.collect();
self.call_substructure_method(cx,
fn create_subpatterns(&self,
cx: &mut ExtCtxt,
- field_paths: Vec<ast::SpannedIdent>,
+ field_paths: Vec<ast::Ident>,
mutbl: ast::Mutability,
use_temporaries: bool)
-> Vec<P<ast::Pat>> {
for (i, struct_field) in struct_def.fields().iter().enumerate() {
let sp = struct_field.span.with_ctxt(self.span.ctxt());
let ident = cx.ident_of(&format!("{}_{}", prefix, i));
- paths.push(codemap::Spanned {
- span: sp,
- node: ident,
- });
+ paths.push(ident.with_span_pos(sp));
let val = cx.expr_path(cx.path_ident(sp, ident));
let val = if use_temporaries {
val
prefix: &str,
mutbl: ast::Mutability)
-> (P<ast::Pat>, Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])>) {
- let variant_ident = variant.node.name;
let sp = variant.span.with_ctxt(self.span.ctxt());
- let variant_path = cx.path(sp, vec![enum_ident, variant_ident]);
+ let variant_path = cx.path(sp, vec![enum_ident, variant.node.ident]);
let use_temporaries = false; // enums can't be repr(packed)
self.create_struct_pattern(cx, variant_path, &variant.node.data, prefix, mutbl,
use_temporaries)
use syntax::ast::{Expr, GenericParam, Generics, Ident, SelfKind};
use syntax::ext::base::ExtCtxt;
use syntax::ext::build::AstBuilder;
-use syntax::codemap::respan;
+use syntax::codemap::{respan, DUMMY_SP};
use syntax::ptr::P;
use syntax_pos::Span;
-use syntax_pos::hygiene::SyntaxContext;
use syntax_pos::symbol::keywords;
/// The types of pointers
PathKind::Global => cx.path_all(span, true, idents, lt, tys, Vec::new()),
PathKind::Local => cx.path_all(span, false, idents, lt, tys, Vec::new()),
PathKind::Std => {
- let def_site = SyntaxContext::empty().apply_mark(cx.current_expansion.mark);
- idents.insert(0, Ident { ctxt: def_site, ..keywords::DollarCrate.ident() });
+ let def_site = DUMMY_SP.apply_mark(cx.current_expansion.mark);
+ idents.insert(0, Ident::new(keywords::DollarCrate.name(), def_site));
cx.path_all(span, false, idents, lt, tys, Vec::new())
}
}
Some(v) => v,
};
- let sp = sp.with_ctxt(sp.ctxt().apply_mark(cx.current_expansion.mark));
+ let sp = sp.apply_mark(cx.current_expansion.mark);
let e = match env::var(&*var.as_str()) {
Err(..) => {
let lt = cx.lifetime(sp, keywords::StaticLifetime.ident());
ty: &ArgumentType,
arg: ast::Ident)
-> P<ast::Expr> {
- sp = sp.with_ctxt(sp.ctxt().apply_mark(ecx.current_expansion.mark));
+ sp = sp.apply_mark(ecx.current_expansion.mark);
let arg = ecx.expr_ident(sp, arg);
let trait_ = match *ty {
Placeholder(ref tyname) => {
mut sp: Span,
tts: &[tokenstream::TokenTree])
-> Box<base::MacResult + 'cx> {
- sp = sp.with_ctxt(sp.ctxt().apply_mark(ecx.current_expansion.mark));
+ sp = sp.apply_mark(ecx.current_expansion.mark);
match parse_args(ecx, sp, tts) {
Some((efmt, args, names)) => {
MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names))
let arg_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect();
let arg_unique_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect();
let mut macsp = ecx.call_site();
- macsp = macsp.with_ctxt(macsp.ctxt().apply_mark(ecx.current_expansion.mark));
+ macsp = macsp.apply_mark(ecx.current_expansion.mark);
let msg = "format argument must be a string literal.";
let fmt = match expr_to_spanned_string(ecx, efmt, msg) {
Some(fmt) => fmt,
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(proc_macro_internals)]
#![feature(decl_macro)]
use syntax::ext::base::ExtCtxt;
use syntax::ext::build::AstBuilder;
use syntax::ext::expand::ExpansionConfig;
-use syntax::ext::hygiene::{Mark, SyntaxContext};
+use syntax::ext::hygiene::Mark;
use syntax::fold::Folder;
use syntax::parse::ParseSess;
use syntax::ptr::P;
allow_internal_unsafe: false,
}
});
- let span = DUMMY_SP.with_ctxt(SyntaxContext::empty().apply_mark(mark));
+ let span = DUMMY_SP.apply_mark(mark);
let proc_macro = Ident::from_str("proc_macro");
let krate = cx.item(span,
marks: Vec<MarkData>,
syntax_contexts: Vec<SyntaxContextData>,
markings: HashMap<(SyntaxContext, Mark), SyntaxContext>,
- gensym_to_ctxt: HashMap<Symbol, SyntaxContext>,
+ gensym_to_ctxt: HashMap<Symbol, Span>,
}
impl HygieneData {
pub fn from_ident(ident: Ident) -> Symbol {
HygieneData::with(|data| {
let gensym = ident.name.gensymed();
- data.gensym_to_ctxt.insert(gensym, ident.ctxt);
+ data.gensym_to_ctxt.insert(gensym, ident.span);
gensym
})
}
pub fn to_ident(self) -> Ident {
HygieneData::with(|data| {
match data.gensym_to_ctxt.get(&self) {
- Some(&ctxt) => Ident { name: self.interned(), ctxt: ctxt },
+ Some(&span) => Ident::new(self.interned(), span),
None => Ident::with_empty_ctxt(self),
}
})
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
#![feature(const_fn)]
#![feature(custom_attribute)]
-#![cfg_attr(stage0, feature(i128_type))]
#![feature(optin_builtin_traits)]
#![allow(unused_attributes)]
#![feature(specialization)]
extern crate unicode_width;
pub mod hygiene;
-pub use hygiene::{SyntaxContext, ExpnInfo, ExpnFormat, NameAndSpan, CompilerDesugaringKind};
+pub use hygiene::{Mark, SyntaxContext, ExpnInfo, ExpnFormat, NameAndSpan, CompilerDesugaringKind};
mod span_encoding;
pub use span_encoding::{Span, DUMMY_SP};
if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt },
)
}
+
+ #[inline]
+ pub fn apply_mark(self, mark: Mark) -> Span {
+ let span = self.data();
+ span.with_ctxt(span.ctxt.apply_mark(mark))
+ }
+
+ #[inline]
+ pub fn remove_mark(&mut self) -> Mark {
+ let mut span = self.data();
+ let mark = span.ctxt.remove_mark();
+ *self = Span::new(span.lo, span.hi, span.ctxt);
+ mark
+ }
+
+ #[inline]
+ pub fn adjust(&mut self, expansion: Mark) -> Option<Mark> {
+ let mut span = self.data();
+ let mark = span.ctxt.adjust(expansion);
+ *self = Span::new(span.lo, span.hi, span.ctxt);
+ mark
+ }
+
+ #[inline]
+ pub fn glob_adjust(&mut self, expansion: Mark, glob_ctxt: SyntaxContext)
+ -> Option<Option<Mark>> {
+ let mut span = self.data();
+ let mark = span.ctxt.glob_adjust(expansion, glob_ctxt);
+ *self = Span::new(span.lo, span.hi, span.ctxt);
+ mark
+ }
+
+ #[inline]
+ pub fn reverse_glob_adjust(&mut self, expansion: Mark, glob_ctxt: SyntaxContext)
+ -> Option<Option<Mark>> {
+ let mut span = self.data();
+ let mark = span.ctxt.reverse_glob_adjust(expansion, glob_ctxt);
+ *self = Span::new(span.lo, span.hi, span.ctxt);
+ mark
+ }
+
+ #[inline]
+ pub fn modern(self) -> Span {
+ let span = self.data();
+ span.with_ctxt(span.ctxt.modern())
+ }
}
#[derive(Clone, Debug)]
//! type, and vice versa.
use hygiene::SyntaxContext;
-use GLOBALS;
+use {Span, DUMMY_SP, GLOBALS};
use serialize::{Decodable, Decoder, Encodable, Encoder};
use std::collections::HashMap;
use std::fmt;
+use std::hash::{Hash, Hasher};
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, Eq)]
pub struct Ident {
pub name: Symbol,
- pub ctxt: SyntaxContext,
+ pub span: Span,
}
impl Ident {
+ #[inline]
+ pub const fn new(name: Symbol, span: Span) -> Ident {
+ Ident { name, span }
+ }
+ #[inline]
pub const fn with_empty_ctxt(name: Symbol) -> Ident {
- Ident { name: name, ctxt: SyntaxContext::empty() }
+ Ident::new(name, DUMMY_SP)
}
/// Maps a string to an identifier with an empty syntax context.
Ident::with_empty_ctxt(Symbol::intern(string))
}
- pub fn without_first_quote(&self) -> Ident {
- Ident { name: Symbol::from(self.name.as_str().trim_left_matches('\'')), ctxt: self.ctxt }
+ /// Replace `lo` and `hi` with those from `span`, but keep hygiene context.
+ pub fn with_span_pos(self, span: Span) -> Ident {
+ Ident::new(self.name, span.with_ctxt(self.span.ctxt()))
+ }
+
+ pub fn without_first_quote(self) -> Ident {
+ Ident::new(Symbol::intern(self.name.as_str().trim_left_matches('\'')), self.span)
}
pub fn modern(self) -> Ident {
- Ident { name: self.name, ctxt: self.ctxt.modern() }
+ Ident::new(self.name, self.span.modern())
+ }
+}
+
+impl PartialEq for Ident {
+ fn eq(&self, rhs: &Self) -> bool {
+ self.name == rhs.name && self.span.ctxt() == rhs.span.ctxt()
+ }
+}
+
+impl Hash for Ident {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.name.hash(state);
+ self.span.ctxt().hash(state);
}
}
impl fmt::Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "{}{:?}", self.name, self.ctxt)
+ write!(f, "{}{:?}", self.name, self.span.ctxt())
}
}
impl Encodable for Ident {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
- if self.ctxt.modern() == SyntaxContext::empty() {
+ if self.span.ctxt().modern() == SyntaxContext::empty() {
s.emit_str(&self.name.as_str())
} else { // FIXME(jseyfried) intercrate hygiene
let mut string = "#".to_owned();
}
}
-impl<'a> From<&'a str> for Symbol {
- fn from(string: &'a str) -> Symbol {
- Symbol::intern(string)
- }
-}
-
impl fmt::Debug for Symbol {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let is_gensymed = with_interner(|interner| interner.is_gensymed(*self));
html_playground_url = "https://play.rust-lang.org/",
test(attr(deny(warnings))))]
#![deny(missing_docs)]
-#![deny(warnings)]
#![cfg_attr(windows, feature(libc))]
// Handle rustfmt skips
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/", test(attr(deny(warnings))))]
-#![deny(warnings)]
#![feature(asm)]
#![feature(fnbox)]
#![cfg_attr(any(unix, target_os = "cloudabi"), feature(libc))]
[dependencies]
core = { path = "../libcore" }
libc = { path = "../rustc/libc_shim" }
+compiler_builtins = { path = "../rustc/compiler_builtins_shim" }
#![no_std]
#![unstable(feature = "panic_unwind", issue = "32837")]
-#![deny(warnings)]
#![feature(cfg_target_vendor)]
#![feature(link_cfg)]
pub type _Unwind_Exception_Cleanup_Fn = extern "C" fn(unwind_code: _Unwind_Reason_Code,
exception: *mut _Unwind_Exception);
extern "C" {
- #[cfg_attr(stage0, unwind)]
- #[cfg_attr(not(stage0), unwind(allowed))]
+ #[unwind(allowed)]
pub fn _Unwind_Resume(exception: *mut _Unwind_Exception) -> !;
pub fn _Unwind_DeleteException(exception: *mut _Unwind_Exception);
pub fn _Unwind_GetLanguageSpecificData(ctx: *mut _Unwind_Context) -> *mut c_void;
if #[cfg(not(all(target_os = "ios", target_arch = "arm")))] {
// Not 32-bit iOS
extern "C" {
- #[cfg_attr(stage0, unwind)]
- #[cfg_attr(not(stage0), unwind(allowed))]
+ #[unwind(allowed)]
pub fn _Unwind_RaiseException(exception: *mut _Unwind_Exception) -> _Unwind_Reason_Code;
pub fn _Unwind_Backtrace(trace: _Unwind_Trace_Fn,
trace_argument: *mut c_void)
} else {
// 32-bit iOS uses SjLj and does not provide _Unwind_Backtrace()
extern "C" {
- #[cfg_attr(stage0, unwind)]
- #[cfg_attr(not(stage0), unwind(allowed))]
+ #[unwind(allowed)]
pub fn _Unwind_SjLj_RaiseException(e: *mut _Unwind_Exception) -> _Unwind_Reason_Code;
}
[features]
c = []
default = ["c", "rustbuild", "compiler-builtins"]
+mem = []
rustbuild = []
compiler-builtins = []
[dependencies]
core = { path = "../../libcore" }
+compiler_builtins = { path = "../../rustc/compiler_builtins_shim" }
alloc = { path = "../../liballoc" }
#
# See https://github.com/rust-lang/rfcs/pull/1133.
core = { path = "../../libcore" }
+compiler_builtins = { path = "../compiler_builtins_shim" }
+
[features]
# Certain parts of libc are conditionally compiled differently than when used
// Set the stack size at link time on Windows. See rustc_driver::in_rustc_thread
// for the rationale.
+#[allow(unused_attributes)]
#[cfg_attr(all(windows, target_env = "msvc"), link_args = "/STACK:16777216")]
// We only build for msvc and gnu now, but we use a exhaustive condition here
// so we can expect either the stack size to be set or the build fails.
# source tarball for a stable release you'll likely see `1.x.0` for rustc and
# `0.x.0` for Cargo where they were released on `date`.
-date: 2018-03-18
+date: 2018-04-04
rustc: beta
cargo: beta
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags:-Zshare-generics=yes
+
+#![crate_type="rlib"]
+
+pub fn generic_fn<T>(x: T, y: T) -> (T, T) {
+ (x, y)
+}
+
+pub fn use_generic_fn_f32() -> (f32, f32) {
+ generic_fn(0.0f32, 1.0f32)
+}
// ignore-tidy-linelength
// We specify -Z incremental here because we want to test the partitioning for
// incremental compilation
-// compile-flags:-Zprint-trans-items=eager -Zincremental=tmp/partitioning-tests/extern-generic
+// compile-flags:-Zprint-trans-items=eager -Zincremental=tmp/partitioning-tests/extern-generic -Zshare-generics=y
#![allow(dead_code)]
#![crate_type="lib"]
// Make sure the two generic functions from the extern crate get instantiated
// once for the current crate
//~ TRANS_ITEM fn cgu_generic_function::foo[0]<&str> @@ cgu_generic_function.volatile[External]
-//~ TRANS_ITEM fn cgu_generic_function::bar[0]<&str> @@ cgu_generic_function.volatile[Internal]
+//~ TRANS_ITEM fn cgu_generic_function::bar[0]<&str> @@ cgu_generic_function.volatile[External]
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-tidy-linelength
+// compile-flags:-Zprint-trans-items=eager -Zshare-generics=yes -Zincremental=tmp/partitioning-tests/shared-generics-exe
+
+#![crate_type="rlib"]
+
+// aux-build:shared_generics_aux.rs
+extern crate shared_generics_aux;
+
+//~ TRANS_ITEM fn shared_generics::foo[0]
+pub fn foo() {
+
+ //~ TRANS_ITEM fn shared_generics_aux::generic_fn[0]<u16> @@ shared_generics_aux.volatile[External]
+ let _ = shared_generics_aux::generic_fn(0u16, 1u16);
+
+ // This should not generate a monomorphization because it's already
+ // available in `shared_generics_aux`.
+ let _ = shared_generics_aux::generic_fn(0.0f32, 3.0f32);
+}
+
+// TRANS_ITEM drop-glue i8
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags: -C no-prepopulate-passes -Zshare-generics=yes
+
+// Check that local generics are internalized if they are in the same CGU
+
+// CHECK: define internal {{.*}} @_ZN34local_generics_in_exe_internalized3foo{{.*}}
+pub fn foo<T>(x: T, y: T) -> (T, T) {
+ (x, y)
+}
+
+fn main() {
+ let _ = foo(0u8, 1u8);
+}
}
})
}
- // these are covered in proc_macro/attr-stmt-expr.rs
+ // covered in proc_macro/macros-in-extern.rs
+ Annotatable::ForeignItem(_) => unimplemented!(),
+ // covered in proc_macro/attr-stmt-expr.rs
Annotatable::Stmt(_) | Annotatable::Expr(_) => panic!("expected item")
}
}
let copy_name = match mi.node {
ast::MetaItemKind::List(ref xs) => {
if let Some(word) = xs[0].word() {
- ast::Ident::with_empty_ctxt(word.name())
+ word.ident
} else {
cx.span_err(mi.span, "Expected word");
return;
new_it.ident = copy_name;
push(Annotatable::TraitItem(P(new_it)));
}
+ // covered in proc_macro/macros-in-extern.rs
+ Annotatable::ForeignItem(_) => unimplemented!(),
// covered in proc_macro/attr-stmt-expr.rs
Annotatable::Stmt(_) | Annotatable::Expr(_) => panic!("expected item")
}
extern crate proc_macro;
-use proc_macro::{TokenStream, TokenTree, TokenNode, Delimiter, Literal, Spacing};
+use proc_macro::{TokenStream, TokenTree, Delimiter, Literal, Spacing, Group};
#[proc_macro_attribute]
pub fn foo(attr: TokenStream, input: TokenStream) -> TokenStream {
}
fn assert_inline(slice: &mut &[TokenTree]) {
- match slice[0].kind {
- TokenNode::Op('#', _) => {}
+ match &slice[0] {
+ TokenTree::Op(tt) => assert_eq!(tt.op(), '#'),
_ => panic!("expected '#' char"),
}
- match slice[1].kind {
- TokenNode::Group(Delimiter::Bracket, _) => {}
+ match &slice[1] {
+ TokenTree::Group(tt) => assert_eq!(tt.delimiter(), Delimiter::Bracket),
_ => panic!("expected brackets"),
}
*slice = &slice[2..];
}
fn assert_doc(slice: &mut &[TokenTree]) {
- match slice[0].kind {
- TokenNode::Op('#', Spacing::Alone) => {}
+ match &slice[0] {
+ TokenTree::Op(tt) => {
+ assert_eq!(tt.op(), '#');
+ assert_eq!(tt.spacing(), Spacing::Alone);
+ }
_ => panic!("expected #"),
}
- let inner = match slice[1].kind {
- TokenNode::Group(Delimiter::Bracket, ref s) => s.clone(),
+ let inner = match &slice[1] {
+ TokenTree::Group(tt) => {
+ assert_eq!(tt.delimiter(), Delimiter::Bracket);
+ tt.stream()
+ }
_ => panic!("expected brackets"),
};
let tokens = inner.into_iter().collect::<Vec<_>>();
panic!("expected three tokens in doc")
}
- match tokens[0].kind {
- TokenNode::Term(ref t) => assert_eq!("doc", t.as_str()),
+ match &tokens[0] {
+ TokenTree::Term(tt) => assert_eq!("doc", tt.as_str()),
_ => panic!("expected `doc`"),
}
- match tokens[1].kind {
- TokenNode::Op('=', Spacing::Alone) => {}
+ match &tokens[1] {
+ TokenTree::Op(tt) => {
+ assert_eq!(tt.op(), '=');
+ assert_eq!(tt.spacing(), Spacing::Alone);
+ }
_ => panic!("expected equals"),
}
- match tokens[2].kind {
- TokenNode::Literal(_) => {}
+ match tokens[2] {
+ TokenTree::Literal(_) => {}
_ => panic!("expected literal"),
}
}
fn assert_invoc(slice: &mut &[TokenTree]) {
- match slice[0].kind {
- TokenNode::Op('#', _) => {}
+ match &slice[0] {
+ TokenTree::Op(tt) => assert_eq!(tt.op(), '#'),
_ => panic!("expected '#' char"),
}
- match slice[1].kind {
- TokenNode::Group(Delimiter::Bracket, _) => {}
+ match &slice[1] {
+ TokenTree::Group(tt) => assert_eq!(tt.delimiter(), Delimiter::Bracket),
_ => panic!("expected brackets"),
}
*slice = &slice[2..];
}
fn assert_foo(slice: &mut &[TokenTree]) {
- match slice[0].kind {
- TokenNode::Term(ref name) => assert_eq!(name.as_str(), "fn"),
+ match &slice[0] {
+ TokenTree::Term(tt) => assert_eq!(tt.as_str(), "fn"),
_ => panic!("expected fn"),
}
- match slice[1].kind {
- TokenNode::Term(ref name) => assert_eq!(name.as_str(), "foo"),
+ match &slice[1] {
+ TokenTree::Term(tt) => assert_eq!(tt.as_str(), "foo"),
_ => panic!("expected foo"),
}
- match slice[2].kind {
- TokenNode::Group(Delimiter::Parenthesis, ref s) => assert!(s.is_empty()),
+ match &slice[2] {
+ TokenTree::Group(tt) => {
+ assert_eq!(tt.delimiter(), Delimiter::Parenthesis);
+ assert!(tt.stream().is_empty());
+ }
_ => panic!("expected parens"),
}
- match slice[3].kind {
- TokenNode::Group(Delimiter::Brace, _) => {}
+ match &slice[3] {
+ TokenTree::Group(tt) => assert_eq!(tt.delimiter(), Delimiter::Brace),
_ => panic!("expected braces"),
}
*slice = &slice[4..];
}
fn fold_tree(input: TokenTree) -> TokenTree {
- TokenTree {
- span: input.span,
- kind: fold_node(input.kind),
- }
-}
-
-fn fold_node(input: TokenNode) -> TokenNode {
match input {
- TokenNode::Group(a, b) => TokenNode::Group(a, fold_stream(b)),
- TokenNode::Op(a, b) => TokenNode::Op(a, b),
- TokenNode::Term(a) => TokenNode::Term(a),
- TokenNode::Literal(a) => {
+ TokenTree::Group(b) => {
+ TokenTree::Group(Group::new(b.delimiter(), fold_stream(b.stream())))
+ }
+ TokenTree::Op(b) => TokenTree::Op(b),
+ TokenTree::Term(a) => TokenTree::Term(a),
+ TokenTree::Literal(a) => {
if a.to_string() != "\"foo\"" {
- TokenNode::Literal(a)
+ TokenTree::Literal(a)
} else {
- TokenNode::Literal(Literal::integer(3))
+ TokenTree::Literal(Literal::i32_unsuffixed(3))
}
}
}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![feature(proc_macro)]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro_attribute]
+pub fn nop_attr(_attr: TokenStream, input: TokenStream) -> TokenStream {
+ assert!(_attr.to_string().is_empty());
+ input
+}
+
+#[proc_macro_attribute]
+pub fn no_output(_attr: TokenStream, _input: TokenStream) -> TokenStream {
+ assert!(_attr.to_string().is_empty());
+ assert!(!_input.to_string().is_empty());
+ "".parse().unwrap()
+}
+
+#[proc_macro]
+pub fn emit_input(input: TokenStream) -> TokenStream {
+ input
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:test-macros.rs
+// ignore-stage1
+// ignore-wasm32
+
+#![feature(proc_macro)]
+
+extern crate test_macros;
+
+use test_macros::{nop_attr, no_output, emit_input};
+
+fn main() {
+ assert_eq!(unsafe { rust_get_test_int() }, 0isize);
+ assert_eq!(unsafe { rust_dbg_extern_identity_u32(0xDEADBEEF) }, 0xDEADBEEF);
+}
+
+#[link(name = "rust_test_helpers", kind = "static")]
+extern {
+ #[no_output]
+ //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+ fn some_definitely_unknown_symbol_which_should_be_removed();
+
+ #[nop_attr]
+ //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+ fn rust_get_test_int() -> isize;
+
+ emit_input!(fn rust_dbg_extern_identity_u32(arg: u32) -> u32;);
+ //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+}
// `#[oops]` is left dangling (that is, it is unattached, with no
// formal binding following it).
-#![feature(generic_param_attrs, rustc_attrs)]
-#![allow(dead_code)]
+#![feature(rustc_attrs)]
struct RefIntPair<'a, 'b>(&'a u32, &'b u32);
// `#[oops]` is left dangling (that is, it is unattached, with no
// formal binding following it).
-#![feature(generic_param_attrs, rustc_attrs)]
-#![allow(dead_code)]
+#![feature(rustc_attrs)]
struct RefAny<'a, T>(&'a T);
let y: Box<_> = box &mut x;
let p = &y;
***p = 2; //[ast]~ ERROR cannot assign to data in a `&` reference
- //[mir]~^ ERROR cannot assign to immutable item `***p`
+ //[mir]~^ ERROR cannot assign to data in a `&` reference
drop(p);
}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(nll)]
+
+struct Node {
+ elem: i32,
+ next: Option<Box<Node>>,
+}
+
+fn a() {
+ let mut node = Node {
+ elem: 5,
+ next: None,
+ };
+
+ let mut src = &mut node;
+ {src};
+ src.next = None; //~ ERROR use of moved value: `src` [E0382]
+}
+
+fn b() {
+ let mut src = &mut (22, 44);
+ {src};
+ src.0 = 66; //~ ERROR use of moved value: `src` [E0382]
+}
+
+fn main() {
+ a();
+ b();
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-wasm32
+
+#![feature(decl_macro)]
+
+macro_rules! returns_isize(
+ ($ident:ident) => (
+ fn $ident() -> isize;
+ )
+);
+
+macro takes_u32_returns_u32($ident:ident) {
+ fn $ident (arg: u32) -> u32;
+}
+
+macro_rules! emits_nothing(
+ () => ()
+);
+
+fn main() {
+ assert_eq!(unsafe { rust_get_test_int() }, 0isize);
+ assert_eq!(unsafe { rust_dbg_extern_identity_u32(0xDEADBEEF) }, 0xDEADBEEFu32);
+}
+
+#[link(name = "rust_test_helpers", kind = "static")]
+extern {
+ returns_isize!(rust_get_test_int);
+ //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
+ //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ emits_nothing!();
+ //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+}
--- /dev/null
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(non_exhaustive)]
+
+#[non_exhaustive(anything)]
+//~^ ERROR attribute should be empty [E0911]
+struct Foo;
+
+#[non_exhaustive]
+//~^ ERROR attribute can only be applied to a struct or enum [E0910]
+trait Bar { }
+
+#[non_exhaustive]
+//~^ ERROR attribute can only be applied to a struct or enum [E0910]
+union Baz {
+ f1: u16,
+ f2: u16
+}
+
+fn main() { }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(generic_param_attrs, rustc_attrs)]
+#![feature(rustc_attrs)]
fn func<#[rustc_synthetic] T>(_: T) {}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub const A: &str = "hello";
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+pub const A: &str = "xxxxx";
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// revisions:cfail1 cfail2 cfail3
+// compile-flags: -Z query-dep-graph --test
+// must-compile-successfully
+
+#![feature(rustc_attrs)]
+#![crate_type = "rlib"]
+
+#![rustc_partition_translated(module="issue_49595-tests", cfg="cfail2")]
+#![rustc_partition_translated(module="issue_49595-lit_test", cfg="cfail3")]
+
+mod tests {
+ #[cfg_attr(not(cfail1), ignore)]
+ #[test]
+ fn test() {
+ }
+}
+
+
+// Checks that changing a string literal without changing its span
+// takes effect.
+
+// replacing a module to have a stable span
+#[cfg_attr(not(cfail3), path = "auxiliary/lit_a.rs")]
+#[cfg_attr(cfail3, path = "auxiliary/lit_b.rs")]
+mod lit;
+
+pub mod lit_test {
+ #[test]
+ fn lit_test() {
+ println!("{}", ::lit::A);
+ }
+}
// A scenario with significant destruction code extents (which have
// suffix "dce" in current `-Z identify_regions` rendering).
-#![feature(generic_param_attrs)]
#![feature(dropck_eyepatch)]
fn main() {
// compile-flags: -Z parse-only
-// error-pattern:unmatched visibility `pub`
+// error-pattern:expected one of `(`, `fn`, `static`, `type`, or `}` here
extern {
pub pub fn foo();
}
// compile-flags: -Z parse-only
-extern {
- f(); //~ ERROR expected one of `!` or `::`, found `(`
+extern { //~ ERROR missing `fn`, `type`, or `static` for extern-item declaration
+ f();
}
fn main() {
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// must-compile-successfully
+// failure-status: 1
+
+#![feature(dyn_trait)]
+
+use std::error::Error;
+use std::io;
+
+fn main() -> Result<(), Box<dyn Error>> {
+ Err(Box::new(io::Error::new(io::ErrorKind::Other, "returned Box<dyn Error> from main()")))
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// error-pattern: An error message for you
+// failure-status: 1
+
+fn main() -> Result<(), &'static str> {
+ Err("An error message for you")
+}
--- /dev/null
+-include ../tools.mk
+
+all:
+ $(RUSTC) -C extra-filename=-hash foo.rs
+ $(RUSTC) bar.rs
+ mv $(TMPDIR)/libfoo-hash.rlib $(TMPDIR)/libfoo-another-hash.rlib
+ $(RUSTC) baz.rs
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "rlib"]
+
+extern crate foo;
+
+pub fn bar() { foo::foo() }
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "rlib"]
+
+extern crate bar;
+
+pub fn baz() { bar::bar() }
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "rlib"]
+
+pub fn foo() {}
all:
$(RUSTC) bar.rs
$(RUSTC) foo.rs $(FLAGS)
+ $(RUSTC) foo.rs $(FLAGS) -C panic=abort
endif
all:
- $(RUSTC) an_rlib.rs
- $(RUSTC) a_cdylib.rs
- $(RUSTC) a_rust_dylib.rs
- $(RUSTC) an_executable.rs
- $(RUSTC) a_cdylib.rs --crate-name combined_rlib_dylib --crate-type=rlib,cdylib
+ $(RUSTC) -Zshare-generics=no an_rlib.rs
+ $(RUSTC) -Zshare-generics=no a_cdylib.rs
+ $(RUSTC) -Zshare-generics=no a_rust_dylib.rs
+ $(RUSTC) -Zshare-generics=no an_executable.rs
+ $(RUSTC) -Zshare-generics=no a_cdylib.rs --crate-name combined_rlib_dylib --crate-type=rlib,cdylib
# Check that a cdylib exports its public #[no_mangle] functions
[ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -c public_c_function_from_cdylib)" -eq "1" ]
# Check that a Rust dylib exports its monomorphic functions
[ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_c_function_from_rust_dylib)" -eq "1" ]
[ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c _ZN.*public_rust_function_from_rust_dylib.*E)" -eq "1" ]
+ # Check that a Rust dylib does not export generics if -Zshare-generics=no
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c _ZN.*public_generic_function_from_rust_dylib.*E)" -eq "0" ]
+
# Check that a Rust dylib exports the monomorphic functions from its dependencies
[ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_c_function_from_rlib)" -eq "1" ]
[ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_rust_function_from_rlib)" -eq "1" ]
+ # Check that a Rust dylib does not export generics if -Zshare-generics=no
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c _ZN.*public_generic_function_from_rlib.*E)" -eq "0" ]
# Check that an executable does not export any dynamic symbols
[ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -c public_c_function_from_rlib)" -eq "0" ]
[ "$$($(NM) $(TMPDIR)/$(COMBINED_CDYLIB_NAME) | grep -c public_c_function_from_rlib)" -eq "1" ]
# Check that a cdylib DOES NOT export any public Rust functions
[ "$$($(NM) $(TMPDIR)/$(COMBINED_CDYLIB_NAME) | grep -c _ZN.*h.*E)" -eq "0" ]
+
+
+ $(RUSTC) -Zshare-generics=yes an_rlib.rs
+ $(RUSTC) -Zshare-generics=yes a_cdylib.rs
+ $(RUSTC) -Zshare-generics=yes a_rust_dylib.rs
+ $(RUSTC) -Zshare-generics=yes an_executable.rs
+
+ # Check that a cdylib exports its public #[no_mangle] functions
+ [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -c public_c_function_from_cdylib)" -eq "1" ]
+ # Check that a cdylib exports the public #[no_mangle] functions of dependencies
+ [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -c public_c_function_from_rlib)" -eq "1" ]
+ # Check that a cdylib DOES NOT export any public Rust functions
+ [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -c _ZN.*h.*E)" -eq "0" ]
+
+ # Check that a Rust dylib exports its monomorphic functions, including generics this time
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_c_function_from_rust_dylib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c _ZN.*public_rust_function_from_rust_dylib.*E)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c _ZN.*public_generic_function_from_rust_dylib.*E)" -eq "1" ]
+
+ # Check that a Rust dylib exports the monomorphic functions from its dependencies
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_c_function_from_rlib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_rust_function_from_rlib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c _ZN.*public_generic_function_from_rlib.*E)" -eq "1" ]
+
+ # Check that an executable does not export any dynamic symbols
+ [ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -c public_c_function_from_rlib)" -eq "0" ]
+ [ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -c public_rust_function_from_exe)" -eq "0" ]
endif
// This should be exported
#[no_mangle]
-pub extern "C" fn public_c_function_from_rust_dylib() {}
+pub extern "C" fn public_c_function_from_rust_dylib() {
+ let _ = public_generic_function_from_rust_dylib(1u16);
+}
+
+// This should be exported if -Zshare-generics=yes
+pub fn public_generic_function_from_rust_dylib<T>(x: T) -> T { x }
pub fn public_rust_function_from_rlib() {}
#[no_mangle]
-pub extern "C" fn public_c_function_from_rlib() {}
+pub extern "C" fn public_c_function_from_rlib() {
+ let _ = public_generic_function_from_rlib(0u64);
+}
+
+pub fn public_generic_function_from_rlib<T>(x: T) -> T {
+ x
+}
extern crate proc_macro;
-use proc_macro::{TokenStream, TokenNode, quote};
+use proc_macro::*;
#[proc_macro]
pub fn cond(input: TokenStream) -> TokenStream {
let mut conds = Vec::new();
let mut input = input.into_iter().peekable();
while let Some(tree) = input.next() {
- let cond = match tree.kind {
- TokenNode::Group(_, cond) => cond,
+ let cond = match tree {
+ TokenTree::Group(tt) => tt.stream(),
_ => panic!("Invalid input"),
};
let mut cond_trees = cond.clone().into_iter();
if rhs.is_empty() {
panic!("Invalid macro usage in cond: {}", cond);
}
- let is_else = match test.kind {
- TokenNode::Term(word) => word.as_str() == "else",
+ let is_else = match test {
+ TokenTree::Term(word) => word.as_str() == "else",
_ => false,
};
conds.push(if is_else || input.peek().is_none() {
});
}
- conds.into_iter().collect()
+ conds.into_iter().flat_map(|x| x.into_iter()).collect()
}
}
})
],
- // these are covered in proc_macro/attr-stmt-expr.rs
+ // covered in proc_macro/macros-in-extern.rs
+ Annotatable::ForeignItem(..) => unimplemented!(),
+ // covered in proc_macro/attr-stmt-expr.rs
Annotatable::Stmt(_) | Annotatable::Expr(_) => panic!("expected item"),
}
}
let copy_name = match mi.node {
ast::MetaItemKind::List(ref xs) => {
if let Some(word) = xs[0].word() {
- ast::Ident::with_empty_ctxt(word.name())
+ word.ident
} else {
cx.span_err(mi.span, "Expected word");
return;
new_it.ident = copy_name;
push(Annotatable::TraitItem(P(new_it)));
}
- // these are covered in proc_macro/attr-stmt-expr.rs
+ // covered in proc_macro/macros-in-extern.rs
+ Annotatable::ForeignItem(..) => unimplemented!(),
+ // covered in proc_macro/attr-stmt-expr.rs
Annotatable::Stmt(_) | Annotatable::Expr(_) => panic!("expected item")
}
}
extern crate proc_macro;
-use proc_macro::{TokenStream, quote};
+use proc_macro::*;
#[proc_macro_attribute]
pub fn attr_tru(_attr: TokenStream, item: TokenStream) -> TokenStream {
}
fn make_x() -> P<Expr> {
- let seg = PathSegment {
- identifier: Ident::from_str("x"),
- span: DUMMY_SP,
- parameters: None,
- };
- let path = Path {
- span: DUMMY_SP,
- segments: vec![seg],
- };
+ let seg = PathSegment::from_ident(Ident::from_str("x"));
+ let path = Path { segments: vec![seg], span: DUMMY_SP };
expr(ExprKind::Path(None, path))
}
0 => iter_exprs(depth - 1, &mut |e| g(ExprKind::Box(e))),
1 => iter_exprs(depth - 1, &mut |e| g(ExprKind::Call(e, vec![]))),
2 => {
- let seg = PathSegment {
- identifier: Ident::from_str("x"),
- span: DUMMY_SP,
- parameters: None,
- };
-
+ let seg = PathSegment::from_ident(Ident::from_str("x"));
iter_exprs(depth - 1, &mut |e| g(ExprKind::MethodCall(
seg.clone(), vec![e, make_x()])));
iter_exprs(depth - 1, &mut |e| g(ExprKind::MethodCall(
iter_exprs(depth - 1, &mut |e| g(ExprKind::Assign(make_x(), e)));
},
10 => {
- let ident = Spanned { span: DUMMY_SP, node: Ident::from_str("f") };
- iter_exprs(depth - 1, &mut |e| g(ExprKind::Field(e, ident)));
+ iter_exprs(depth - 1, &mut |e| g(ExprKind::Field(e, Ident::from_str("f"))));
},
11 => {
iter_exprs(depth - 1, &mut |e| g(ExprKind::Range(
iter_exprs(depth - 1, &mut |e| g(ExprKind::Ret(Some(e))));
},
14 => {
- let seg = PathSegment {
- identifier: Ident::from_str("S"),
- span: DUMMY_SP,
- parameters: None,
- };
- let path = Path {
- span: DUMMY_SP,
- segments: vec![seg],
- };
+ let path = Path::from_ident(Ident::from_str("S"));
g(ExprKind::Struct(path, vec![], Some(make_x())));
},
15 => {
extern crate proc_macro;
-use proc_macro::{TokenStream, TokenNode, Spacing, Literal, quote};
+use proc_macro::{TokenStream, TokenTree, Spacing, Literal, quote};
#[proc_macro]
pub fn count_compound_ops(input: TokenStream) -> TokenStream {
assert_eq!(count_compound_ops_helper(quote!(++ (&&) 4@a)), 3);
- TokenNode::Literal(Literal::u32(count_compound_ops_helper(input))).into()
+ let l = Literal::u32_suffixed(count_compound_ops_helper(input));
+ TokenTree::from(l).into()
}
fn count_compound_ops_helper(input: TokenStream) -> u32 {
let mut count = 0;
for token in input {
- match token.kind {
- TokenNode::Op(c, Spacing::Alone) => count += 1,
- TokenNode::Group(_, tokens) => count += count_compound_ops_helper(tokens),
+ match &token {
+ TokenTree::Op(tt) if tt.spacing() == Spacing::Alone => {
+ count += 1;
+ }
+ TokenTree::Group(tt) => {
+ count += count_compound_ops_helper(tt.stream());
+ }
_ => {}
}
}
#[proc_macro]
pub fn neg_one(_input: TokenStream) -> TokenStream {
- TokenTree {
- span: Span::call_site(),
- kind: TokenNode::Literal(Literal::i32(-1)),
- }.into()
+ TokenTree::Literal(Literal::i32_suffixed(-1)).into()
}
#[proc_macro]
pub fn neg_one_float(_input: TokenStream) -> TokenStream {
- TokenTree {
- span: Span::call_site(),
- kind: TokenNode::Literal(Literal::f32(-1.0)),
- }.into()
+ TokenTree::Literal(Literal::f32_suffixed(-1.0)).into()
}
#[proc_macro]
pub fn assert_fake_source_file(input: TokenStream) -> TokenStream {
for tk in input {
- let source_file = tk.span.source_file();
+ let source_file = tk.span().source_file();
assert!(!source_file.is_real(), "Source file is real: {:?}", source_file);
}
#[proc_macro]
pub fn assert_source_file(input: TokenStream) -> TokenStream {
for tk in input {
- let source_file = tk.span.source_file();
+ let source_file = tk.span().source_file();
assert!(source_file.is_real(), "Source file is not real: {:?}", source_file);
}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![feature(proc_macro)]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+#[proc_macro_attribute]
+pub fn nop_attr(_attr: TokenStream, input: TokenStream) -> TokenStream {
+ assert!(_attr.to_string().is_empty());
+ input
+}
+
+#[proc_macro_attribute]
+pub fn no_output(_attr: TokenStream, _input: TokenStream) -> TokenStream {
+ assert!(_attr.to_string().is_empty());
+ assert!(!_input.to_string().is_empty());
+ "".parse().unwrap()
+}
+
+#[proc_macro]
+pub fn emit_input(input: TokenStream) -> TokenStream {
+ input
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:test-macros.rs
+// ignore-stage1
+// ignore-wasm32
+
+#![feature(proc_macro, macros_in_extern)]
+
+extern crate test_macros;
+
+use test_macros::{nop_attr, no_output, emit_input};
+
+fn main() {
+ assert_eq!(unsafe { rust_get_test_int() }, 1isize);
+ assert_eq!(unsafe { rust_dbg_extern_identity_u32(0xDEADBEEF) }, 0xDEADBEEF);
+}
+
+#[link(name = "rust_test_helpers", kind = "static")]
+extern {
+ #[no_output]
+ fn some_definitely_unknown_symbol_which_should_be_removed();
+
+ #[nop_attr]
+ fn rust_get_test_int() -> isize;
+
+ emit_input!(fn rust_dbg_extern_identity_u32(arg: u32) -> u32;);
+}
// using `rustc_attrs` feature. There is a separate compile-fail/ test
// ensuring that the attribute feature-gating works in this context.)
-#![feature(generic_param_attrs, rustc_attrs)]
+#![feature(rustc_attrs)]
#![allow(dead_code)]
struct StLt<#[rustc_lt_struct] 'a>(&'a u32);
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(generic_param_attrs)]
#![feature(dropck_eyepatch)]
// The point of this test is to illustrate that the `#[may_dangle]`
// Not even for well-formedness.
struct WellFormedProjection<A, T=<A as Iterator>::Item>(A, T);
+// Issue #49344, predicates with lifetimes should not be checked.
+trait Scope<'a> {}
+struct Request<'a, S: Scope<'a> = i32>(S, &'a ());
+
fn main() {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(generic_param_attrs)]
#![feature(dropck_eyepatch)]
// The point of this test is to test uses of `#[may_dangle]` attribute
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(generic_param_attrs)]
#![feature(dropck_eyepatch)]
// The point of this test is to illustrate that the `#[may_dangle]`
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test that we are able to reinitialize a box whose referent was moved out
+#![feature(nll)]
+static mut ORDER: [usize; 3] = [0, 0, 0];
+static mut INDEX: usize = 0;
+
+struct Dropee (usize);
+
+impl Drop for Dropee {
+ fn drop(&mut self) {
+ unsafe {
+ ORDER[INDEX] = self.0;
+ INDEX = INDEX + 1;
+ }
+ }
+}
+
+fn add_sentintel() {
+ unsafe {
+ ORDER[INDEX] = 2;
+ INDEX = INDEX + 1;
+ }
+}
+
+fn main() {
+ let mut x = Box::new(Dropee(1));
+ *x; // move out from `*x`
+ add_sentintel();
+ *x = Dropee(3); // re-initialize `*x`
+ {x}; // drop value
+ unsafe {
+ assert_eq!(ORDER, [1, 2, 3]);
+ }
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// ignore-wasm32
+
+#![feature(decl_macro, macros_in_extern)]
+
+macro_rules! returns_isize(
+ ($ident:ident) => (
+ fn $ident() -> isize;
+ )
+);
+
+macro takes_u32_returns_u32($ident:ident) {
+ fn $ident (arg: u32) -> u32;
+}
+
+macro_rules! emits_nothing(
+ () => ()
+);
+
+fn main() {
+ assert_eq!(unsafe { rust_get_test_int() }, 1isize);
+ assert_eq!(unsafe { rust_dbg_extern_identity_u32(0xDEADBEEF) }, 0xDEADBEEFu32);
+}
+
+#[link(name = "rust_test_helpers", kind = "static")]
+extern {
+ returns_isize!(rust_get_test_int);
+ takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
+ emits_nothing!();
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(dyn_trait)]
+
+use std::error::Error;
+
+fn main() -> Result<(), Box<dyn Error>> {
+ Ok(())
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() -> Result<(), &'static str> {
+ Ok(())
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_name = "foo"]
+
+// @has foo/all.html '//a[@href="struct.Struct.html"]' 'Struct'
+// @has foo/all.html '//a[@href="enum.Enum.html"]' 'Enum'
+// @has foo/all.html '//a[@href="union.Union.html"]' 'Union'
+// @has foo/all.html '//a[@href="constant.CONST.html"]' 'CONST'
+// @has foo/all.html '//a[@href="static.STATIC.html"]' 'STATIC'
+// @has foo/all.html '//a[@href="fn.function.html"]' 'function'
+
+pub struct Struct;
+pub enum Enum {
+ X,
+ Y,
+}
+pub union Union {
+ x: u32,
+}
+pub const CONST: u32 = 0;
+pub static STATIC: &str = "baguette";
+pub fn function() {}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_name = "foo"]
+
+pub struct Bar;
+
+// @!has foo/struct.Bar.html '//*[@id="implementations"]'
extern crate proc_macro;
-use proc_macro::{TokenStream, TokenTree, TokenNode, Span};
+use proc_macro::{TokenStream, TokenTree, Span};
fn lit_span(tt: TokenTree) -> (Span, String) {
- use TokenNode::*;
- match tt.kind {
- Literal(..) | Group(..) => (tt.span, tt.to_string().trim().into()),
+ match tt {
+ TokenTree::Literal(..) |
+ TokenTree::Group(..) => (tt.span(), tt.to_string().trim().into()),
_ => panic!("expected a literal in token tree, got: {:?}", tt)
}
}
extern crate proc_macro;
-use proc_macro::{TokenStream, TokenNode, Span, Diagnostic};
+use proc_macro::{TokenStream, TokenTree, Span, Diagnostic};
fn parse(input: TokenStream) -> Result<(), Diagnostic> {
let mut count = 0;
let mut last_span = Span::def_site();
for tree in input {
- let span = tree.span;
+ let span = tree.span();
if count >= 3 {
return Err(span.error(format!("expected EOF, found `{}`.", tree))
.span_note(last_span, "last good input was here")
.help("input must be: `===`"))
}
- if let TokenNode::Op('=', _) = tree.kind {
- count += 1;
- } else {
- return Err(span.error(format!("expected `=`, found `{}`.", tree)));
+ if let TokenTree::Op(tt) = tree {
+ if tt.op() == '=' {
+ count += 1;
+ last_span = span;
+ continue
+ }
}
-
- last_span = span;
+ return Err(span.error(format!("expected `=`, found `{}`.", tree)));
}
if count < 3 {
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(nll)]
+
+struct Foo {
+}
+
+impl Foo {
+ fn method(&mut self, foo: &mut Foo) {
+ }
+}
+
+fn main() {
+ let mut foo = Foo { };
+ foo.method(&mut foo);
+ //~^ cannot borrow `foo` as mutable more than once at a time
+ //~^^ cannot borrow `foo` as mutable more than once at a time
+}
--- /dev/null
+error[E0499]: cannot borrow `foo` as mutable more than once at a time
+ --> $DIR/two-phase-multi-mut.rs:23:16
+ |
+LL | foo.method(&mut foo);
+ | -----------^^^^^^^^-
+ | | |
+ | | second mutable borrow occurs here
+ | first mutable borrow occurs here
+ | borrow later used here
+
+error[E0499]: cannot borrow `foo` as mutable more than once at a time
+ --> $DIR/two-phase-multi-mut.rs:23:5
+ |
+LL | foo.method(&mut foo);
+ | ^^^^^^^^^^^--------^
+ | | |
+ | | first mutable borrow occurs here
+ | second mutable borrow occurs here
+ | borrow later used here
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0499`.
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// revisions: lxl nll
+//[lxl]compile-flags: -Z borrowck=mir -Z two-phase-borrows
+//[nll]compile-flags: -Z borrowck=mir -Z two-phase-borrows -Z nll
+
+// run-pass
+
+use std::io::Result;
+
+struct Foo {}
+
+pub trait FakeRead {
+ fn read_to_end(&mut self, buf: &mut Vec<u8>) -> Result<usize>;
+}
+
+impl FakeRead for Foo {
+ fn read_to_end(&mut self, buf: &mut Vec<u8>) -> Result<usize> {
+ Ok(4)
+ }
+}
+
+fn main() {
+ let mut a = Foo {};
+ let mut v = Vec::new();
+ a.read_to_end(&mut v);
+}
#![feature(rustc_attrs)]
#[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<S, T, U>) :-
+ //~| ERROR FromEnv
+ //~| ERROR FromEnv
+ //~| ERROR FromEnv
trait Foo<S, T, U> {
fn s(S) -> S;
fn t(T) -> T;
LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<S, T, U>) :-
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to previous error
+error: FromEnv(S: std::marker::Sized) :- FromEnv(Self: Foo<S, T, U>).
+ --> $DIR/lower_trait.rs:13:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<S, T, U>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: FromEnv(T: std::marker::Sized) :- FromEnv(Self: Foo<S, T, U>).
+ --> $DIR/lower_trait.rs:13:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<S, T, U>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: FromEnv(U: std::marker::Sized) :- FromEnv(Self: Foo<S, T, U>).
+ --> $DIR/lower_trait.rs:13:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<S, T, U>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rustc_attrs)]
+
+#[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<F>) :-
+ //~| ERROR FromEnv
+ //~| ERROR FromEnv
+ //~| ERROR FromEnv
+trait Foo<F> where for<'a> F: Fn(&'a (u8, u16)) -> &'a u8
+{
+ fn s(F) -> F;
+}
+
+fn main() {
+ println!("hello");
+}
--- /dev/null
+error: Implemented(Self: Foo<F>) :- FromEnv(Self: Foo<F>).
+ --> $DIR/lower_trait_higher_rank.rs:13:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<F>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: FromEnv(F: std::marker::Sized) :- FromEnv(Self: Foo<F>).
+ --> $DIR/lower_trait_higher_rank.rs:13:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<F>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: FromEnv(F: std::ops::Fn<(&'a (u8, u16),)>) :- FromEnv(Self: Foo<F>).
+ --> $DIR/lower_trait_higher_rank.rs:13:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<F>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: FromEnv(<F as std::ops::FnOnce<(&'a (u8, u16),)>>::Output == &'a u8) :- FromEnv(Self: Foo<F>).
+ --> $DIR/lower_trait_higher_rank.rs:13:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<F>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 4 previous errors
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rustc_attrs)]
+
+use std::fmt::{Debug, Display};
+use std::borrow::Borrow;
+
+#[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<'a, 'b, S, T, U>) :-
+ //~| ERROR FromEnv
+ //~| ERROR FromEnv
+ //~| ERROR FromEnv
+ //~| ERROR FromEnv
+ //~| ERROR RegionOutlives
+ //~| ERROR TypeOutlives
+trait Foo<'a, 'b, S, T, U> where S: Debug, T: Borrow<U>, U: ?Sized, 'a: 'b, U: 'b {
+ fn s(S) -> S;
+ fn t(T) -> T;
+ fn u(U) -> U;
+}
+
+fn main() {
+ println!("hello");
+}
--- /dev/null
+error: Implemented(Self: Foo<'a, 'b, S, T, U>) :- FromEnv(Self: Foo<'a, 'b, S, T, U>).
+ --> $DIR/lower_trait_where_clause.rs:16:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<'a, 'b, S, T, U>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: FromEnv(S: std::marker::Sized) :- FromEnv(Self: Foo<'a, 'b, S, T, U>).
+ --> $DIR/lower_trait_where_clause.rs:16:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<'a, 'b, S, T, U>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: FromEnv(T: std::marker::Sized) :- FromEnv(Self: Foo<'a, 'b, S, T, U>).
+ --> $DIR/lower_trait_where_clause.rs:16:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<'a, 'b, S, T, U>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: FromEnv(S: std::fmt::Debug) :- FromEnv(Self: Foo<'a, 'b, S, T, U>).
+ --> $DIR/lower_trait_where_clause.rs:16:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<'a, 'b, S, T, U>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: FromEnv(T: std::borrow::Borrow<U>) :- FromEnv(Self: Foo<'a, 'b, S, T, U>).
+ --> $DIR/lower_trait_where_clause.rs:16:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<'a, 'b, S, T, U>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: RegionOutlives('a : 'b) :- FromEnv(Self: Foo<'a, 'b, S, T, U>).
+ --> $DIR/lower_trait_where_clause.rs:16:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<'a, 'b, S, T, U>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: TypeOutlives(U : 'b) :- FromEnv(Self: Foo<'a, 'b, S, T, U>).
+ --> $DIR/lower_trait_where_clause.rs:16:1
+ |
+LL | #[rustc_dump_program_clauses] //~ ERROR Implemented(Self: Foo<'a, 'b, S, T, U>) :-
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 7 previous errors
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn gratitude() {
+ let for_you = false;
+ if not for_you {
+ //~^ ERROR unexpected `for_you` after identifier
+ println!("I couldn't");
+ }
+}
+
+fn qualification() {
+ let the_worst = true;
+ while not the_worst {
+ //~^ ERROR unexpected `the_worst` after identifier
+ println!("still pretty bad");
+ }
+}
+
+fn should_we() {
+ let not = true;
+ if not // lack of braces is [sic]
+ println!("Then when?");
+ //~^ ERROR expected `{`, found `;
+ //~| ERROR unexpected `println` after identifier
+}
+
+fn sleepy() {
+ let resource = not 2;
+ //~^ ERROR unexpected `2` after identifier
+}
+
+fn main() {
+ let be_smothered_out_before = true;
+ let young_souls = not be_smothered_out_before;
+ //~^ ERROR unexpected `be_smothered_out_before` after identifier
+}
--- /dev/null
+error: unexpected `for_you` after identifier
+ --> $DIR/issue-46836-identifier-not-instead-of-negation.rs:13:12
+ |
+LL | if not for_you {
+ | ----^^^^^^^
+ | |
+ | help: use `!` to perform logical negation
+
+error: unexpected `the_worst` after identifier
+ --> $DIR/issue-46836-identifier-not-instead-of-negation.rs:21:15
+ |
+LL | while not the_worst {
+ | ----^^^^^^^^^
+ | |
+ | help: use `!` to perform logical negation
+
+error: unexpected `println` after identifier
+ --> $DIR/issue-46836-identifier-not-instead-of-negation.rs:30:9
+ |
+LL | if not // lack of braces is [sic]
+ | ----- help: use `!` to perform logical negation
+LL | println!("Then when?");
+ | ^^^^^^^
+
+error: expected `{`, found `;`
+ --> $DIR/issue-46836-identifier-not-instead-of-negation.rs:30:31
+ |
+LL | if not // lack of braces is [sic]
+ | -- this `if` statement has a condition, but no block
+LL | println!("Then when?");
+ | ^
+
+error: unexpected `2` after identifier
+ --> $DIR/issue-46836-identifier-not-instead-of-negation.rs:36:24
+ |
+LL | let resource = not 2;
+ | ----^
+ | |
+ | help: use `!` to perform logical negation
+
+error: unexpected `be_smothered_out_before` after identifier
+ --> $DIR/issue-46836-identifier-not-instead-of-negation.rs:42:27
+ |
+LL | let young_souls = not be_smothered_out_before;
+ | ----^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | help: use `!` to perform logical negation
+
+error: aborting due to 6 previous errors
+
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(generic_param_attrs)]
#![feature(dropck_eyepatch)]
// This is a support file for ../dropck-eyepatch-extern-crate.rs
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(generic_param_attrs)]
#![feature(dropck_eyepatch)]
// This test ensures that a use of `#[may_dangle]` is rejected if
error[E0569]: requires an `unsafe impl` declaration due to `#[may_dangle]` attribute
- --> $DIR/dropck-eyepatch-implies-unsafe-impl.rs:32:1
+ --> $DIR/dropck-eyepatch-implies-unsafe-impl.rs:31:1
|
LL | / impl<#[may_dangle] A, B: fmt::Debug> Drop for Pt<A, B> {
LL | | //~^ ERROR requires an `unsafe impl` declaration due to `#[may_dangle]` attribute
| |_^
error[E0569]: requires an `unsafe impl` declaration due to `#[may_dangle]` attribute
- --> $DIR/dropck-eyepatch-implies-unsafe-impl.rs:38:1
+ --> $DIR/dropck-eyepatch-implies-unsafe-impl.rs:37:1
|
LL | / impl<#[may_dangle] 'a, 'b, B: fmt::Debug> Drop for Pr<'a, 'b, B> {
LL | | //~^ ERROR requires an `unsafe impl` declaration due to `#[may_dangle]` attribute
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(generic_param_attrs)]
#![feature(dropck_eyepatch)]
// The point of this test is to test uses of `#[may_dangle]` attribute
error[E0597]: `c` does not live long enough
- --> $DIR/dropck-eyepatch-reorder.rs:57:20
+ --> $DIR/dropck-eyepatch-reorder.rs:56:20
|
LL | dt = Dt("dt", &c);
| ^ borrowed value does not live long enough
= note: values in a scope are dropped in the opposite order they are created
error[E0597]: `c` does not live long enough
- --> $DIR/dropck-eyepatch-reorder.rs:59:20
+ --> $DIR/dropck-eyepatch-reorder.rs:58:20
|
LL | dr = Dr("dr", &c);
| ^ borrowed value does not live long enough
= note: values in a scope are dropped in the opposite order they are created
error[E0597]: `c` does not live long enough
- --> $DIR/dropck-eyepatch-reorder.rs:67:29
+ --> $DIR/dropck-eyepatch-reorder.rs:66:29
|
LL | pt = Pt("pt", &c_long, &c);
| ^ borrowed value does not live long enough
= note: values in a scope are dropped in the opposite order they are created
error[E0597]: `c` does not live long enough
- --> $DIR/dropck-eyepatch-reorder.rs:69:29
+ --> $DIR/dropck-eyepatch-reorder.rs:68:29
|
LL | pr = Pr("pr", &c_long, &c);
| ^ borrowed value does not live long enough
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(generic_param_attrs)]
#![feature(dropck_eyepatch)]
// The point of this test is to illustrate that the `#[may_dangle]`
error[E0597]: `c` does not live long enough
- --> $DIR/dropck-eyepatch.rs:80:20
+ --> $DIR/dropck-eyepatch.rs:79:20
|
LL | dt = Dt("dt", &c);
| ^ borrowed value does not live long enough
= note: values in a scope are dropped in the opposite order they are created
error[E0597]: `c` does not live long enough
- --> $DIR/dropck-eyepatch.rs:82:20
+ --> $DIR/dropck-eyepatch.rs:81:20
|
LL | dr = Dr("dr", &c);
| ^ borrowed value does not live long enough
= note: values in a scope are dropped in the opposite order they are created
error[E0597]: `c` does not live long enough
- --> $DIR/dropck-eyepatch.rs:90:29
+ --> $DIR/dropck-eyepatch.rs:89:29
|
LL | pt = Pt("pt", &c_long, &c);
| ^ borrowed value does not live long enough
= note: values in a scope are dropped in the opposite order they are created
error[E0597]: `c` does not live long enough
- --> $DIR/dropck-eyepatch.rs:92:29
+ --> $DIR/dropck-eyepatch.rs:91:29
|
LL | pr = Pr("pr", &c_long, &c);
| ^ borrowed value does not live long enough
// This test ensures that attributes on formals in generic parameter
// lists are included when we are checking for unstable attributes.
-//
-// Note that feature(generic_param_attrs) *is* enabled here. We are
-// checking feature-gating of the attributes themselves, not the
-// capability to parse such attributes in that context.
// gate-test-custom_attribute
-#![feature(generic_param_attrs)]
-#![allow(dead_code)]
-
struct StLt<#[lt_struct] 'a>(&'a u32);
//~^ ERROR The attribute `lt_struct` is currently unknown to the compiler
struct StTy<#[ty_struct] I>(I);
error[E0658]: The attribute `lt_struct` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:23:13
+ --> $DIR/feature-gate-custom_attribute2.rs:16:13
|
LL | struct StLt<#[lt_struct] 'a>(&'a u32);
| ^^^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `ty_struct` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:25:13
+ --> $DIR/feature-gate-custom_attribute2.rs:18:13
|
LL | struct StTy<#[ty_struct] I>(I);
| ^^^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `lt_enum` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:28:11
+ --> $DIR/feature-gate-custom_attribute2.rs:21:11
|
LL | enum EnLt<#[lt_enum] 'b> { A(&'b u32), B }
| ^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `ty_enum` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:30:11
+ --> $DIR/feature-gate-custom_attribute2.rs:23:11
|
LL | enum EnTy<#[ty_enum] J> { A(J), B }
| ^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `lt_trait` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:33:12
+ --> $DIR/feature-gate-custom_attribute2.rs:26:12
|
LL | trait TrLt<#[lt_trait] 'c> { fn foo(&self, _: &'c [u32]) -> &'c u32; }
| ^^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `ty_trait` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:35:12
+ --> $DIR/feature-gate-custom_attribute2.rs:28:12
|
LL | trait TrTy<#[ty_trait] K> { fn foo(&self, _: K); }
| ^^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `lt_type` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:38:11
+ --> $DIR/feature-gate-custom_attribute2.rs:31:11
|
LL | type TyLt<#[lt_type] 'd> = &'d u32;
| ^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `ty_type` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:40:11
+ --> $DIR/feature-gate-custom_attribute2.rs:33:11
|
LL | type TyTy<#[ty_type] L> = (L, );
| ^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `lt_inherent` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:43:6
+ --> $DIR/feature-gate-custom_attribute2.rs:36:6
|
LL | impl<#[lt_inherent] 'e> StLt<'e> { }
| ^^^^^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `ty_inherent` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:45:6
+ --> $DIR/feature-gate-custom_attribute2.rs:38:6
|
LL | impl<#[ty_inherent] M> StTy<M> { }
| ^^^^^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `lt_impl_for` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:48:6
+ --> $DIR/feature-gate-custom_attribute2.rs:41:6
|
LL | impl<#[lt_impl_for] 'f> TrLt<'f> for StLt<'f> {
| ^^^^^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `ty_impl_for` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:52:6
+ --> $DIR/feature-gate-custom_attribute2.rs:45:6
|
LL | impl<#[ty_impl_for] N> TrTy<N> for StTy<N> {
| ^^^^^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `lt_fn` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:57:9
+ --> $DIR/feature-gate-custom_attribute2.rs:50:9
|
LL | fn f_lt<#[lt_fn] 'g>(_: &'g [u32]) -> &'g u32 { loop { } }
| ^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `ty_fn` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:59:9
+ --> $DIR/feature-gate-custom_attribute2.rs:52:9
|
LL | fn f_ty<#[ty_fn] O>(_: O) { }
| ^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `lt_meth` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:63:13
+ --> $DIR/feature-gate-custom_attribute2.rs:56:13
|
LL | fn m_lt<#[lt_meth] 'h>(_: &'h [u32]) -> &'h u32 { loop { } }
| ^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `ty_meth` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:65:13
+ --> $DIR/feature-gate-custom_attribute2.rs:58:13
|
LL | fn m_ty<#[ty_meth] P>(_: P) { }
| ^^^^^^^^^^
= help: add #![feature(custom_attribute)] to the crate attributes to enable
error[E0658]: The attribute `lt_hof` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642)
- --> $DIR/feature-gate-custom_attribute2.rs:70:19
+ --> $DIR/feature-gate-custom_attribute2.rs:63:19
|
LL | where Q: for <#[lt_hof] 'i> Fn(&'i [u32]) -> &'i u32
| ^^^^^^^^^
+++ /dev/null
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// This test ensures that attributes on formals in generic parameter
-// lists are rejected if feature(generic_param_attrs) is not enabled.
-//
-// (We are prefixing all tested features with `rustc_`, to ensure that
-// the attributes themselves won't be rejected by the compiler when
-// using `rustc_attrs` feature. There is a separate compile-fail/ test
-// ensuring that the attribute feature-gating works in this context.)
-
-#![feature(rustc_attrs)]
-#![allow(dead_code)]
-
-struct StLt<#[rustc_lt_struct] 'a>(&'a u32);
-//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761)
-struct StTy<#[rustc_ty_struct] I>(I);
-//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761)
-
-enum EnLt<#[rustc_lt_enum] 'b> { A(&'b u32), B }
-//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761)
-enum EnTy<#[rustc_ty_enum] J> { A(J), B }
-//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761)
-
-trait TrLt<#[rustc_lt_trait] 'c> { fn foo(&self, _: &'c [u32]) -> &'c u32; }
-//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761)
-trait TrTy<#[rustc_ty_trait] K> { fn foo(&self, _: K); }
-//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761)
-
-type TyLt<#[rustc_lt_type] 'd> = &'d u32;
-//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761)
-type TyTy<#[rustc_ty_type] L> = (L, );
-//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761)
-
-impl<#[rustc_lt_inherent] 'e> StLt<'e> { }
-//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761)
-impl<#[rustc_ty_inherent] M> StTy<M> { }
-//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761)
-
-impl<#[rustc_lt_impl_for] 'f> TrLt<'f> for StLt<'f> {
- //~^ ERROR attributes on lifetime bindings are experimental (see issue #34761)
- fn foo(&self, _: &'f [u32]) -> &'f u32 { loop { } }
-}
-impl<#[rustc_ty_impl_for] N> TrTy<N> for StTy<N> {
- //~^ ERROR attributes on type parameter bindings are experimental (see issue #34761)
- fn foo(&self, _: N) { }
-}
-
-fn f_lt<#[rustc_lt_fn] 'g>(_: &'g [u32]) -> &'g u32 { loop { } }
-//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761)
-fn f_ty<#[rustc_ty_fn] O>(_: O) { }
-//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761)
-
-impl<I> StTy<I> {
- fn m_lt<#[rustc_lt_meth] 'h>(_: &'h [u32]) -> &'h u32 { loop { } }
- //~^ ERROR attributes on lifetime bindings are experimental (see issue #34761)
- fn m_ty<#[rustc_ty_meth] P>(_: P) { }
- //~^ ERROR attributes on type parameter bindings are experimental (see issue #34761)
-}
-
-fn hof_lt<Q>(_: Q)
- where Q: for <#[rustc_lt_hof] 'i> Fn(&'i [u32]) -> &'i u32
- //~^ ERROR attributes on lifetime bindings are experimental (see issue #34761)
-{
-}
-
-fn main() {
-
-}
+++ /dev/null
-error[E0658]: attributes on lifetime bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:22:13
- |
-LL | struct StLt<#[rustc_lt_struct] 'a>(&'a u32);
- | ^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on type parameter bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:24:13
- |
-LL | struct StTy<#[rustc_ty_struct] I>(I);
- | ^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on lifetime bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:27:11
- |
-LL | enum EnLt<#[rustc_lt_enum] 'b> { A(&'b u32), B }
- | ^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on type parameter bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:29:11
- |
-LL | enum EnTy<#[rustc_ty_enum] J> { A(J), B }
- | ^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on lifetime bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:32:12
- |
-LL | trait TrLt<#[rustc_lt_trait] 'c> { fn foo(&self, _: &'c [u32]) -> &'c u32; }
- | ^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on type parameter bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:34:12
- |
-LL | trait TrTy<#[rustc_ty_trait] K> { fn foo(&self, _: K); }
- | ^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on lifetime bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:37:11
- |
-LL | type TyLt<#[rustc_lt_type] 'd> = &'d u32;
- | ^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on type parameter bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:39:11
- |
-LL | type TyTy<#[rustc_ty_type] L> = (L, );
- | ^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on lifetime bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:42:6
- |
-LL | impl<#[rustc_lt_inherent] 'e> StLt<'e> { }
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on type parameter bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:44:6
- |
-LL | impl<#[rustc_ty_inherent] M> StTy<M> { }
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on lifetime bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:47:6
- |
-LL | impl<#[rustc_lt_impl_for] 'f> TrLt<'f> for StLt<'f> {
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on type parameter bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:51:6
- |
-LL | impl<#[rustc_ty_impl_for] N> TrTy<N> for StTy<N> {
- | ^^^^^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on lifetime bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:56:9
- |
-LL | fn f_lt<#[rustc_lt_fn] 'g>(_: &'g [u32]) -> &'g u32 { loop { } }
- | ^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on type parameter bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:58:9
- |
-LL | fn f_ty<#[rustc_ty_fn] O>(_: O) { }
- | ^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on lifetime bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:62:13
- |
-LL | fn m_lt<#[rustc_lt_meth] 'h>(_: &'h [u32]) -> &'h u32 { loop { } }
- | ^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on type parameter bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:64:13
- |
-LL | fn m_ty<#[rustc_ty_meth] P>(_: P) { }
- | ^^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error[E0658]: attributes on lifetime bindings are experimental (see issue #34761)
- --> $DIR/feature-gate-generic_param_attrs.rs:69:19
- |
-LL | where Q: for <#[rustc_lt_hof] 'i> Fn(&'i [u32]) -> &'i u32
- | ^^^^^^^^^^^^^^^
- |
- = help: add #![feature(generic_param_attrs)] to the crate attributes to enable
-
-error: aborting due to 17 previous errors
-
-For more information about this error, try `rustc --explain E0658`.
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(decl_macro)]
+
+macro_rules! returns_isize(
+ ($ident:ident) => (
+ fn $ident() -> isize;
+ )
+);
+
+macro takes_u32_returns_u32($ident:ident) {
+ fn $ident (arg: u32) -> u32;
+}
+
+macro_rules! emits_nothing(
+ () => ()
+);
+
+#[link(name = "rust_test_helpers", kind = "static")]
+extern {
+ returns_isize!(rust_get_test_int);
+ //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
+ //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ emits_nothing!();
+ //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+}
--- /dev/null
+error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+ --> $DIR/feature-gate-macros_in_extern.rs:29:5
+ |
+LL | returns_isize!(rust_get_test_int);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: add #![feature(macros_in_extern)] to the crate attributes to enable
+
+error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+ --> $DIR/feature-gate-macros_in_extern.rs:31:5
+ |
+LL | takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: add #![feature(macros_in_extern)] to the crate attributes to enable
+
+error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+ --> $DIR/feature-gate-macros_in_extern.rs:33:5
+ |
+LL | emits_nothing!();
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = help: add #![feature(macros_in_extern)] to the crate attributes to enable
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0658`.
// Check that `may_dangle` is rejected if `dropck_eyepatch` feature gate is absent.
-#![feature(generic_param_attrs)]
-
struct Pt<A>(A);
impl<#[may_dangle] A> Drop for Pt<A> {
//~^ ERROR may_dangle has unstable semantics and may be removed in the future
error[E0658]: may_dangle has unstable semantics and may be removed in the future (see issue #34761)
- --> $DIR/feature-gate-may-dangle.rs:18:6
+ --> $DIR/feature-gate-may-dangle.rs:16:6
|
LL | impl<#[may_dangle] A> Drop for Pt<A> {
| ^^^^^^^^^^^^^
error[E0658]: non-ascii idents are not fully supported. (see issue #28979)
- --> $DIR/feature-gate-non_ascii_idents.rs:11:1
+ --> $DIR/feature-gate-non_ascii_idents.rs:11:22
|
LL | extern crate core as bäz; //~ ERROR non-ascii idents
- | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ | ^^^
|
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
--> $DIR/feature-gate-non_ascii_idents.rs:13:5
|
LL | use föö::bar; //~ ERROR non-ascii idents
- | ^^^^^^^^
+ | ^^^
|
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
error[E0658]: non-ascii idents are not fully supported. (see issue #28979)
- --> $DIR/feature-gate-non_ascii_idents.rs:15:1
+ --> $DIR/feature-gate-non_ascii_idents.rs:15:5
|
LL | mod föö { //~ ERROR non-ascii idents
- | ^^^^^^^
+ | ^^^
|
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
error[E0658]: non-ascii idents are not fully supported. (see issue #28979)
- --> $DIR/feature-gate-non_ascii_idents.rs:19:1
+ --> $DIR/feature-gate-non_ascii_idents.rs:19:4
|
-LL | / fn bär( //~ ERROR non-ascii idents
-LL | | bäz: isize //~ ERROR non-ascii idents
-LL | | ) {
-LL | | let _ö: isize; //~ ERROR non-ascii idents
-... |
-LL | | }
-LL | | }
- | |_^
+LL | fn bär( //~ ERROR non-ascii idents
+ | ^^^
|
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
error[E0658]: non-ascii idents are not fully supported. (see issue #28979)
- --> $DIR/feature-gate-non_ascii_idents.rs:29:1
+ --> $DIR/feature-gate-non_ascii_idents.rs:29:8
|
LL | struct Föö { //~ ERROR non-ascii idents
- | ^^^^^^^^^^
+ | ^^^
|
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
--> $DIR/feature-gate-non_ascii_idents.rs:30:5
|
LL | föö: isize //~ ERROR non-ascii idents
- | ^^^^^^^^^^
+ | ^^^
|
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
error[E0658]: non-ascii idents are not fully supported. (see issue #28979)
- --> $DIR/feature-gate-non_ascii_idents.rs:33:1
+ --> $DIR/feature-gate-non_ascii_idents.rs:33:6
|
LL | enum Bär { //~ ERROR non-ascii idents
- | ^^^^^^^^
+ | ^^^
|
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
--> $DIR/feature-gate-non_ascii_idents.rs:35:9
|
LL | qüx: isize //~ ERROR non-ascii idents
- | ^^^^^^^^^^
+ | ^^^
|
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
error[E0658]: non-ascii idents are not fully supported. (see issue #28979)
- --> $DIR/feature-gate-non_ascii_idents.rs:40:5
+ --> $DIR/feature-gate-non_ascii_idents.rs:40:8
|
LL | fn qüx(); //~ ERROR non-ascii idents
- | ^^^^^^^^^
+ | ^^^
|
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// This test previously ensured that attributes on formals in generic parameter
+// lists are rejected without a feature gate.
+//
+// (We are prefixing all tested features with `rustc_`, to ensure that
+// the attributes themselves won't be rejected by the compiler when
+// using `rustc_attrs` feature. There is a separate compile-fail/ test
+// ensuring that the attribute feature-gating works in this context.)
+
+// must-compile-successfully
+
+#![feature(rustc_attrs)]
+#![allow(dead_code)]
+
+struct StLt<#[rustc_lt_struct] 'a>(&'a u32);
+struct StTy<#[rustc_ty_struct] I>(I);
+enum EnLt<#[rustc_lt_enum] 'b> { A(&'b u32), B }
+enum EnTy<#[rustc_ty_enum] J> { A(J), B }
+trait TrLt<#[rustc_lt_trait] 'c> { fn foo(&self, _: &'c [u32]) -> &'c u32; }
+trait TrTy<#[rustc_ty_trait] K> { fn foo(&self, _: K); }
+type TyLt<#[rustc_lt_type] 'd> = &'d u32;
+type TyTy<#[rustc_ty_type] L> = (L, );
+
+impl<#[rustc_lt_inherent] 'e> StLt<'e> { }
+impl<#[rustc_ty_inherent] M> StTy<M> { }
+impl<#[rustc_lt_impl_for] 'f> TrLt<'f> for StLt<'f> {
+ fn foo(&self, _: &'f [u32]) -> &'f u32 { loop { } }
+}
+impl<#[rustc_ty_impl_for] N> TrTy<N> for StTy<N> {
+ fn foo(&self, _: N) { }
+}
+
+fn f_lt<#[rustc_lt_fn] 'g>(_: &'g [u32]) -> &'g u32 { loop { } }
+fn f_ty<#[rustc_ty_fn] O>(_: O) { }
+
+impl<I> StTy<I> {
+ fn m_lt<#[rustc_lt_meth] 'h>(_: &'h [u32]) -> &'h u32 { loop { } }
+ fn m_ty<#[rustc_ty_meth] P>(_: P) { }
+}
+
+fn hof_lt<Q>(_: Q)
+ where Q: for <#[rustc_lt_hof] 'i> Fn(&'i [u32]) -> &'i u32
+{}
+
+fn main() {}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(universal_impl_trait)]
+
+use std::fmt::Debug;
+
+fn foo<T>(x: impl Debug) { }
+
+fn main() {
+ foo::<String>('a'); //~ ERROR cannot provide explicit type parameters
+}
--- /dev/null
+error[E0632]: cannot provide explicit type parameters when `impl Trait` is used in argument position.
+ --> $DIR/universal-issue-48703.rs:18:5
+ |
+LL | foo::<String>('a'); //~ ERROR cannot provide explicit type parameters
+ | ^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0632`.
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #48728, an ICE that occurred computing
+// coherence "help" information.
+
+#[derive(Clone)] //~ ERROR conflicting implementations of trait `std::clone::Clone`
+struct Node<T: ?Sized>(Box<T>);
+
+impl<T: Clone + ?Sized> Clone for Node<[T]> {
+ fn clone(&self) -> Self {
+ Node(Box::clone(&self.0))
+ }
+}
+
+fn main() {}
--- /dev/null
+error[E0119]: conflicting implementations of trait `std::clone::Clone` for type `Node<[_]>`:
+ --> $DIR/issue-48728.rs:14:10
+ |
+LL | #[derive(Clone)] //~ ERROR conflicting implementations of trait `std::clone::Clone`
+ | ^^^^^ conflicting implementation for `Node<[_]>`
+...
+LL | impl<T: Clone + ?Sized> Clone for Node<[T]> {
+ | ------------------------------------------- first implementation here
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0119`.
#![allow(warnings)]
#![feature(dropck_eyepatch)]
-#![feature(generic_param_attrs)]
fn use_x(_: usize) -> bool { true }
#![allow(warnings)]
#![feature(dropck_eyepatch)]
-#![feature(generic_param_attrs)]
fn use_x(_: usize) -> bool { true }
error[E0506]: cannot assign to `v[..]` because it is borrowed
- --> $DIR/drop-no-may-dangle.rs:31:9
+ --> $DIR/drop-no-may-dangle.rs:30:9
|
LL | let p: WrapMayNotDangle<&usize> = WrapMayNotDangle { value: &v[0] };
| ----- borrow of `v[..]` occurs here
| - borrow later used here, when `p` is dropped
error[E0506]: cannot assign to `v[..]` because it is borrowed
- --> $DIR/drop-no-may-dangle.rs:34:5
+ --> $DIR/drop-no-may-dangle.rs:33:5
|
LL | let p: WrapMayNotDangle<&usize> = WrapMayNotDangle { value: &v[0] };
| ----- borrow of `v[..]` occurs here
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+#![feature(nll)]
+struct FancyNum {
+ num: u8,
+}
+
+fn main() {
+ let mut fancy = FancyNum{ num: 5 };
+ let fancy_ref = &(&mut fancy);
+ fancy_ref.num = 6; //~ ERROR E0594
+ println!("{}", fancy_ref.num);
+}
--- /dev/null
+error[E0594]: cannot assign to data in a `&` reference
+ --> $DIR/issue-47388.rs:18:5
+ |
+LL | let fancy_ref = &(&mut fancy);
+ | ------------- help: consider changing this to be a mutable reference: `&mut`
+LL | fancy_ref.num = 6; //~ ERROR E0594
+ | ^^^^^^^^^^^^^^^^^ `fancy_ref` is a `&` reference, so the data it refers to cannot be written
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0594`.
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub struct Foo {
+}
+
+impl Foo {
+ fn get(&self) -> Option<&Result<String, String>> {
+ None
+ }
+
+ fn mutate(&mut self) { }
+}
+
+fn main() {
+ let mut foo = Foo { };
+
+ // foo.get() returns type Option<&Result<String, String>>, so
+ // using `string` keeps borrow of `foo` alive. Hence calling
+ // `foo.mutate()` should be an error.
+ while let Some(Ok(string)) = foo.get() {
+ foo.mutate();
+ //~^ ERROR cannot borrow `foo` as mutable
+ println!("foo={:?}", *string);
+ }
+}
--- /dev/null
+error[E0502]: cannot borrow `foo` as mutable because it is also borrowed as immutable
+ --> $DIR/borrowck-issue-49631.rs:30:9
+ |
+LL | while let Some(Ok(string)) = foo.get() {
+ | --- - immutable borrow ends here
+ | |
+ | immutable borrow occurs here
+LL | foo.mutate();
+ | ^^^ mutable borrow occurs here
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0502`.
"sparc-unknown-linux-gnu",
"sparc64-unknown-linux-gnu",
"sparcv9-sun-solaris",
+ "thumbv6m-none-eabi",
+ "thumbv7em-none-eabi",
+ "thumbv7em-none-eabihf",
+ "thumbv7m-none-eabi",
"wasm32-unknown-emscripten",
"wasm32-unknown-unknown",
"x86_64-apple-darwin",
target: "*".to_string(),
});
+ // If the components/extensions don't actually exist for this
+ // particular host/target combination then nix it entirely from our
+ // lists.
+ {
+ let has_component = |c: &Component| {
+ if c.target == "*" {
+ return true
+ }
+ let pkg = match manifest.pkg.get(&c.pkg) {
+ Some(p) => p,
+ None => return false,
+ };
+ let target = match pkg.target.get(&c.target) {
+ Some(t) => t,
+ None => return false,
+ };
+ target.available
+ };
+ extensions.retain(&has_component);
+ components.retain(&has_component);
+ }
+
pkg.target.insert(host.to_string(), Target {
available: true,
url: Some(self.url(&filename)),
}
}
+#[derive(Clone)]
+pub enum CompareMode {
+ Nll
+}
+
+impl CompareMode {
+ fn to_str(&self) -> &'static str {
+ match *self {
+ CompareMode::Nll => "nll"
+ }
+ }
+
+ pub fn parse(s: String) -> CompareMode {
+ match s.as_str() {
+ "nll" => CompareMode::Nll,
+ x => panic!("unknown --compare-mode option: {}", x),
+ }
+ }
+}
+
#[derive(Clone)]
pub struct Config {
/// The library paths required for running the compiler
/// where to find the remote test client process, if we're using it
pub remote_test_client: Option<PathBuf>,
+ /// mode describing what file the actual ui output will be compared to
+ pub compare_mode: Option<CompareMode>,
+
// Configuration for various run-make tests frobbing things like C compilers
// or querying about various LLVM component information.
pub cc: String,
}
/// Used by `ui` tests to generate things like `foo.stderr` from `foo.rs`.
-pub fn expected_output_path(testpaths: &TestPaths, revision: Option<&str>, kind: &str) -> PathBuf {
+pub fn expected_output_path(testpaths: &TestPaths,
+ revision: Option<&str>,
+ compare_mode: &Option<CompareMode>,
+ kind: &str) -> PathBuf {
+
assert!(UI_EXTENSIONS.contains(&kind));
- let extension = match revision {
- Some(r) => format!("{}.{}", r, kind),
- None => kind.to_string(),
- };
+ let mut parts = Vec::new();
+
+ if let Some(x) = revision { parts.push(x); }
+ if let Some(ref x) = *compare_mode { parts.push(x.to_str()); }
+ parts.push(kind);
+
+ let extension = parts.join(".");
testpaths.file.with_extension(extension)
}
use common::{Config, TestPaths};
use common::{DebugInfoGdb, DebugInfoLldb, Mode, Pretty};
use common::{expected_output_path, UI_EXTENSIONS};
+use common::CompareMode;
use test::ColorConfig;
use util::logv;
"path to the remote test client",
"PATH",
)
+ .optopt(
+ "",
+ "compare-mode",
+ "mode describing what file the actual ui output will be compared to",
+ "COMPARE MODE"
+ )
.optflag("h", "help", "show this message");
let (argv0, args_) = args.split_first().unwrap();
quiet: matches.opt_present("quiet"),
color,
remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from),
+ compare_mode: matches.opt_str("compare-mode").map(CompareMode::parse),
cc: matches.opt_str("cc").unwrap(),
cxx: matches.opt_str("cxx").unwrap(),
};
// Debugging emscripten code doesn't make sense today
- let ignore = early_props.ignore || !up_to_date(config, testpaths, &early_props)
+ let ignore = early_props.ignore
+ || (!up_to_date(config, testpaths, &early_props) && config.compare_mode.is_none())
|| (config.mode == DebugInfoGdb || config.mode == DebugInfoLldb)
&& config.target.contains("emscripten");
// UI test files.
for extension in UI_EXTENSIONS {
for revision in &props.revisions {
- let path = &expected_output_path(testpaths, Some(revision), extension);
+ let path = &expected_output_path(testpaths,
+ Some(revision),
+ &config.compare_mode,
+ extension);
inputs.push(mtime(path));
}
if props.revisions.is_empty() {
- let path = &expected_output_path(testpaths, None, extension);
+ let path = &expected_output_path(testpaths, None, &config.compare_mode, extension);
inputs.push(mtime(path));
}
}
use common::{Codegen, CodegenUnits, DebugInfoGdb, DebugInfoLldb, Rustdoc};
use common::{Incremental, MirOpt, RunMake, Ui};
use common::{expected_output_path, UI_STDERR, UI_STDOUT};
+use common::CompareMode;
use diff;
use errors::{self, Error, ErrorKind};
use filetime::FileTime;
}
}
+ match self.config.compare_mode {
+ Some(CompareMode::Nll) => {
+ rustc.args(&["-Znll", "-Zborrowck=mir", "-Ztwo-phase-borrows"]);
+ },
+ None => {},
+ }
+
if self.props.force_host {
rustc.args(self.split_maybe_args(&self.config.host_rustcflags));
} else {
let proc_res = self.compile_test();
self.check_if_test_should_compile(&proc_res);
- let expected_stderr_path = self.expected_output_path(UI_STDERR);
- let expected_stderr = self.load_expected_output(&expected_stderr_path);
-
- let expected_stdout_path = self.expected_output_path(UI_STDOUT);
- let expected_stdout = self.load_expected_output(&expected_stdout_path);
+ let expected_stderr = self.load_expected_output(UI_STDERR);
+ let expected_stdout = self.load_expected_output(UI_STDOUT);
let normalized_stdout =
self.normalize_output(&proc_res.stdout, &self.props.normalize_stdout);
self.fatal_proc_rec("test run failed!", &proc_res);
}
}
- if !explicit {
+ if !explicit && self.config.compare_mode.is_none() {
if !expected_errors.is_empty() || !proc_res.status.success() {
// "// error-pattern" comments
self.check_expected_errors(expected_errors, &proc_res);
normalized
}
- fn expected_output_path(&self, kind: &str) -> PathBuf {
- expected_output_path(&self.testpaths, self.revision, kind)
- }
+ fn load_expected_output(&self, kind: &str) -> String {
+ let mut path = expected_output_path(&self.testpaths,
+ self.revision,
+ &self.config.compare_mode,
+ kind);
- fn load_expected_output(&self, path: &Path) -> String {
- if !path.exists() {
- return String::new();
+ if !path.exists() && self.config.compare_mode.is_some() {
+ // fallback!
+ path = expected_output_path(&self.testpaths, self.revision, &None, kind);
}
+ if path.exists() {
+ match self.load_expected_output_from_path(&path) {
+ Ok(x) => x,
+ Err(x) => self.fatal(&x),
+ }
+ } else {
+ String::new()
+ }
+ }
+
+ fn load_expected_output_from_path(&self, path: &Path) -> Result<String, String> {
let mut result = String::new();
match File::open(path).and_then(|mut f| f.read_to_string(&mut result)) {
- Ok(_) => result,
- Err(e) => self.fatal(&format!(
+ Ok(_) => Ok(result),
+ Err(e) => Err(format!(
"failed to load expected output from `{}`: {}",
path.display(),
e
clap = "2.25.0"
[dependencies.mdbook]
-version = "0.1.2"
+version = "0.1.5"
default-features = false
+features = ["search"]
// except according to those terms.
#![feature(link_args)]
+
+#[allow(unused_attributes)]
// Set the stack size at link time on Windows. See rustc_driver::in_rustc_thread
// for the rationale.
#[cfg_attr(all(windows, target_env = "msvc"), link_args = "/STACK:16777216")]
-Subproject commit a4462d18bf6b92aaec1eeb1c30d5ddf94a3ca987
+Subproject commit e784712f09d4978b5331ceaf96476bcf4b1b0b1b
//! This library contains the tidy lints and exposes it
//! to be used by tools.
-#![deny(warnings)]
-
extern crate serde;
extern crate serde_json;
#[macro_use]