mod sig {
use super::{limbs_for_bits, ExpInt, Limb, Loss, LIMB_BITS};
use core::cmp::Ordering;
+ use core::iter;
use core::mem;
pub(super) fn is_all_zeros(limbs: &[Limb]) -> bool {
pub(super) fn add(a: &mut [Limb], b: &[Limb], mut c: Limb) -> Limb {
assert!(c <= 1);
- for (a, &b) in a.iter_mut().zip(b) {
+ for (a, &b) in iter::zip(a, b) {
let (r, overflow) = a.overflowing_add(b);
let (r, overflow2) = r.overflowing_add(c);
*a = r;
pub(super) fn sub(a: &mut [Limb], b: &[Limb], mut c: Limb) -> Limb {
assert!(c <= 1);
- for (a, &b) in a.iter_mut().zip(b) {
+ for (a, &b) in iter::zip(a, b) {
let (r, overflow) = a.overflowing_sub(b);
let (r, overflow2) = r.overflowing_sub(c);
*a = r;
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![no_std]
#![forbid(unsafe_code)]
+#![feature(iter_zip)]
#![feature(nll)]
#![cfg_attr(bootstrap, feature(or_patterns))]
start_ptr
}
- /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
- /// reference to it. Will panic if passed a zero-sized types.
- ///
- /// Panics:
- ///
- /// - Zero-sized types
- /// - Zero-length slices
- #[inline]
- pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
- where
- T: Copy,
- {
- unsafe {
- let len = slice.len();
- let start_ptr = self.alloc_raw_slice(len);
- slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
- slice::from_raw_parts_mut(start_ptr, len)
- }
- }
-
#[inline]
pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
assert!(mem::size_of::<T>() != 0);
}
impl Mutability {
- /// Returns `MutMutable` only if both `self` and `other` are mutable.
- pub fn and(self, other: Self) -> Self {
- match self {
- Mutability::Mut => other,
- Mutability::Not => Mutability::Not,
- }
- }
-
pub fn invert(self) -> Self {
match self {
Mutability::Mut => Mutability::Not,
FloatTy::F64 => sym::f64,
}
}
-
- pub fn bit_width(self) -> u64 {
- match self {
- FloatTy::F32 => 32,
- FloatTy::F64 => 64,
- }
- }
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
IntTy::I128 => sym::i128,
}
}
-
- pub fn bit_width(&self) -> Option<u64> {
- Some(match *self {
- IntTy::Isize => return None,
- IntTy::I8 => 8,
- IntTy::I16 => 16,
- IntTy::I32 => 32,
- IntTy::I64 => 64,
- IntTy::I128 => 128,
- })
- }
-
- pub fn normalize(&self, target_width: u32) -> Self {
- match self {
- IntTy::Isize => match target_width {
- 16 => IntTy::I16,
- 32 => IntTy::I32,
- 64 => IntTy::I64,
- _ => unreachable!(),
- },
- _ => *self,
- }
- }
}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Copy, Debug)]
UintTy::U128 => sym::u128,
}
}
-
- pub fn bit_width(&self) -> Option<u64> {
- Some(match *self {
- UintTy::Usize => return None,
- UintTy::U8 => 8,
- UintTy::U16 => 16,
- UintTy::U32 => 32,
- UintTy::U64 => 64,
- UintTy::U128 => 128,
- })
- }
-
- pub fn normalize(&self, target_width: u32) -> Self {
- match self {
- UintTy::Usize => match target_width {
- 16 => UintTy::U16,
- 32 => UintTy::U32,
- 64 => UintTy::U64,
- _ => unreachable!(),
- },
- _ => *self,
- }
- }
}
/// A constraint on an associated type (e.g., `A = Bar` in `Foo<A = Bar>` or
}
impl FnDecl {
- pub fn get_self(&self) -> Option<ExplicitSelf> {
- self.inputs.get(0).and_then(Param::to_self)
- }
pub fn has_self(&self) -> bool {
self.inputs.get(0).map_or(false, Param::is_self)
}
self.meta_item().map_or(false, |meta_item| meta_item.is_word())
}
- /// Returns `true` if `self` is a `MetaItem` and the meta item is a `ValueString`.
- pub fn is_value_str(&self) -> bool {
- self.value_str().is_some()
- }
-
- /// Returns `true` if `self` is a `MetaItem` and the meta item is a list.
- pub fn is_meta_item_list(&self) -> bool {
- self.meta_item_list().is_some()
- }
-
+ /// See [`MetaItem::name_value_literal_span`].
pub fn name_value_literal_span(&self) -> Option<Span> {
self.meta_item()?.name_value_literal_span()
}
false
}
}
-
- pub fn is_meta_item_list(&self) -> bool {
- self.meta_item_list().is_some()
- }
-
- /// Indicates if the attribute is a `ValueString`.
- pub fn is_value_str(&self) -> bool {
- self.value_str().is_some()
- }
-
- /// This is used in case you want the value span instead of the whole attribute. Example:
- ///
- /// ```text
- /// #[doc(alias = "foo")]
- /// ```
- ///
- /// In here, it'll return a span for `"foo"`.
- pub fn name_value_literal_span(&self) -> Option<Span> {
- match self.kind {
- AttrKind::Normal(ref item, _) => {
- item.meta(self.span).and_then(|meta| meta.name_value_literal_span())
- }
- AttrKind::DocComment(..) => None,
- }
- }
}
impl MetaItem {
self.path == name
}
- pub fn is_value_str(&self) -> bool {
- self.value_str().is_some()
- }
-
/// This is used in case you want the value span instead of the whole attribute. Example:
///
/// ```text
#![feature(const_fn_transmute)]
#![feature(const_panic)]
#![feature(crate_visibility_modifier)]
+#![feature(iter_zip)]
#![feature(label_break_value)]
#![feature(nll)]
#![cfg_attr(bootstrap, feature(or_patterns))]
}
}
- pub fn joint(self) -> TokenStream {
- TokenStream::new(vec![(self, Spacing::Joint)])
- }
-
pub fn token(kind: TokenKind, span: Span) -> TokenTree {
TokenTree::Token(Token::new(kind, span))
}
self.0.len()
}
- pub fn span(&self) -> Option<Span> {
- match &**self.0 {
- [] => None,
- [(tt, _)] => Some(tt.span()),
- [(tt_start, _), .., (tt_end, _)] => Some(tt_start.span().to(tt_end.span())),
- }
- }
-
pub fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
match streams.len() {
0 => TokenStream::default(),
}
}
- pub fn trees_ref(&self) -> CursorRef<'_> {
- CursorRef::new(self)
- }
-
pub fn trees(&self) -> Cursor {
self.clone().into_trees()
}
pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
let mut t1 = self.trees();
let mut t2 = other.trees();
- for (t1, t2) in t1.by_ref().zip(t2.by_ref()) {
+ for (t1, t2) in iter::zip(&mut t1, &mut t2) {
if !t1.eq_unspanned(&t2) {
return false;
}
}
impl<'t> CursorRef<'t> {
- fn new(stream: &TokenStream) -> CursorRef<'_> {
- CursorRef { stream, index: 0 }
- }
-
fn next_with_spacing(&mut self) -> Option<&'t TreeAndSpacing> {
self.stream.0.get(self.index).map(|tree| {
self.index += 1;
use smallvec::{smallvec, SmallVec};
use tracing::debug;
+use std::iter;
use std::mem;
pub(super) struct ItemLowerer<'a, 'lowering, 'hir> {
UseTreeKind::Glob => {}
UseTreeKind::Simple(_, id1, id2) => {
for (_, &id) in
- self.expect_full_res_from_use(base_id).skip(1).zip([id1, id2].iter())
+ iter::zip(self.expect_full_res_from_use(base_id).skip(1), &[id1, id2])
{
vec.push(id);
}
// won't be dealing with macros in the rest of the compiler.
// Essentially a single `use` which imports two names is desugared into
// two imports.
- for (res, &new_node_id) in resolutions.zip([id1, id2].iter()) {
+ for (res, &new_node_id) in iter::zip(resolutions, &[id1, id2]) {
let ident = *ident;
let mut path = path.clone();
for seg in &mut path.segments {
#![feature(crate_visibility_modifier)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![feature(box_patterns)]
+#![feature(iter_zip)]
#![recursion_limit = "256"]
use rustc_ast::node_id::NodeMap;
State::new().token_to_string(token)
}
-pub fn token_to_string_ext(token: &Token, convert_dollar_crate: bool) -> String {
- State::new().token_to_string_ext(token, convert_dollar_crate)
-}
-
pub fn ty_to_string(ty: &ast::Ty) -> String {
State::new().ty_to_string(ty)
}
State::new().tts_to_string(tokens)
}
-pub fn stmt_to_string(stmt: &ast::Stmt) -> String {
- State::new().stmt_to_string(stmt)
-}
-
pub fn item_to_string(i: &ast::Item) -> String {
State::new().item_to_string(i)
}
-pub fn generic_params_to_string(generic_params: &[ast::GenericParam]) -> String {
- State::new().generic_params_to_string(generic_params)
-}
-
pub fn path_to_string(p: &ast::Path) -> String {
State::new().path_to_string(p)
}
State::new().vis_to_string(v)
}
-pub fn block_to_string(blk: &ast::Block) -> String {
- State::new().block_to_string(blk)
-}
-
pub fn meta_list_item_to_string(li: &ast::NestedMetaItem) -> String {
State::new().meta_list_item_to_string(li)
}
-pub fn attr_item_to_string(ai: &ast::AttrItem) -> String {
- State::new().attr_item_to_string(ai)
-}
-
pub fn attribute_to_string(attr: &ast::Attribute) -> String {
State::new().attribute_to_string(attr)
}
-pub fn param_to_string(arg: &ast::Param) -> String {
- State::new().param_to_string(arg)
-}
-
pub fn to_string(f: impl FnOnce(&mut State<'_>)) -> String {
State::new().to_string(f)
}
}
}
- pub fn print_usize(&mut self, i: usize) {
- self.s.word(i.to_string())
- }
-
crate fn print_name(&mut self, name: Symbol) {
self.s.word(name.to_string());
self.ann.post(self, AnnNode::Name(&name))
// make a series of nested matches, to destructure the
// structs. This is actually right-to-left, but it shouldn't
// matter.
- for (arg_expr, pat) in self_args.iter().zip(patterns) {
+ for (arg_expr, pat) in iter::zip(self_args, patterns) {
body = cx.expr_match(
trait_.span,
arg_expr.clone(),
let mut discriminant_test = cx.expr_bool(sp, true);
let mut first_ident = None;
- for (&ident, self_arg) in vi_idents.iter().zip(&self_args) {
+ for (&ident, self_arg) in iter::zip(&vi_idents, &self_args) {
let self_addr = cx.expr_addr_of(sp, self_arg.clone());
let variant_value =
deriving::call_intrinsic(cx, sp, sym::discriminant_value, vec![self_addr]);
let subpats = self.create_subpatterns(cx, paths, mutbl, use_temporaries);
let pattern = match *struct_def {
VariantData::Struct(..) => {
- let field_pats = subpats
- .into_iter()
- .zip(&ident_exprs)
+ let field_pats = iter::zip(subpats, &ident_exprs)
.map(|(pat, &(sp, ident, ..))| {
if ident.is_none() {
cx.span_bug(sp, "a braced struct with unnamed fields in `derive`");
#![feature(bool_to_option)]
#![feature(crate_visibility_modifier)]
#![feature(decl_macro)]
+#![feature(iter_zip)]
#![feature(nll)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![feature(proc_macro_internals)]
+++ /dev/null
-name: Bootstrap rustc using cg_clif
-
-on:
- - push
-
-jobs:
- bootstrap_rustc:
- runs-on: ubuntu-latest
-
- steps:
- - uses: actions/checkout@v2
-
- - name: Cache cargo installed crates
- uses: actions/cache@v2
- with:
- path: ~/.cargo/bin
- key: ${{ runner.os }}-cargo-installed-crates
-
- - name: Cache cargo registry and index
- uses: actions/cache@v2
- with:
- path: |
- ~/.cargo/registry
- ~/.cargo/git
- key: ${{ runner.os }}-cargo-registry-and-index-${{ hashFiles('**/Cargo.lock') }}
-
- - name: Cache cargo target dir
- uses: actions/cache@v2
- with:
- path: target
- key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }}
-
- - name: Prepare dependencies
- run: |
- git config --global user.email "user@example.com"
- git config --global user.name "User"
- ./prepare.sh
-
- - name: Test
- run: |
- # Enable backtraces for easier debugging
- export RUST_BACKTRACE=1
-
- ./scripts/test_bootstrap.sh
jobs:
build:
runs-on: ${{ matrix.os }}
+ timeout-minutes: 60
strategy:
fail-fast: false
matrix:
- os: [ubuntu-latest, macos-latest]
+ include:
+ - os: ubuntu-latest
+ - os: macos-latest
+ # cross-compile from Linux to Windows using mingw
+ - os: ubuntu-latest
+ env:
+ TARGET_TRIPLE: x86_64-pc-windows-gnu
steps:
- uses: actions/checkout@v2
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }}
+ - name: Install MinGW toolchain and wine
+ if: matrix.os == 'ubuntu-latest' && matrix.env.TARGET_TRIPLE == 'x86_64-pc-windows-gnu'
+ run: |
+ sudo apt-get install -y gcc-mingw-w64-x86-64 wine-stable
+ rustup target add x86_64-pc-windows-gnu
+
- name: Prepare dependencies
run: |
git config --global user.email "user@example.com"
./prepare.sh
- name: Test
+ env:
+ TARGET_TRIPLE: ${{ matrix.env.TARGET_TRIPLE }}
run: |
# Enable backtraces for easier debugging
export RUST_BACKTRACE=1
export COMPILE_RUNS=2
export RUN_RUNS=2
+ # Enable extra checks
+ export CG_CLIF_ENABLE_VERIFIER=1
+
./test.sh
- name: Package prebuilt cg_clif
run: tar cvfJ cg_clif.tar.xz build
- name: Upload prebuilt cg_clif
+ if: matrix.env.TARGET_TRIPLE != 'x86_64-pc-windows-gnu'
uses: actions/upload-artifact@v2
with:
name: cg_clif-${{ runner.os }}
--- /dev/null
+name: Various rustc tests
+
+on:
+ - push
+
+jobs:
+ bootstrap_rustc:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Cache cargo installed crates
+ uses: actions/cache@v2
+ with:
+ path: ~/.cargo/bin
+ key: ${{ runner.os }}-cargo-installed-crates
+
+ - name: Cache cargo registry and index
+ uses: actions/cache@v2
+ with:
+ path: |
+ ~/.cargo/registry
+ ~/.cargo/git
+ key: ${{ runner.os }}-cargo-registry-and-index-${{ hashFiles('**/Cargo.lock') }}
+
+ - name: Cache cargo target dir
+ uses: actions/cache@v2
+ with:
+ path: target
+ key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }}
+
+ - name: Prepare dependencies
+ run: |
+ git config --global user.email "user@example.com"
+ git config --global user.name "User"
+ ./prepare.sh
+
+ - name: Test
+ run: |
+ # Enable backtraces for easier debugging
+ export RUST_BACKTRACE=1
+
+ ./scripts/test_bootstrap.sh
+ rustc_test_suite:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v2
+
+ - name: Cache cargo installed crates
+ uses: actions/cache@v2
+ with:
+ path: ~/.cargo/bin
+ key: ${{ runner.os }}-cargo-installed-crates
+
+ - name: Cache cargo registry and index
+ uses: actions/cache@v2
+ with:
+ path: |
+ ~/.cargo/registry
+ ~/.cargo/git
+ key: ${{ runner.os }}-cargo-registry-and-index-${{ hashFiles('**/Cargo.lock') }}
+
+ - name: Cache cargo target dir
+ uses: actions/cache@v2
+ with:
+ path: target
+ key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain', '**/Cargo.lock') }}
+
+ - name: Prepare dependencies
+ run: |
+ git config --global user.email "user@example.com"
+ git config --global user.name "User"
+ ./prepare.sh
+
+ - name: Test
+ run: |
+ # Enable backtraces for easier debugging
+ export RUST_BACKTRACE=1
+
+ ./scripts/test_rustc_tests.sh
// source for rustc_* is not included in the rust-src component; disable the errors about this
"rust-analyzer.diagnostics.disabled": ["unresolved-extern-crate", "macro-error"],
"rust-analyzer.assist.importMergeBehavior": "last",
- "rust-analyzer.cargo.loadOutDirsFromCheck": true,
+ "rust-analyzer.cargo.runBuildScripts": true,
"rust-analyzer.linkedProjects": [
"./Cargo.toml",
//"./build_sysroot/sysroot_src/src/libstd/Cargo.toml",
[[package]]
name = "cranelift-bforest"
-version = "0.70.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#cdb60ec5a9df087262ae8960a31067e88cd80058"
+version = "0.72.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
dependencies = [
"cranelift-entity",
]
[[package]]
name = "cranelift-codegen"
-version = "0.70.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#cdb60ec5a9df087262ae8960a31067e88cd80058"
+version = "0.72.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
dependencies = [
"byteorder",
"cranelift-bforest",
[[package]]
name = "cranelift-codegen-meta"
-version = "0.70.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#cdb60ec5a9df087262ae8960a31067e88cd80058"
+version = "0.72.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
dependencies = [
"cranelift-codegen-shared",
"cranelift-entity",
[[package]]
name = "cranelift-codegen-shared"
-version = "0.70.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#cdb60ec5a9df087262ae8960a31067e88cd80058"
+version = "0.72.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
[[package]]
name = "cranelift-entity"
-version = "0.70.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#cdb60ec5a9df087262ae8960a31067e88cd80058"
+version = "0.72.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
[[package]]
name = "cranelift-frontend"
-version = "0.70.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#cdb60ec5a9df087262ae8960a31067e88cd80058"
+version = "0.72.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
dependencies = [
"cranelift-codegen",
"log",
[[package]]
name = "cranelift-jit"
-version = "0.70.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#cdb60ec5a9df087262ae8960a31067e88cd80058"
+version = "0.72.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
dependencies = [
"anyhow",
"cranelift-codegen",
[[package]]
name = "cranelift-module"
-version = "0.70.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#cdb60ec5a9df087262ae8960a31067e88cd80058"
+version = "0.72.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
dependencies = [
"anyhow",
"cranelift-codegen",
[[package]]
name = "cranelift-native"
-version = "0.70.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#cdb60ec5a9df087262ae8960a31067e88cd80058"
+version = "0.72.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
dependencies = [
"cranelift-codegen",
"target-lexicon",
[[package]]
name = "cranelift-object"
-version = "0.70.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#cdb60ec5a9df087262ae8960a31067e88cd80058"
+version = "0.72.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8e43e96410a14143d368273cf1e708f8094bb8e0"
dependencies = [
"anyhow",
"cranelift-codegen",
"libc",
]
+[[package]]
+name = "memmap2"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04e3e85b970d650e2ae6d70592474087051c11c54da7f7b4949725c5735fbcc6"
+dependencies = [
+ "libc",
+]
+
[[package]]
name = "object"
version = "0.23.0"
"gimli",
"indexmap",
"libloading",
+ "memmap2",
"object",
"smallvec",
"target-lexicon",
cranelift-object = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" }
target-lexicon = "0.11.0"
gimli = { version = "0.23.0", default-features = false, features = ["write"]}
-object = { version = "0.23.0", default-features = false, features = ["std", "read_core", "write", "coff", "elf", "macho", "pe"] }
+object = { version = "0.23.0", default-features = false, features = ["std", "read_core", "write", "archive", "coff", "elf", "macho", "pe"] }
ar = { git = "https://github.com/bjorn3/rust-ar.git", branch = "do_not_remove_cg_clif_ranlib" }
indexmap = "1.0.2"
libloading = { version = "0.6.0", optional = true }
smallvec = "1.6.1"
+memmap2 = "0.2.1"
# Uncomment to use local checkout of cranelift
#[patch."https://github.com/bytecodealliance/wasmtime/"]
[profile.release.package.syn]
opt-level = 0
debug = false
+
+[package.metadata.rust-analyzer]
+rustc_private = true
Assuming `$cg_clif_dir` is the directory you cloned this repo into and you followed the instructions (`prepare.sh` and `build.sh` or `test.sh`).
-### Cargo
-
In the directory with your project (where you can do the usual `cargo build`), run:
```bash
-$ $cg_clif_dir/build/cargo.sh run
-```
-
-This should build and run your project with rustc_codegen_cranelift instead of the usual LLVM backend.
-
-### Rustc
-
-> You should prefer using the Cargo method.
-
-```bash
-$ $cg_clif_dir/build/bin/cg_clif my_crate.rs
-```
-
-### Jit mode
-
-In jit mode cg_clif will immediately execute your code without creating an executable file.
-
-> This requires all dependencies to be available as dynamic library.
-> The jit mode will probably need cargo integration to make this possible.
-
-```bash
-$ $cg_clif_dir/build/cargo.sh jit
-```
-
-or
-
-```bash
-$ $cg_clif_dir/build/bin/cg_clif -Cllvm-args=mode=jit -Cprefer-dynamic my_crate.rs
-```
-
-There is also an experimental lazy jit mode. In this mode functions are only compiled once they are
-first called. It currently does not work with multi-threaded programs. When a not yet compiled
-function is called from another thread than the main thread, you will get an ICE.
-
-```bash
-$ $cg_clif_dir/build/cargo.sh lazy-jit
+$ $cg_clif_dir/build/cargo.sh build
```
-### Shell
-
-These are a few functions that allow you to easily run rust code from the shell using cg_clif as jit.
-
-```bash
-function jit_naked() {
- echo "$@" | $cg_clif_dir/build/bin/cg_clif - -Cllvm-args=mode=jit -Cprefer-dynamic
-}
-
-function jit() {
- jit_naked "fn main() { $@ }"
-}
+This will build your project with rustc_codegen_cranelift instead of the usual LLVM backend.
-function jit_calc() {
- jit 'println!("0x{:x}", ' $@ ');';
-}
-```
+For additional ways to use rustc_codegen_cranelift, like the JIT mode, see [usage.md](docs/usage.md).
## Env vars
-[see env_vars.md](docs/env_vars.md)
+See [env_vars.md](docs/env_vars.md) for all env vars used by rustc_codegen_cranelift.
## Not yet supported
`llvm_asm!` will remain unimplemented forever. `asm!` doesn't yet support reg classes. You
have to specify specific registers instead.
* SIMD ([tracked here](https://github.com/bjorn3/rustc_codegen_cranelift/issues/171), some basic things work)
+
+## License
+
+Licensed under either of
+
+ * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or
+ http://www.apache.org/licenses/LICENSE-2.0)
+ * MIT license ([LICENSE-MIT](LICENSE-MIT) or
+ http://opensource.org/licenses/MIT)
+
+at your option.
+
+### Contribution
+
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in the work by you shall be dual licensed as above, without any
+additional terms or conditions.
ln rust-toolchain scripts/config.sh scripts/cargo.sh "$target_dir"
mkdir -p "$target_dir/lib/rustlib/$TARGET_TRIPLE/lib/"
+mkdir -p "$target_dir/lib/rustlib/$HOST_TRIPLE/lib/"
if [[ "$TARGET_TRIPLE" == "x86_64-pc-windows-gnu" ]]; then
cp $(rustc --print sysroot)/lib/rustlib/$TARGET_TRIPLE/lib/*.o "$target_dir/lib/rustlib/$TARGET_TRIPLE/lib/"
fi
;;
"llvm")
cp -r $(rustc --print sysroot)/lib/rustlib/$TARGET_TRIPLE/lib "$target_dir/lib/rustlib/$TARGET_TRIPLE/"
+ if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then
+ cp -r $(rustc --print sysroot)/lib/rustlib/$HOST_TRIPLE/lib "$target_dir/lib/rustlib/$HOST_TRIPLE/"
+ fi
;;
"clif")
echo "[BUILD] sysroot"
dir=$(pwd)
cd "$target_dir"
time "$dir/build_sysroot/build_sysroot.sh"
+ if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then
+ time TARGET_TRIPLE="$HOST_TRIPLE" "$dir/build_sysroot/build_sysroot.sh"
+ fi
cp lib/rustlib/*/lib/libstd-* lib/
;;
*)
[[package]]
name = "adler"
-version = "0.2.3"
+version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee2a4ec343196209d6594e19543ae87a39f96d5534d7174822a3ad825dd6ed7e"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
dependencies = [
"compiler_builtins",
"rustc-std-workspace-core",
[[package]]
name = "hashbrown"
-version = "0.9.1"
+version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
+checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
dependencies = [
"compiler_builtins",
"rustc-std-workspace-alloc",
[[package]]
name = "libc"
-version = "0.2.86"
+version = "0.2.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b7282d924be3275cec7f6756ff4121987bc6481325397dde6ba3e7802b1a8b1c"
+checksum = "8916b1f6ca17130ec6568feccee27c156ad12037880833a3b842a823236502e7"
dependencies = [
"rustc-std-workspace-core",
]
[[package]]
name = "miniz_oxide"
-version = "0.4.3"
+version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f2d26ec3309788e423cfbf68ad1800f061638098d76a83681af979dc4eda19d"
+checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
dependencies = [
"adler",
"autocfg",
if [[ "$1" != "--debug" ]]; then
sysroot_channel='release'
# FIXME Enable incremental again once rust-lang/rust#74946 is fixed
- CARGO_INCREMENTAL=0 RUSTFLAGS="$RUSTFLAGS -Zmir-opt-level=2" cargo build --target "$TARGET_TRIPLE" --release
+ CARGO_INCREMENTAL=0 RUSTFLAGS="$RUSTFLAGS -Zmir-opt-level=3" cargo build --target "$TARGET_TRIPLE" --release
else
sysroot_channel='debug'
cargo build --target "$TARGET_TRIPLE"
to make it possible to use incremental mode for all analyses performed by rustc without caching
object files when their content should have been changed by a change to cg_clif.</dd>
<dt>CG_CLIF_DISPLAY_CG_TIME</dt>
- <dd>If "1", display the time it took to perform codegen for a crate</dd>
+ <dd>If "1", display the time it took to perform codegen for a crate.</dd>
+ <dt>CG_CLIF_ENABLE_VERIFIER</dt>
+ <dd>Enable the Cranelift ir verifier for all compilation passes. If not set, it will only run once
+ before passing the clif ir to Cranelift for compilation.</dd>
</dl>
--- /dev/null
+# Usage
+
+rustc_codegen_cranelift can be used as a near-drop-in replacement for `cargo build` or `cargo run` for existing projects.
+
+Assuming `$cg_clif_dir` is the directory you cloned this repo into and you followed the instructions (`prepare.sh` and `build.sh` or `test.sh`).
+
+## Cargo
+
+In the directory with your project (where you can do the usual `cargo build`), run:
+
+```bash
+$ $cg_clif_dir/build/cargo.sh build
+```
+
+This will build your project with rustc_codegen_cranelift instead of the usual LLVM backend.
+
+## Rustc
+
+> You should prefer using the Cargo method.
+
+```bash
+$ $cg_clif_dir/build/bin/cg_clif my_crate.rs
+```
+
+## Jit mode
+
+In jit mode cg_clif will immediately execute your code without creating an executable file.
+
+> This requires all dependencies to be available as dynamic libraries.
+> The jit mode will probably need cargo integration to make this possible.
+
+```bash
+$ $cg_clif_dir/build/cargo.sh jit
+```
+
+or
+
+```bash
+$ $cg_clif_dir/build/bin/cg_clif -Cllvm-args=mode=jit -Cprefer-dynamic my_crate.rs
+```
+
+There is also an experimental lazy jit mode. In this mode functions are only compiled once they are
+first called. It currently does not work with multi-threaded programs. When a not-yet-compiled
+function is called from a thread other than the main thread, you will get an ICE.
+
+```bash
+$ $cg_clif_dir/build/cargo.sh lazy-jit
+```
+
+## Shell
+
+These are a few functions that allow you to easily run rust code from the shell using cg_clif as jit.
+
+```bash
+function jit_naked() {
+ echo "$@" | $cg_clif_dir/build/bin/cg_clif - -Cllvm-args=mode=jit -Cprefer-dynamic
+}
+
+function jit() {
+ jit_naked "fn main() { $@ }"
+}
+
+function jit_calc() {
+ jit 'println!("0x{:x}", ' $@ ');';
+}
+```
}
#[no_mangle]
+#[cfg(not(windows))]
pub fn get_tls() -> u8 {
#[thread_local]
static A: u8 = 42;
-#![feature(
- no_core, start, lang_items, box_syntax, never_type, linkage,
- extern_types, thread_local
-)]
+#![feature(no_core, lang_items, box_syntax, never_type, linkage, extern_types, thread_local)]
#![no_core]
#![allow(dead_code, non_camel_case_types)]
assert_eq!(((|()| 42u8) as fn(()) -> u8)(()), 42);
- #[cfg(not(jit))]
+ #[cfg(not(any(jit, windows)))]
{
extern {
#[linkage = "extern_weak"]
from_decimal_string();
- #[cfg(not(jit))]
+ #[cfg(not(any(jit, windows)))]
test_tls();
#[cfg(all(not(jit), target_os = "linux"))]
#!/usr/bin/env bash
set -e
-rustup component add rust-src rustc-dev llvm-tools-preview
./build_sysroot/prepare_sysroot_src.sh
cargo install hyperfine || echo "Skipping hyperfine install"
-nightly-2021-03-05
+[toolchain]
+channel = "nightly-2021-03-29"
+components = ["rust-src", "rustc-dev", "llvm-tools-preview"]
source "$dir/config.sh"
# read nightly compiler from rust-toolchain file
-TOOLCHAIN=$(cat "$dir/rust-toolchain")
+TOOLCHAIN=$(cat "$dir/rust-toolchain" | grep channel | sed "s/channel = \"\(.*\)\"/\1/")
cmd=$1
shift || true
set -e
-unamestr=$(uname)
-if [[ "$unamestr" == 'Linux' || "$unamestr" == 'FreeBSD' ]]; then
- dylib_ext='so'
-elif [[ "$unamestr" == 'Darwin' ]]; then
- dylib_ext='dylib'
-else
- echo "Unsupported os"
- exit 1
-fi
+dylib=$(echo "" | rustc --print file-names --crate-type dylib --crate-name rustc_codegen_cranelift -)
if echo "$RUSTC_WRAPPER" | grep sccache; then
echo
export RUSTC=$dir"/bin/cg_clif"
export RUSTDOCFLAGS=$linker' -Cpanic=abort -Zpanic-abort-tests '\
-'-Zcodegen-backend='$dir'/lib/librustc_codegen_cranelift.'$dylib_ext' --sysroot '$dir
+'-Zcodegen-backend='$dir'/lib/'$dylib' --sysroot '$dir
# FIXME fix `#[linkage = "extern_weak"]` without this
-if [[ "$unamestr" == 'Darwin' ]]; then
+if [[ "$(uname)" == 'Darwin' ]]; then
export RUSTFLAGS="$RUSTFLAGS -Clink-arg=-undefined -Clink-arg=dynamic_lookup"
fi
echo "=> Installing new nightly"
rustup toolchain install --profile minimal "nightly-${TOOLCHAIN}" # Sanity check to see if the nightly exists
- echo "nightly-${TOOLCHAIN}" > rust-toolchain
+ sed -i "s/\"nightly-.*\"/\"nightly-${TOOLCHAIN}\"/" rust-toolchain
rustup component add rustfmt || true
echo "=> Uninstalling all old nightlies"
--- /dev/null
+#!/bin/bash
+set -e
+
+./build.sh
+source build/config.sh
+
+echo "[SETUP] Rust fork"
+git clone https://github.com/rust-lang/rust.git || true
+pushd rust
+git fetch
+git checkout -- .
+git checkout "$(rustc -V | cut -d' ' -f3 | tr -d '(')"
+
+git apply - <<EOF
+diff --git a/Cargo.toml b/Cargo.toml
+index 5bd1147cad5..10d68a2ff14 100644
+--- a/Cargo.toml
++++ b/Cargo.toml
+@@ -111,5 +111,7 @@ rustc-std-workspace-std = { path = 'library/rustc-std-workspace-std' }
+ rustc-std-workspace-alloc = { path = 'library/rustc-std-workspace-alloc' }
+ rustc-std-workspace-std = { path = 'library/rustc-std-workspace-std' }
+
++compiler_builtins = { path = "../build_sysroot/compiler-builtins" }
++
+ [patch."https://github.com/rust-lang/rust-clippy"]
+ clippy_lints = { path = "src/tools/clippy/clippy_lints" }
+diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml
+index 23e689fcae7..5f077b765b6 100644
+--- a/compiler/rustc_data_structures/Cargo.toml
++++ b/compiler/rustc_data_structures/Cargo.toml
+@@ -32,7 +32,6 @@ tempfile = "3.0.5"
+
+ [dependencies.parking_lot]
+ version = "0.11"
+-features = ["nightly"]
+
+ [target.'cfg(windows)'.dependencies]
+ winapi = { version = "0.3", features = ["fileapi", "psapi"] }
+diff --git a/library/alloc/Cargo.toml b/library/alloc/Cargo.toml
+index d95b5b7f17f..00b6f0e3635 100644
+--- a/library/alloc/Cargo.toml
++++ b/library/alloc/Cargo.toml
+@@ -8,7 +8,7 @@ edition = "2018"
+
+ [dependencies]
+ core = { path = "../core" }
+-compiler_builtins = { version = "0.1.39", features = ['rustc-dep-of-std'] }
++compiler_builtins = { version = "0.1.39", features = ['rustc-dep-of-std', 'no-asm'] }
+
+ [dev-dependencies]
+ rand = "0.7"
+EOF
+
+cat > config.toml <<EOF
+[llvm]
+ninja = false
+
+[build]
+rustc = "$(pwd)/../build/bin/cg_clif"
+cargo = "$(rustup which cargo)"
+full-bootstrap = true
+local-rebuild = true
+
+[rust]
+codegen-backends = ["cranelift"]
+deny-warnings = false
+EOF
+popd
cd "$(dirname "$0")/../"
-./build.sh
-source build/config.sh
+source ./scripts/setup_rust_fork.sh
echo "[TEST] Bootstrap of rustc"
-git clone https://github.com/rust-lang/rust.git || true
pushd rust
-git fetch
-git checkout -- .
-git checkout "$(rustc -V | cut -d' ' -f3 | tr -d '(')"
-
-git apply - <<EOF
-diff --git a/Cargo.toml b/Cargo.toml
-index 5bd1147cad5..10d68a2ff14 100644
---- a/Cargo.toml
-+++ b/Cargo.toml
-@@ -111,5 +111,7 @@ rustc-std-workspace-std = { path = 'library/rustc-std-workspace-std' }
- rustc-std-workspace-alloc = { path = 'library/rustc-std-workspace-alloc' }
- rustc-std-workspace-std = { path = 'library/rustc-std-workspace-std' }
-
-+compiler_builtins = { path = "../build_sysroot/compiler-builtins" }
-+
- [patch."https://github.com/rust-lang/rust-clippy"]
- clippy_lints = { path = "src/tools/clippy/clippy_lints" }
-diff --git a/compiler/rustc_data_structures/Cargo.toml b/compiler/rustc_data_structures/Cargo.toml
-index 23e689fcae7..5f077b765b6 100644
---- a/compiler/rustc_data_structures/Cargo.toml
-+++ b/compiler/rustc_data_structures/Cargo.toml
-@@ -32,7 +32,6 @@ tempfile = "3.0.5"
-
- [dependencies.parking_lot]
- version = "0.11"
--features = ["nightly"]
-
- [target.'cfg(windows)'.dependencies]
- winapi = { version = "0.3", features = ["fileapi", "psapi"] }
-diff --git a/library/alloc/Cargo.toml b/library/alloc/Cargo.toml
-index d95b5b7f17f..00b6f0e3635 100644
---- a/library/alloc/Cargo.toml
-+++ b/library/alloc/Cargo.toml
-@@ -8,7 +8,7 @@ edition = "2018"
-
- [dependencies]
- core = { path = "../core" }
--compiler_builtins = { version = "0.1.39", features = ['rustc-dep-of-std'] }
-+compiler_builtins = { version = "0.1.39", features = ['rustc-dep-of-std', 'no-asm'] }
-
- [dev-dependencies]
- rand = "0.7"
-EOF
-
-cat > config.toml <<EOF
-[llvm]
-ninja = false
-
-[build]
-rustc = "$(pwd)/../build/bin/cg_clif"
-cargo = "$(rustup which cargo)"
-full-bootstrap = true
-local-rebuild = true
-
-[rust]
-codegen-backends = ["cranelift"]
-EOF
-
rm -r compiler/rustc_codegen_cranelift/{Cargo.*,src}
cp ../Cargo.* compiler/rustc_codegen_cranelift/
cp -r ../src compiler/rustc_codegen_cranelift/src
--- /dev/null
+#!/bin/bash
+set -e
+
+cd $(dirname "$0")/../
+
+source ./scripts/setup_rust_fork.sh
+
+echo "[TEST] Test suite of rustc"
+pushd rust
+
+cargo install ripgrep
+
+rm -r src/test/ui/{extern/,panics/,unsized-locals/,thinlto/,simd*,*lto*.rs,linkage*,unwind-*.rs} || true
+for test in $(rg --files-with-matches "asm!|catch_unwind|should_panic|lto" src/test/ui); do
+ rm $test
+done
+
+for test in $(rg -i --files-with-matches "//(\[\w+\])?~|// error-pattern:|// build-fail|// run-fail|-Cllvm-args" src/test/ui); do
+ rm $test
+done
+
+git checkout -- src/test/ui/issues/auxiliary/issue-3136-a.rs # contains //~ERROR, but shouldn't be removed
+
+# these all depend on unwinding support
+rm src/test/ui/backtrace.rs
+rm src/test/ui/array-slice-vec/box-of-array-of-drop-*.rs
+rm src/test/ui/array-slice-vec/slice-panic-*.rs
+rm src/test/ui/array-slice-vec/nested-vec-3.rs
+rm src/test/ui/cleanup-rvalue-temp-during-incomplete-alloc.rs
+rm src/test/ui/issues/issue-26655.rs
+rm src/test/ui/issues/issue-29485.rs
+rm src/test/ui/issues/issue-30018-panic.rs
+rm src/test/ui/multi-panic.rs
+rm src/test/ui/sepcomp/sepcomp-unwind.rs
+rm src/test/ui/structs-enums/unit-like-struct-drop-run.rs
+rm src/test/ui/terminate-in-initializer.rs
+rm src/test/ui/threads-sendsync/task-stderr.rs
+rm src/test/ui/numbers-arithmetic/int-abs-overflow.rs
+rm src/test/ui/drop/drop-trait-enum.rs
+rm src/test/ui/numbers-arithmetic/issue-8460.rs
+
+rm src/test/ui/issues/issue-28950.rs # depends on stack size optimizations
+rm src/test/ui/init-large-type.rs # same
+rm src/test/ui/sse2.rs # cpuid not supported, so sse2 not detected
+rm src/test/ui/issues/issue-33992.rs # unsupported linkages
+rm src/test/ui/issues/issue-51947.rs # same
+rm src/test/ui/numbers-arithmetic/saturating-float-casts.rs # intrinsic gives different but valid result
+rm src/test/ui/mir/mir_misc_casts.rs # depends on deduplication of constants
+rm src/test/ui/mir/mir_raw_fat_ptr.rs # same
+rm src/test/ui/async-await/async-fn-size-moved-locals.rs # -Cpanic=abort shrinks some generator by one byte
+rm src/test/ui/async-await/async-fn-size-uninit-locals.rs # same
+rm src/test/ui/generator/size-moved-locals.rs # same
+rm src/test/ui/fn/dyn-fn-alignment.rs # wants a 256 byte alignment
+rm src/test/ui/test-attrs/test-fn-signature-verification-for-explicit-return-type.rs # "Cannot run dynamic test fn out-of-process"
+rm src/test/ui/intrinsics/intrinsic-nearby.rs # unimplemented nearbyintf32 and nearbyintf64 intrinsics
+
+rm src/test/incremental/hashes/inline_asm.rs # inline asm
+rm src/test/incremental/issue-72386.rs # same
+rm src/test/incremental/change_crate_dep_kind.rs # requires -Cpanic=unwind
+rm src/test/incremental/issue-49482.rs # same
+rm src/test/incremental/issue-54059.rs # same
+rm src/test/incremental/lto.rs # requires lto
+
+rm src/test/pretty/asm.rs # inline asm
+rm src/test/pretty/raw-str-nonexpr.rs # same
+
+rm -r src/test/run-pass-valgrind/unsized-locals
+
+rm src/test/ui/json-bom-plus-crlf-multifile.rs # differing warning
+rm src/test/ui/json-bom-plus-crlf.rs # same
+rm src/test/ui/type-alias-impl-trait/cross_crate_ice*.rs # requires removed aux dep
+
+rm src/test/ui/allocator/no_std-alloc-error-handler-default.rs # missing rust_oom definition
+rm src/test/ui/cfg/cfg-panic.rs
+rm src/test/ui/default-alloc-error-hook.rs
+rm -r src/test/ui/hygiene/
+
+rm -r src/test/ui/polymorphization/ # polymorphization not yet supported
+rm src/test/codegen-units/polymorphization/unused_type_parameters.rs # same
+
+rm -r src/test/run-make/fmt-write-bloat/ # tests an optimization
+rm src/test/ui/abi/mir/mir_codegen_calls_variadic.rs # requires float varargs
+rm src/test/ui/abi/variadic-ffi.rs # requires callee side vararg support
+
+echo "[TEST] rustc test suite"
+RUST_TEST_NOCAPTURE=1 COMPILETEST_FORCE_STAGE0=1 ./x.py test --stage 0 src/test/{codegen-units,run-make,run-pass-valgrind,ui}
+popd
echo "[AOT] mod_bench"
$MY_RUSTC example/mod_bench.rs --crate-type bin --target "$TARGET_TRIPLE"
$RUN_WRAPPER ./target/out/mod_bench
+}
+function extended_sysroot_tests() {
pushd rand
- rm -r ./target || true
- ../build/cargo.sh test --workspace
+ cargo clean
+ if [[ "$HOST_TRIPLE" = "$TARGET_TRIPLE" ]]; then
+ echo "[TEST] rust-random/rand"
+ ../build/cargo.sh test --workspace
+ else
+ echo "[AOT] rust-random/rand"
+ ../build/cargo.sh build --workspace --target $TARGET_TRIPLE --tests
+ fi
popd
-}
-function extended_sysroot_tests() {
pushd simple-raytracer
if [[ "$HOST_TRIPLE" = "$TARGET_TRIPLE" ]]; then
echo "[BENCH COMPILE] ebobby/simple-raytracer"
else
echo "[BENCH COMPILE] ebobby/simple-raytracer (skipped)"
echo "[COMPILE] ebobby/simple-raytracer"
- ../cargo.sh build
+ ../build/cargo.sh build --target $TARGET_TRIPLE
echo "[BENCH RUN] ebobby/simple-raytracer (skipped)"
fi
popd
pushd build_sysroot/sysroot_src/library/core/tests
echo "[TEST] libcore"
- rm -r ./target || true
- ../../../../../build/cargo.sh test
+ cargo clean
+ if [[ "$HOST_TRIPLE" = "$TARGET_TRIPLE" ]]; then
+ ../../../../../build/cargo.sh test
+ else
+ ../../../../../build/cargo.sh build --target $TARGET_TRIPLE --tests
+ fi
popd
pushd regex
echo "[TEST] rust-lang/regex example shootout-regex-dna"
- ../build/cargo.sh clean
+ cargo clean
# Make sure `[codegen mono items] start` doesn't poison the diff
- ../build/cargo.sh build --example shootout-regex-dna
- cat examples/regexdna-input.txt | ../build/cargo.sh run --example shootout-regex-dna | grep -v "Spawned thread" > res.txt
- diff -u res.txt examples/regexdna-output.txt
+ ../build/cargo.sh build --example shootout-regex-dna --target $TARGET_TRIPLE
+ if [[ "$HOST_TRIPLE" = "$TARGET_TRIPLE" ]]; then
+ cat examples/regexdna-input.txt \
+ | ../build/cargo.sh run --example shootout-regex-dna --target $TARGET_TRIPLE \
+ | grep -v "Spawned thread" > res.txt
+ diff -u res.txt examples/regexdna-output.txt
+ fi
- echo "[TEST] rust-lang/regex tests"
- ../build/cargo.sh test --tests -- --exclude-should-panic --test-threads 1 -Zunstable-options -q
+ if [[ "$HOST_TRIPLE" = "$TARGET_TRIPLE" ]]; then
+ echo "[TEST] rust-lang/regex tests"
+ ../build/cargo.sh test --tests -- --exclude-should-panic --test-threads 1 -Zunstable-options -q
+ else
+ echo "[AOT] rust-lang/regex tests"
+ ../build/cargo.sh build --tests --target $TARGET_TRIPLE
+ fi
popd
}
use crate::prelude::*;
pub(super) fn add_args_header_comment(fx: &mut FunctionCx<'_, '_, '_>) {
- fx.add_global_comment(
- "kind loc.idx param pass mode ty".to_string(),
- );
+ if fx.clif_comments.enabled() {
+ fx.add_global_comment(
+ "kind loc.idx param pass mode ty".to_string(),
+ );
+ }
}
pub(super) fn add_arg_comment<'tcx>(
arg_abi_mode: PassMode,
arg_layout: TyAndLayout<'tcx>,
) {
+ if !fx.clif_comments.enabled() {
+ return;
+ }
+
let local = if let Some(local) = local {
Cow::Owned(format!("{:?}", local))
} else {
}
pub(super) fn add_locals_header_comment(fx: &mut FunctionCx<'_, '_, '_>) {
- fx.add_global_comment(String::new());
- fx.add_global_comment(
- "kind local ty size align (abi,pref)".to_string(),
- );
+ if fx.clif_comments.enabled() {
+ fx.add_global_comment(String::new());
+ fx.add_global_comment(
+ "kind local ty size align (abi,pref)".to_string(),
+ );
+ }
}
pub(super) fn add_local_place_comments<'tcx>(
place: CPlace<'tcx>,
local: Local,
) {
+ if !fx.clif_comments.enabled() {
+ return;
+ }
let TyAndLayout { ty, layout } = place.layout();
let rustc_target::abi::Layout { size, align, abi: _, variants: _, fields: _, largest_niche: _ } =
layout;
} else {
Cow::Borrowed("")
};
- match ptr.base_and_offset() {
+ match ptr.debug_base_and_offset() {
(crate::pointer::PointerBase::Addr(addr), offset) => {
("reuse", format!("storage={}{}{}", addr, offset, meta).into())
}
//! Handling of everything related to the calling convention. Also fills `fx.local_map`.
-#[cfg(debug_assertions)]
mod comments;
mod pass_mode;
mod returning;
let func_id = import_function(self.tcx, self.cx.module, inst);
let func_ref = self.cx.module.declare_func_in_func(func_id, &mut self.bcx.func);
- #[cfg(debug_assertions)]
- self.add_comment(func_ref, format!("{:?}", inst));
+ if self.clif_comments.enabled() {
+ self.add_comment(func_ref, format!("{:?}", inst));
+ }
func_ref
}
let func_id = self.cx.module.declare_function(&name, Linkage::Import, &sig).unwrap();
let func_ref = self.cx.module.declare_func_in_func(func_id, &mut self.bcx.func);
let call_inst = self.bcx.ins().call(func_ref, args);
- #[cfg(debug_assertions)]
- {
+ if self.clif_comments.enabled() {
self.add_comment(call_inst, format!("easy_call {}", name));
}
let results = self.bcx.inst_results(call_inst);
CPlace::new_stack_slot(fx, layout)
};
- #[cfg(debug_assertions)]
self::comments::add_local_place_comments(fx, place, local);
place
let ssa_analyzed = crate::analyze::analyze(fx);
- #[cfg(debug_assertions)]
self::comments::add_args_header_comment(fx);
let mut block_params_iter = fx.bcx.func.dfg.block_params(start_block).to_vec().into_iter();
fx.fn_abi = Some(fn_abi);
assert!(block_params_iter.next().is_none(), "arg_value left behind");
- #[cfg(debug_assertions)]
self::comments::add_locals_header_comment(fx);
for (local, arg_kind, ty) in func_params {
CPlace::for_ptr(addr, val.layout())
};
- #[cfg(debug_assertions)]
self::comments::add_local_place_comments(fx, place, local);
assert_eq!(fx.local_map.push(place), local);
let (func_ref, first_arg) = match instance {
// Trait object call
Some(Instance { def: InstanceDef::Virtual(_, idx), .. }) => {
- #[cfg(debug_assertions)]
- {
+ if fx.clif_comments.enabled() {
let nop_inst = fx.bcx.ins().nop();
fx.add_comment(
nop_inst,
// Indirect call
None => {
- #[cfg(debug_assertions)]
- {
+ if fx.clif_comments.enabled() {
let nop_inst = fx.bcx.ins().nop();
fx.add_comment(nop_inst, "indirect call");
}
// FIXME find a cleaner way to support varargs
if fn_sig.c_variadic {
if !matches!(fn_sig.abi, Abi::C { .. }) {
- fx.tcx.sess.span_fatal(
- span,
- &format!("Variadic call for non-C abi {:?}", fn_sig.abi),
- );
+ fx.tcx.sess.span_fatal(span, &format!("Variadic call for non-C abi {:?}", fn_sig.abi));
}
let sig_ref = fx.bcx.func.dfg.call_signature(call_inst).unwrap();
let abi_params = call_args
});
let ptr = Pointer::new(fx.bcx.ins().stack_addr(pointer_ty(fx.tcx), stack_slot, 0));
let mut offset = 0;
- let mut block_params_iter = block_params.into_iter().copied();
+ let mut block_params_iter = block_params.iter().copied();
for param in abi_params {
let val = ptr.offset_i64(fx, offset).store(
fx,
/// as necessary.
pub(super) fn cvalue_for_param<'tcx>(
fx: &mut FunctionCx<'_, '_, 'tcx>,
- #[cfg_attr(not(debug_assertions), allow(unused_variables))] local: Option<mir::Local>,
- #[cfg_attr(not(debug_assertions), allow(unused_variables))] local_field: Option<usize>,
+ local: Option<mir::Local>,
+ local_field: Option<usize>,
arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
block_params_iter: &mut impl Iterator<Item = Value>,
) -> Option<CValue<'tcx>> {
})
.collect::<SmallVec<[_; 2]>>();
- #[cfg(debug_assertions)]
crate::abi::comments::add_arg_comment(
fx,
"arg",
}
};
- #[cfg(not(debug_assertions))]
- let _ = ret_param;
-
- #[cfg(debug_assertions)]
crate::abi::comments::add_arg_comment(
fx,
"ret",
let results = fx
.bcx
.inst_results(call_inst)
- .into_iter()
+ .iter()
.copied()
.collect::<SmallVec<[Value; 2]>>();
let result =
use crate::prelude::*;
+use cranelift_codegen::binemit::{NullStackMapSink, NullTrapSink};
use rustc_ast::expand::allocator::{AllocatorKind, AllocatorTy, ALLOCATOR_METHODS};
use rustc_span::symbol::sym;
bcx.finalize();
}
module
- .define_function(func_id, &mut ctx, &mut cranelift_codegen::binemit::NullTrapSink {})
+ .define_function(func_id, &mut ctx, &mut NullTrapSink {}, &mut NullStackMapSink {})
.unwrap();
unwind_context.add_function(func_id, &ctx, module.isa());
}
bcx.finalize();
}
module
- .define_function(func_id, &mut ctx, &mut cranelift_codegen::binemit::NullTrapSink {})
+ .define_function(func_id, &mut ctx, &mut NullTrapSink {}, &mut NullStackMapSink {})
.unwrap();
unwind_context.add_function(func_id, &ctx, module.isa());
}
//! Codegen of a single function
+use cranelift_codegen::binemit::{NullStackMapSink, NullTrapSink};
use rustc_index::vec::IndexVec;
use rustc_middle::ty::adjustment::PointerCast;
use rustc_middle::ty::layout::FnAbiExt;
use crate::prelude::*;
-pub(crate) fn codegen_fn<'tcx>(
- cx: &mut crate::CodegenCx<'_, 'tcx>,
- instance: Instance<'tcx>,
- linkage: Linkage,
-) {
+pub(crate) fn codegen_fn<'tcx>(cx: &mut crate::CodegenCx<'_, 'tcx>, instance: Instance<'tcx>) {
let tcx = cx.tcx;
let _inst_guard =
// Declare function
let name = tcx.symbol_name(instance).name.to_string();
let sig = get_function_sig(tcx, cx.module.isa().triple(), instance);
- let func_id = cx.module.declare_function(&name, linkage, &sig).unwrap();
+ let func_id = cx.module.declare_function(&name, Linkage::Local, &sig).unwrap();
cx.cached_context.clear();
let module = &mut cx.module;
tcx.sess.time("define function", || {
module
- .define_function(func_id, context, &mut cranelift_codegen::binemit::NullTrapSink {})
+ .define_function(func_id, context, &mut NullTrapSink {}, &mut NullStackMapSink {})
.unwrap()
});
codegen_stmt(fx, block, stmt);
}
- #[cfg(debug_assertions)]
- {
+ if fx.clif_comments.enabled() {
let mut terminator_head = "\n".to_string();
bb_data.terminator().kind.fmt_head(&mut terminator_head).unwrap();
let inst = fx.bcx.func.layout.last_inst(block).unwrap();
fx.set_debug_loc(stmt.source_info);
- #[cfg(false_debug_assertions)]
+ #[cfg(disabled)]
match &stmt.kind {
StatementKind::StorageLive(..) | StatementKind::StorageDead(..) => {} // Those are not very useful
_ => {
- let inst = fx.bcx.func.layout.last_inst(cur_block).unwrap();
- fx.add_comment(inst, format!("{:?}", stmt));
+ if fx.clif_comments.enabled() {
+ let inst = fx.bcx.func.layout.last_inst(cur_block).unwrap();
+ fx.add_comment(inst, format!("{:?}", stmt));
+ }
}
}
let val = crate::constant::codegen_tls_ref(fx, def_id, lval.layout());
lval.write_cvalue(fx, val);
}
- Rvalue::BinaryOp(bin_op, box (ref lhs, ref rhs)) => {
- let lhs = codegen_operand(fx, lhs);
- let rhs = codegen_operand(fx, rhs);
+ Rvalue::BinaryOp(bin_op, ref lhs_rhs) => {
+ let lhs = codegen_operand(fx, &lhs_rhs.0);
+ let rhs = codegen_operand(fx, &lhs_rhs.1);
let res = crate::num::codegen_binop(fx, bin_op, lhs, rhs);
lval.write_cvalue(fx, res);
}
- Rvalue::CheckedBinaryOp(bin_op, box (ref lhs, ref rhs)) => {
- let lhs = codegen_operand(fx, lhs);
- let rhs = codegen_operand(fx, rhs);
+ Rvalue::CheckedBinaryOp(bin_op, ref lhs_rhs) => {
+ let lhs = codegen_operand(fx, &lhs_rhs.0);
+ let rhs = codegen_operand(fx, &lhs_rhs.1);
let res = if !fx.tcx.sess.overflow_checks() {
let val =
.val
.try_to_bits(fx.tcx.data_layout.pointer_size)
.unwrap();
- if fx.clif_type(operand.layout().ty) == Some(types::I8) {
+ if operand.layout().size.bytes() == 0 {
+ // Do nothing for ZST's
+ } else if fx.clif_type(operand.layout().ty) == Some(types::I8) {
let times = fx.bcx.ins().iconst(fx.pointer_type, times as i64);
// FIXME use emit_small_memset where possible
let addr = lval.to_ptr().get_addr(fx);
}
}
StatementKind::Coverage { .. } => fx.tcx.sess.fatal("-Zcoverage is unimplemented"),
- StatementKind::CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping {
- src,
- dst,
- count,
- }) => {
- let dst = codegen_operand(fx, dst);
+ StatementKind::CopyNonOverlapping(inner) => {
+ let dst = codegen_operand(fx, &inner.dst);
let pointee = dst
- .layout()
- .pointee_info_at(fx, rustc_target::abi::Size::ZERO)
- .expect("Expected pointer");
+ .layout()
+ .pointee_info_at(fx, rustc_target::abi::Size::ZERO)
+ .expect("Expected pointer");
let dst = dst.load_scalar(fx);
- let src = codegen_operand(fx, src).load_scalar(fx);
- let count = codegen_operand(fx, count).load_scalar(fx);
+ let src = codegen_operand(fx, &inner.src).load_scalar(fx);
+ let count = codegen_operand(fx, &inner.count).load_scalar(fx);
let elem_size: u64 = pointee.size.bytes();
- let bytes = if elem_size != 1 {
- fx.bcx.ins().imul_imm(count, elem_size as i64)
- } else {
- count
- };
+ let bytes =
+ if elem_size != 1 { fx.bcx.ins().imul_imm(count, elem_size as i64) } else { count };
fx.bcx.call_memcpy(fx.cx.module.target_config(), dst, src, bytes);
}
}
BinOp::Add | BinOp::Sub if !checked => None,
BinOp::Mul if !checked => {
let val_ty = if is_signed { fx.tcx.types.i128 } else { fx.tcx.types.u128 };
- Some(fx.easy_call("__multi3", &[lhs, rhs], val_ty))
+ if fx.tcx.sess.target.is_like_windows {
+ let ret_place = CPlace::new_stack_slot(fx, lhs.layout());
+ let (lhs_ptr, lhs_extra) = lhs.force_stack(fx);
+ let (rhs_ptr, rhs_extra) = rhs.force_stack(fx);
+ assert!(lhs_extra.is_none());
+ assert!(rhs_extra.is_none());
+ let args =
+ [ret_place.to_ptr().get_addr(fx), lhs_ptr.get_addr(fx), rhs_ptr.get_addr(fx)];
+ fx.lib_call(
+ "__multi3",
+ vec![
+ AbiParam::special(pointer_ty(fx.tcx), ArgumentPurpose::StructReturn),
+ AbiParam::new(pointer_ty(fx.tcx)),
+ AbiParam::new(pointer_ty(fx.tcx)),
+ ],
+ vec![],
+ &args,
+ );
+ Some(ret_place.to_cvalue(fx))
+ } else {
+ Some(fx.easy_call("__multi3", &[lhs, rhs], val_ty))
+ }
}
BinOp::Add | BinOp::Sub | BinOp::Mul => {
assert!(checked);
let out_ty = fx.tcx.mk_tup([lhs.layout().ty, fx.tcx.types.bool].iter());
let out_place = CPlace::new_stack_slot(fx, fx.layout_of(out_ty));
- let param_types = vec![
- AbiParam::special(pointer_ty(fx.tcx), ArgumentPurpose::StructReturn),
- AbiParam::new(types::I128),
- AbiParam::new(types::I128),
- ];
- let args = [out_place.to_ptr().get_addr(fx), lhs.load_scalar(fx), rhs.load_scalar(fx)];
+ let (param_types, args) = if fx.tcx.sess.target.is_like_windows {
+ let (lhs_ptr, lhs_extra) = lhs.force_stack(fx);
+ let (rhs_ptr, rhs_extra) = rhs.force_stack(fx);
+ assert!(lhs_extra.is_none());
+ assert!(rhs_extra.is_none());
+ (
+ vec![
+ AbiParam::special(pointer_ty(fx.tcx), ArgumentPurpose::StructReturn),
+ AbiParam::new(pointer_ty(fx.tcx)),
+ AbiParam::new(pointer_ty(fx.tcx)),
+ ],
+ [out_place.to_ptr().get_addr(fx), lhs_ptr.get_addr(fx), rhs_ptr.get_addr(fx)],
+ )
+ } else {
+ (
+ vec![
+ AbiParam::special(pointer_ty(fx.tcx), ArgumentPurpose::StructReturn),
+ AbiParam::new(types::I128),
+ AbiParam::new(types::I128),
+ ],
+ [out_place.to_ptr().get_addr(fx), lhs.load_scalar(fx), rhs.load_scalar(fx)],
+ )
+ };
let name = match (bin_op, is_signed) {
(BinOp::Add, false) => "__rust_u128_addo",
(BinOp::Add, true) => "__rust_i128_addo",
Some(out_place.to_cvalue(fx))
}
BinOp::Offset => unreachable!("offset should only be used on pointers, not 128bit ints"),
- BinOp::Div => {
+ BinOp::Div | BinOp::Rem => {
assert!(!checked);
- if is_signed {
- Some(fx.easy_call("__divti3", &[lhs, rhs], fx.tcx.types.i128))
- } else {
- Some(fx.easy_call("__udivti3", &[lhs, rhs], fx.tcx.types.u128))
- }
- }
- BinOp::Rem => {
- assert!(!checked);
- if is_signed {
- Some(fx.easy_call("__modti3", &[lhs, rhs], fx.tcx.types.i128))
+ let name = match (bin_op, is_signed) {
+ (BinOp::Div, false) => "__udivti3",
+ (BinOp::Div, true) => "__divti3",
+ (BinOp::Rem, false) => "__umodti3",
+ (BinOp::Rem, true) => "__modti3",
+ _ => unreachable!(),
+ };
+ if fx.tcx.sess.target.is_like_windows {
+ let (lhs_ptr, lhs_extra) = lhs.force_stack(fx);
+ let (rhs_ptr, rhs_extra) = rhs.force_stack(fx);
+ assert!(lhs_extra.is_none());
+ assert!(rhs_extra.is_none());
+ let args = [lhs_ptr.get_addr(fx), rhs_ptr.get_addr(fx)];
+ let ret = fx.lib_call(
+ name,
+ vec![AbiParam::new(pointer_ty(fx.tcx)), AbiParam::new(pointer_ty(fx.tcx))],
+ vec![AbiParam::new(types::I64X2)],
+ &args,
+ )[0];
+ // FIXME use bitcast instead of store to get from i64x2 to i128
+ let ret_place = CPlace::new_stack_slot(fx, lhs.layout());
+ ret_place.to_ptr().store(fx, ret, MemFlags::trusted());
+ Some(ret_place.to_cvalue(fx))
} else {
- Some(fx.easy_call("__umodti3", &[lhs, rhs], fx.tcx.types.u128))
+ Some(fx.easy_call(name, &[lhs, rhs], lhs.layout().ty))
}
}
BinOp::Lt | BinOp::Le | BinOp::Eq | BinOp::Ge | BinOp::Gt | BinOp::Ne => {
let _ = self.cx.module.define_data(msg_id, &data_ctx);
let local_msg_id = self.cx.module.declare_data_in_func(msg_id, self.bcx.func);
- #[cfg(debug_assertions)]
- {
+ if self.clif_comments.enabled() {
self.add_comment(local_msg_id, msg);
}
self.bcx.ins().global_value(self.pointer_type, local_msg_id)
--- /dev/null
+macro builtin_functions($register:ident; $(fn $name:ident($($arg_name:ident: $arg_ty:ty),*) -> $ret_ty:ty;)*) {
+ #[cfg(feature = "jit")]
+ #[allow(improper_ctypes)]
+ extern "C" {
+ $(fn $name($($arg_name: $arg_ty),*) -> $ret_ty;)*
+ }
+
+ #[cfg(feature = "jit")]
+ pub(crate) fn $register(builder: &mut cranelift_jit::JITBuilder) {
+ for &(name, val) in &[$((stringify!($name), $name as *const u8)),*] {
+ builder.symbol(name, val);
+ }
+ }
+}
+
+builtin_functions! {
+ register_functions_for_jit;
+
+ // integers
+ fn __multi3(a: i128, b: i128) -> i128;
+ fn __udivti3(n: u128, d: u128) -> u128;
+ fn __divti3(n: i128, d: i128) -> i128;
+ fn __umodti3(n: u128, d: u128) -> u128;
+ fn __modti3(n: i128, d: i128) -> i128;
+ fn __rust_u128_addo(a: u128, b: u128) -> (u128, bool);
+ fn __rust_i128_addo(a: i128, b: i128) -> (i128, bool);
+ fn __rust_u128_subo(a: u128, b: u128) -> (u128, bool);
+ fn __rust_i128_subo(a: i128, b: i128) -> (i128, bool);
+ fn __rust_u128_mulo(a: u128, b: u128) -> (u128, bool);
+ fn __rust_i128_mulo(a: i128, b: i128) -> (i128, bool);
+
+ // floats
+ fn __floattisf(i: i128) -> f32;
+ fn __floattidf(i: i128) -> f64;
+ fn __floatuntisf(i: u128) -> f32;
+ fn __floatuntidf(i: u128) -> f64;
+ fn __fixsfti(f: f32) -> i128;
+ fn __fixdfti(f: f64) -> i128;
+ fn __fixunssfti(f: f32) -> u128;
+ fn __fixunsdfti(f: f64) -> u128;
+}
) -> CValue<'tcx> {
let data_id = data_id_for_static(fx.tcx, fx.cx.module, def_id, false);
let local_data_id = fx.cx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
- #[cfg(debug_assertions)]
- fx.add_comment(local_data_id, format!("tls {:?}", def_id));
+ if fx.clif_comments.enabled() {
+ fx.add_comment(local_data_id, format!("tls {:?}", def_id));
+ }
let tls_ptr = fx.bcx.ins().tls_value(fx.pointer_type, local_data_id);
CValue::by_val(tls_ptr, layout)
}
) -> CPlace<'tcx> {
let data_id = data_id_for_static(fx.tcx, fx.cx.module, def_id, false);
let local_data_id = fx.cx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
- #[cfg(debug_assertions)]
- fx.add_comment(local_data_id, format!("{:?}", def_id));
+ if fx.clif_comments.enabled() {
+ fx.add_comment(local_data_id, format!("{:?}", def_id));
+ }
let global_ptr = fx.bcx.ins().global_value(fx.pointer_type, local_data_id);
assert!(!layout.is_unsized(), "unsized statics aren't supported");
assert!(
};
let const_val = match const_.val {
ConstKind::Value(const_val) => const_val,
- ConstKind::Unevaluated(ty::Unevaluated { def, substs, promoted }) if fx.tcx.is_static(def.did) => {
+ ConstKind::Unevaluated(ty::Unevaluated { def, substs, promoted })
+ if fx.tcx.is_static(def.did) =>
+ {
assert!(substs.is_empty());
assert!(promoted.is_none());
data_id_for_alloc_id(fx.cx.module, ptr.alloc_id, alloc.mutability);
let local_data_id =
fx.cx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
- #[cfg(debug_assertions)]
- fx.add_comment(local_data_id, format!("{:?}", ptr.alloc_id));
+ if fx.clif_comments.enabled() {
+ fx.add_comment(local_data_id, format!("{:?}", ptr.alloc_id));
+ }
fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
}
Some(GlobalAlloc::Function(instance)) => {
let data_id = data_id_for_static(fx.tcx, fx.cx.module, def_id, false);
let local_data_id =
fx.cx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
- #[cfg(debug_assertions)]
- fx.add_comment(local_data_id, format!("{:?}", def_id));
+ if fx.clif_comments.enabled() {
+ fx.add_comment(local_data_id, format!("{:?}", def_id));
+ }
fx.bcx.ins().global_value(fx.pointer_type, local_data_id)
}
None => bug!("missing allocation {:?}", ptr.alloc_id),
let data_id = data_id_for_alloc_id(fx.cx.module, alloc_id, alloc.mutability);
let local_data_id = fx.cx.module.declare_data_in_func(data_id, &mut fx.bcx.func);
- #[cfg(debug_assertions)]
- fx.add_comment(local_data_id, format!("{:?}", alloc_id));
+ if fx.clif_comments.enabled() {
+ fx.add_comment(local_data_id, format!("{:?}", alloc_id));
+ }
let global_ptr = fx.bcx.ins().global_value(fx.pointer_type, local_data_id);
crate::pointer::Pointer::new(global_ptr)
}
#[cfg(unix)]
{
use std::os::unix::ffi::OsStrExt;
- return path.as_bytes();
+ path.as_bytes()
}
#[cfg(not(unix))]
{
- return path.to_str().unwrap().as_bytes();
+ path.to_str().unwrap().as_bytes()
}
}
tcx.sess.opts.debuginfo != DebugInfo::None,
);
super::predefine_mono_items(&mut cx, &mono_items);
- for (mono_item, (linkage, visibility)) in mono_items {
- let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility);
+ for (mono_item, _) in mono_items {
match mono_item {
MonoItem::Fn(inst) => {
- cx.tcx.sess.time("codegen fn", || crate::base::codegen_fn(&mut cx, inst, linkage));
+ cx.tcx.sess.time("codegen fn", || crate::base::codegen_fn(&mut cx, inst));
}
MonoItem::Static(def_id) => {
crate::constant::codegen_static(&mut cx.constants_cx, def_id)
metadata: EncodedMetadata,
need_metadata_module: bool,
) -> Box<(CodegenResults, FxHashMap<WorkProductId, WorkProduct>)> {
+ use rustc_span::symbol::sym;
+
+ let crate_attrs = tcx.hir().attrs(rustc_hir::CRATE_HIR_ID);
+ let subsystem = tcx.sess.first_attr_value_str_by_name(crate_attrs, sym::windows_subsystem);
+ let windows_subsystem = subsystem.map(|subsystem| {
+ if subsystem != sym::windows && subsystem != sym::console {
+ tcx.sess.fatal(&format!(
+ "invalid windows subsystem `{}`, only \
+ `windows` and `console` are allowed",
+ subsystem
+ ));
+ }
+ subsystem.to_string()
+ });
+
let mut work_products = FxHashMap::default();
let cgus = if tcx.sess.opts.output_types.should_codegen() {
allocator_module,
metadata_module,
metadata,
- windows_subsystem: None, // Windows is not yet supported
+ windows_subsystem,
linker_info: LinkerInfo::new(tcx),
crate_info: CrateInfo::new(tcx),
},
use std::ffi::CString;
use std::os::raw::{c_char, c_int};
+use cranelift_codegen::binemit::{NullStackMapSink, NullTrapSink};
use rustc_codegen_ssa::CrateInfo;
use rustc_middle::mir::mono::MonoItem;
+use rustc_session::config::EntryFnType;
use cranelift_jit::{JITBuilder, JITModule};
let mut jit_builder =
JITBuilder::with_isa(crate::build_isa(tcx.sess), cranelift_module::default_libcall_names());
jit_builder.hotswap(matches!(backend_config.codegen_mode, CodegenMode::JitLazy));
+ crate::compiler_builtins::register_functions_for_jit(&mut jit_builder);
jit_builder.symbols(imported_symbols);
let mut jit_module = JITModule::new(jit_builder);
assert_eq!(pointer_ty(tcx), jit_module.target_config().pointer_type());
- let sig = Signature {
- params: vec![
- AbiParam::new(jit_module.target_config().pointer_type()),
- AbiParam::new(jit_module.target_config().pointer_type()),
- ],
- returns: vec![AbiParam::new(jit_module.target_config().pointer_type() /*isize*/)],
- call_conv: CallConv::triple_default(&crate::target_triple(tcx.sess)),
- };
- let main_func_id = jit_module.declare_function("main", Linkage::Import, &sig).unwrap();
-
let (_, cgus) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
let mono_items = cgus
.iter()
super::time(tcx, "codegen mono items", || {
super::predefine_mono_items(&mut cx, &mono_items);
- for (mono_item, (linkage, visibility)) in mono_items {
- let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility);
+ for (mono_item, _) in mono_items {
match mono_item {
MonoItem::Fn(inst) => match backend_config.codegen_mode {
CodegenMode::Aot => unreachable!(),
CodegenMode::Jit => {
- cx.tcx
- .sess
- .time("codegen fn", || crate::base::codegen_fn(&mut cx, inst, linkage));
+ cx.tcx.sess.time("codegen fn", || crate::base::codegen_fn(&mut cx, inst));
}
CodegenMode::JitLazy => codegen_shim(&mut cx, inst),
},
tcx.sess.fatal("Inline asm is not supported in JIT mode");
}
- crate::main_shim::maybe_create_entry_wrapper(tcx, &mut jit_module, &mut unwind_context);
crate::allocator::codegen(tcx, &mut jit_module, &mut unwind_context);
tcx.sess.abort_if_errors();
jit_module.finalize_definitions();
-
let _unwind_register_guard = unsafe { unwind_context.register_jit(&jit_module) };
- let finalized_main: *const u8 = jit_module.get_finalized_function(main_func_id);
-
println!(
"Rustc codegen cranelift will JIT run the executable, because -Cllvm-args=mode=jit was passed"
);
- let f: extern "C" fn(c_int, *const *const c_char) -> c_int =
- unsafe { ::std::mem::transmute(finalized_main) };
-
let args = ::std::env::var("CG_CLIF_JIT_ARGS").unwrap_or_else(|_| String::new());
let args = std::iter::once(&*tcx.crate_name(LOCAL_CRATE).as_str().to_string())
.chain(args.split(' '))
BACKEND_CONFIG.with(|tls_backend_config| {
assert!(tls_backend_config.borrow_mut().replace(backend_config).is_none())
});
- CURRENT_MODULE
- .with(|current_module| assert!(current_module.borrow_mut().replace(jit_module).is_none()));
- let ret = f(args.len() as c_int, argv.as_ptr());
+ let (main_def_id, entry_ty) = tcx.entry_fn(LOCAL_CRATE).unwrap();
+ let instance = Instance::mono(tcx, main_def_id.to_def_id()).polymorphize(tcx);
+
+ match entry_ty {
+ EntryFnType::Main => {
+ // FIXME set program arguments somehow
- std::process::exit(ret);
+ let main_sig = Signature {
+ params: vec![],
+ returns: vec![],
+ call_conv: CallConv::triple_default(&crate::target_triple(tcx.sess)),
+ };
+ let main_func_id = jit_module
+ .declare_function(tcx.symbol_name(instance).name, Linkage::Import, &main_sig)
+ .unwrap();
+ let finalized_main: *const u8 = jit_module.get_finalized_function(main_func_id);
+
+ CURRENT_MODULE.with(|current_module| {
+ assert!(current_module.borrow_mut().replace(jit_module).is_none())
+ });
+
+ let f: extern "C" fn() = unsafe { ::std::mem::transmute(finalized_main) };
+ f();
+ std::process::exit(0);
+ }
+ EntryFnType::Start => {
+ let start_sig = Signature {
+ params: vec![
+ AbiParam::new(jit_module.target_config().pointer_type()),
+ AbiParam::new(jit_module.target_config().pointer_type()),
+ ],
+ returns: vec![AbiParam::new(
+ jit_module.target_config().pointer_type(), /*isize*/
+ )],
+ call_conv: CallConv::triple_default(&crate::target_triple(tcx.sess)),
+ };
+ let start_func_id = jit_module
+ .declare_function(tcx.symbol_name(instance).name, Linkage::Import, &start_sig)
+ .unwrap();
+ let finalized_start: *const u8 = jit_module.get_finalized_function(start_func_id);
+
+ CURRENT_MODULE.with(|current_module| {
+ assert!(current_module.borrow_mut().replace(jit_module).is_none())
+ });
+
+ let f: extern "C" fn(c_int, *const *const c_char) -> c_int =
+ unsafe { ::std::mem::transmute(finalized_start) };
+ let ret = f(args.len() as c_int, argv.as_ptr());
+ std::process::exit(ret);
+ }
+ }
}
#[no_mangle]
jit_module.prepare_for_function_redefine(func_id).unwrap();
let mut cx = crate::CodegenCx::new(tcx, backend_config, jit_module, false);
- tcx.sess
- .time("codegen fn", || crate::base::codegen_fn(&mut cx, instance, Linkage::Export));
+ tcx.sess.time("codegen fn", || crate::base::codegen_fn(&mut cx, instance));
let (global_asm, _debug_context, unwind_context) = cx.finalize();
assert!(global_asm.is_empty());
imported_symbols
}
-pub(super) fn codegen_shim<'tcx>(cx: &mut CodegenCx<'_, 'tcx>, inst: Instance<'tcx>) {
+fn codegen_shim<'tcx>(cx: &mut CodegenCx<'_, 'tcx>, inst: Instance<'tcx>) {
let tcx = cx.tcx;
let pointer_type = cx.module.target_config().pointer_type();
.define_function(
func_id,
&mut Context::for_function(trampoline),
- &mut cranelift_codegen::binemit::NullTrapSink {},
+ &mut NullTrapSink {},
+ &mut NullStackMapSink {},
)
.unwrap();
}
mono_items: &[(MonoItem<'tcx>, (RLinkage, Visibility))],
) {
cx.tcx.sess.time("predefine functions", || {
+ let is_compiler_builtins = cx.tcx.is_compiler_builtins(LOCAL_CRATE);
for &(mono_item, (linkage, visibility)) in mono_items {
match mono_item {
MonoItem::Fn(instance) => {
let name = cx.tcx.symbol_name(instance).name.to_string();
let _inst_guard = crate::PrintOnPanic(|| format!("{:?} {}", instance, name));
let sig = get_function_sig(cx.tcx, cx.module.isa().triple(), instance);
- let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility);
+ let linkage = crate::linkage::get_clif_linkage(
+ mono_item,
+ linkage,
+ visibility,
+ is_compiler_builtins,
+ );
cx.module.declare_function(&name, linkage, &sig).unwrap();
}
MonoItem::Static(_) | MonoItem::GlobalAsm(_) => {}
if template.is_empty() {
// Black box
return;
+ } else if template[0] == InlineAsmTemplatePiece::String("int $$0x29".to_string()) {
+ let true_ = fx.bcx.ins().iconst(types::I32, 1);
+ fx.bcx.ins().trapnz(true_, TrapCode::User(1));
+ return;
}
let mut slot_size = Size::from_bytes(0);
offset: None,
size: u32::try_from(slot_size.bytes()).unwrap(),
});
- #[cfg(debug_assertions)]
- fx.add_comment(stack_slot, "inline asm scratch slot");
+ if fx.clif_comments.enabled() {
+ fx.add_comment(stack_slot, "inline asm scratch slot");
+ }
let inline_asm_func = fx
.cx
)
.unwrap();
let inline_asm_func = fx.cx.module.declare_func_in_func(inline_asm_func, &mut fx.bcx.func);
- #[cfg(debug_assertions)]
- fx.add_comment(inline_asm_func, asm_name);
+ if fx.clif_comments.enabled() {
+ fx.add_comment(inline_asm_func, asm_name);
+ }
for (_reg, offset, value) in inputs {
fx.bcx.ins().stack_store(value, stack_slot, i32::try_from(offset.bytes()).unwrap());
let idx_bytes = match idx_const {
ConstValue::ByRef { alloc, offset } => {
let ptr = Pointer::new(AllocId(0 /* dummy */), offset);
- let size = Size::from_bytes(4 * u64::from(ret_lane_count) /* size_of([u32; ret_lane_count]) */);
+ let size = Size::from_bytes(4 * ret_lane_count /* size_of([u32; ret_lane_count]) */);
alloc.get_bytes(fx, ptr, size).unwrap()
}
_ => unreachable!("{:?}", idx_const),
-#![feature(
- rustc_private,
- decl_macro,
- type_alias_impl_trait,
- associated_type_bounds,
- never_type,
- try_blocks,
- box_patterns,
- hash_drain_filter
-)]
+#![feature(rustc_private, decl_macro, never_type, hash_drain_filter)]
#![warn(rust_2018_idioms)]
#![warn(unused_lifetimes)]
#![warn(unreachable_pub)]
mod cast;
mod codegen_i128;
mod common;
+mod compiler_builtins;
mod constant;
mod debuginfo;
mod discriminant;
impl CodegenBackend for CraneliftCodegenBackend {
fn init(&self, sess: &Session) {
- if sess.lto() != rustc_session::config::Lto::No && sess.opts.cg.embed_bitcode {
- sess.warn("LTO is not supported. You may get a linker error.");
+ use rustc_session::config::Lto;
+ match sess.lto() {
+ Lto::No | Lto::ThinLocal => {}
+ Lto::Thin | Lto::Fat => sess.warn("LTO is not supported. You may get a linker error."),
}
}
vec![]
}
- fn codegen_crate<'tcx>(
+ fn codegen_crate(
&self,
- tcx: TyCtxt<'tcx>,
+ tcx: TyCtxt<'_>,
metadata: EncodedMetadata,
need_metadata_module: bool,
) -> Box<dyn Any> {
BackendConfig::from_opts(&tcx.sess.opts.cg.llvm_args)
.unwrap_or_else(|err| tcx.sess.fatal(&err))
};
- let res = driver::codegen_crate(tcx, metadata, need_metadata_module, config);
-
- res
+ driver::codegen_crate(tcx, metadata, need_metadata_module, config)
}
fn join_codegen(
let mut flags_builder = settings::builder();
flags_builder.enable("is_pic").unwrap();
flags_builder.set("enable_probestack", "false").unwrap(); // __cranelift_probestack is not provided
- flags_builder
- .set("enable_verifier", if cfg!(debug_assertions) { "true" } else { "false" })
- .unwrap();
+ let enable_verifier =
+ cfg!(debug_assertions) || std::env::var("CG_CLIF_ENABLE_VERIFIER").is_ok();
+ flags_builder.set("enable_verifier", if enable_verifier { "true" } else { "false" }).unwrap();
let tls_model = match target_triple.binary_format {
BinaryFormat::Elf => "elf_gd",
flags_builder.set("enable_simd", "true").unwrap();
+ flags_builder.set("enable_llvm_abi_extensions", "true").unwrap();
+
use rustc_session::config::OptLevel;
match sess.opts.optimize {
OptLevel::No => {
flags_builder.set("opt_level", "none").unwrap();
}
OptLevel::Less | OptLevel::Default => {}
- OptLevel::Aggressive => {
+ OptLevel::Size | OptLevel::SizeMin | OptLevel::Aggressive => {
flags_builder.set("opt_level", "speed_and_size").unwrap();
}
- OptLevel::Size | OptLevel::SizeMin => {
- sess.warn("Optimizing for size is not supported. Just ignoring the request");
- }
}
let flags = settings::Flags::new(flags_builder);
mono_item: MonoItem<'_>,
linkage: RLinkage,
visibility: Visibility,
+ is_compiler_builtins: bool,
) -> Linkage {
match (linkage, visibility) {
+ (RLinkage::External, Visibility::Default) if is_compiler_builtins => Linkage::Hidden,
(RLinkage::External, Visibility::Default) => Linkage::Export,
(RLinkage::Internal, Visibility::Default) => Linkage::Local,
(RLinkage::External, Visibility::Hidden) => Linkage::Hidden,
+use cranelift_codegen::binemit::{NullStackMapSink, NullTrapSink};
use rustc_hir::LangItem;
use rustc_session::config::EntryFnType;
bcx.seal_all_blocks();
bcx.finalize();
}
- m.define_function(
- cmain_func_id,
- &mut ctx,
- &mut cranelift_codegen::binemit::NullTrapSink {},
- )
- .unwrap();
+ m.define_function(cmain_func_id, &mut ctx, &mut NullTrapSink {}, &mut NullStackMapSink {})
+ .unwrap();
unwind_context.add_function(cmain_func_id, &ctx, m.isa());
}
}
//! Reading and writing of the rustc metadata for rlibs and dylibs
-use std::convert::TryFrom;
use std::fs::File;
+use std::ops::Deref;
use std::path::Path;
use rustc_codegen_ssa::METADATA_FILENAME;
-use rustc_data_structures::owning_ref::OwningRef;
+use rustc_data_structures::owning_ref::{OwningRef, StableAddress};
use rustc_data_structures::rustc_erase_owner;
use rustc_data_structures::sync::MetadataRef;
use rustc_middle::middle::cstore::{EncodedMetadata, MetadataLoader};
pub(crate) struct CraneliftMetadataLoader;
+struct StableMmap(memmap2::Mmap); // read-only mmap whose deref'd byte address is stable across moves of the wrapper
+
+impl Deref for StableMmap {
+ type Target = [u8];
+
+ fn deref(&self) -> &[u8] {
+ &*self.0 // borrow the mapped bytes directly; no copy
+ }
+}
+
+unsafe impl StableAddress for StableMmap {} // SAFETY: the bytes live in the OS mapping, not inline in StableMmap, so moving the wrapper does not move what deref() points to — required by OwningRef
+
+fn load_metadata_with( // mmap `path` read-only and let `f` select the metadata byte range within the mapping
+ path: &Path,
+ f: impl for<'a> FnOnce(&'a [u8]) -> Result<&'a [u8], String>, // returns a sub-slice borrowed from the mapped file, or an error string
+) -> Result<MetadataRef, String> {
+ let file = File::open(path).map_err(|e| format!("{:?}", e))?;
+ let data = unsafe { memmap2::MmapOptions::new().map_copy_read_only(&file) } // SAFETY-NOTE(review): private copy-on-write read-only mapping; concurrent truncation of the file by another process is the usual mmap caveat — presumably accepted here, confirm
+ .map_err(|e| format!("{:?}", e))?;
+ let metadata = OwningRef::new(StableMmap(data)).try_map(f)?; // keep the mapping alive for as long as the selected slice is borrowed
+ return Ok(rustc_erase_owner!(metadata.map_owner_box())); // erase the concrete owner type into the opaque MetadataRef
+}
+
impl MetadataLoader for CraneliftMetadataLoader {
fn get_rlib_metadata(&self, _target: &Target, path: &Path) -> Result<MetadataRef, String> {
- let mut archive = ar::Archive::new(File::open(path).map_err(|e| format!("{:?}", e))?);
- // Iterate over all entries in the archive:
- while let Some(entry_result) = archive.next_entry() {
- let mut entry = entry_result.map_err(|e| format!("{:?}", e))?;
- if entry.header().identifier() == METADATA_FILENAME.as_bytes() {
- let mut buf = Vec::with_capacity(
- usize::try_from(entry.header().size())
- .expect("Rlib metadata file too big to load into memory."),
- );
- ::std::io::copy(&mut entry, &mut buf).map_err(|e| format!("{:?}", e))?;
- let buf: OwningRef<Vec<u8>, [u8]> = OwningRef::new(buf);
- return Ok(rustc_erase_owner!(buf.map_owner_box()));
+ load_metadata_with(path, |data| {
+ let archive = object::read::archive::ArchiveFile::parse(&*data)
+ .map_err(|e| format!("{:?}", e))?;
+
+ for entry_result in archive.members() {
+ let entry = entry_result.map_err(|e| format!("{:?}", e))?;
+ if entry.name() == METADATA_FILENAME.as_bytes() {
+ return Ok(entry.data());
+ }
}
- }
- Err("couldn't find metadata entry".to_string())
+ Err("couldn't find metadata entry".to_string())
+ })
}
fn get_dylib_metadata(&self, _target: &Target, path: &Path) -> Result<MetadataRef, String> {
use object::{Object, ObjectSection};
- let file = std::fs::read(path).map_err(|e| format!("read:{:?}", e))?;
- let file = object::File::parse(&file).map_err(|e| format!("parse: {:?}", e))?;
- let buf = file
- .section_by_name(".rustc")
- .ok_or("no .rustc section")?
- .data()
- .map_err(|e| format!("failed to read .rustc section: {:?}", e))?
- .to_owned();
- let buf: OwningRef<Vec<u8>, [u8]> = OwningRef::new(buf);
- Ok(rustc_erase_owner!(buf.map_owner_box()))
+
+ load_metadata_with(path, |data| {
+ let file = object::File::parse(&data).map_err(|e| format!("parse: {:?}", e))?;
+ file.section_by_name(".rustc")
+ .ok_or("no .rustc section")?
+ .data()
+ .map_err(|e| format!("failed to read .rustc section: {:?}", e))
+ })
}
}
BinOp::Shl => {
let lhs_ty = fx.bcx.func.dfg.value_type(lhs);
let actual_shift = fx.bcx.ins().band_imm(rhs, i64::from(lhs_ty.bits() - 1));
- let actual_shift = clif_intcast(fx, actual_shift, types::I8, false);
fx.bcx.ins().ishl(lhs, actual_shift)
}
BinOp::Shr => {
let lhs_ty = fx.bcx.func.dfg.value_type(lhs);
let actual_shift = fx.bcx.ins().band_imm(rhs, i64::from(lhs_ty.bits() - 1));
- let actual_shift = clif_intcast(fx, actual_shift, types::I8, false);
if signed {
fx.bcx.ins().sshr(lhs, actual_shift)
} else {
let lhs = in_lhs.load_scalar(fx);
let rhs = in_rhs.load_scalar(fx);
- return codegen_compare_bin_op(fx, bin_op, false, lhs, rhs);
+ codegen_compare_bin_op(fx, bin_op, false, lhs, rhs)
}
BinOp::Offset => {
let pointee_ty = in_lhs.layout().ty.builtin_deref(true).unwrap().ty;
let ptr_diff = fx.bcx.ins().imul_imm(offset, pointee_size as i64);
let base_val = base.load_scalar(fx);
let res = fx.bcx.ins().iadd(base_val, ptr_diff);
- return CValue::by_val(res, base.layout());
+ CValue::by_val(res, base.layout())
}
_ => unreachable!("{:?}({:?}, {:?})", bin_op, in_lhs, in_rhs),
- };
+ }
} else {
let (lhs_ptr, lhs_extra) = in_lhs.load_scalar_pair(fx);
let (rhs_ptr, rhs_extra) = in_rhs.load_scalar_pair(fx);
pub(super) fn optimize_function(
ctx: &mut Context,
- #[cfg_attr(not(debug_assertions), allow(unused_variables))]
clif_comments: &mut crate::pretty_clif::CommentWriter,
) {
combine_stack_addr_with_load_store(&mut ctx.func);
remove_unused_stack_addr_and_stack_load(&mut opt_ctx);
- #[cfg(debug_assertions)]
- {
+ if clif_comments.enabled() {
for (&OrdStackSlot(stack_slot), usage) in &opt_ctx.stack_slot_usage_map {
clif_comments.add_comment(stack_slot, format!("used by: {:?}", usage));
}
for load in users.stack_load.clone().into_iter() {
let potential_stores = users.potential_stores_for_load(&opt_ctx.ctx, load);
- #[cfg(debug_assertions)]
- for &store in &potential_stores {
- clif_comments.add_comment(
- load,
- format!(
- "Potential store -> load forwarding {} -> {} ({:?}, {:?})",
- opt_ctx.ctx.func.dfg.display_inst(store, None),
- opt_ctx.ctx.func.dfg.display_inst(load, None),
- spatial_overlap(&opt_ctx.ctx.func, store, load),
- temporal_order(&opt_ctx.ctx, store, load),
- ),
- );
+ if clif_comments.enabled() {
+ for &store in &potential_stores {
+ clif_comments.add_comment(
+ load,
+ format!(
+ "Potential store -> load forwarding {} -> {} ({:?}, {:?})",
+ opt_ctx.ctx.func.dfg.display_inst(store, None),
+ opt_ctx.ctx.func.dfg.display_inst(load, None),
+ spatial_overlap(&opt_ctx.ctx.func, store, load),
+ temporal_order(&opt_ctx.ctx, store, load),
+ ),
+ );
+ }
}
match *potential_stores {
[] => {
- #[cfg(debug_assertions)]
- clif_comments
- .add_comment(load, "[BUG?] Reading uninitialized memory".to_string());
+ if clif_comments.enabled() {
+ clif_comments
+ .add_comment(load, "[BUG?] Reading uninitialized memory".to_string());
+ }
}
[store]
if spatial_overlap(&opt_ctx.ctx.func, store, load) == SpatialOverlap::Full
// Only one store could have been the origin of the value.
let stored_value = opt_ctx.ctx.func.dfg.inst_args(store)[0];
- #[cfg(debug_assertions)]
- clif_comments
- .add_comment(load, format!("Store to load forward {} -> {}", store, load));
+ if clif_comments.enabled() {
+ clif_comments.add_comment(
+ load,
+ format!("Store to load forward {} -> {}", store, load),
+ );
+ }
users.change_load_to_alias(&mut opt_ctx.ctx.func, load, stored_value);
}
for store in users.stack_store.clone().into_iter() {
let potential_loads = users.potential_loads_of_store(&opt_ctx.ctx, store);
- #[cfg(debug_assertions)]
- for &load in &potential_loads {
- clif_comments.add_comment(
- store,
- format!(
- "Potential load from store {} <- {} ({:?}, {:?})",
- opt_ctx.ctx.func.dfg.display_inst(load, None),
- opt_ctx.ctx.func.dfg.display_inst(store, None),
- spatial_overlap(&opt_ctx.ctx.func, store, load),
- temporal_order(&opt_ctx.ctx, store, load),
- ),
- );
+ if clif_comments.enabled() {
+ for &load in &potential_loads {
+ clif_comments.add_comment(
+ store,
+ format!(
+ "Potential load from store {} <- {} ({:?}, {:?})",
+ opt_ctx.ctx.func.dfg.display_inst(load, None),
+ opt_ctx.ctx.func.dfg.display_inst(store, None),
+ spatial_overlap(&opt_ctx.ctx.func, store, load),
+ temporal_order(&opt_ctx.ctx, store, load),
+ ),
+ );
+ }
}
if potential_loads.is_empty() {
// Never loaded; can safely remove all stores and the stack slot.
// FIXME also remove stores when there is always a next store before a load.
- #[cfg(debug_assertions)]
- clif_comments.add_comment(
- store,
- format!(
- "Remove dead stack store {} of {}",
- opt_ctx.ctx.func.dfg.display_inst(store, None),
- stack_slot.0
- ),
- );
+ if clif_comments.enabled() {
+ clif_comments.add_comment(
+ store,
+ format!(
+ "Remove dead stack store {} of {}",
+ opt_ctx.ctx.func.dfg.display_inst(store, None),
+ stack_slot.0
+ ),
+ );
+ }
users.remove_dead_store(&mut opt_ctx.ctx.func, store);
}
Pointer { base: PointerBase::Dangling(align), offset: Offset32::new(0) }
}
- #[cfg(debug_assertions)]
- pub(crate) fn base_and_offset(self) -> (PointerBase, Offset32) {
+ pub(crate) fn debug_base_and_offset(self) -> (PointerBase, Offset32) {
(self.base, self.offset)
}
#[derive(Debug)]
pub(crate) struct CommentWriter {
+ enabled: bool,
global_comments: Vec<String>,
entity_comments: FxHashMap<AnyEntity, String>,
}
impl CommentWriter {
pub(crate) fn new<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
- let global_comments = if cfg!(debug_assertions) {
+ let enabled = should_write_ir(tcx);
+ let global_comments = if enabled {
vec![
format!("symbol {}", tcx.symbol_name(instance).name),
format!("instance {:?}", instance),
vec![]
};
- CommentWriter { global_comments, entity_comments: FxHashMap::default() }
+ CommentWriter { enabled, global_comments, entity_comments: FxHashMap::default() }
}
}
-#[cfg(debug_assertions)]
impl CommentWriter {
+ pub(crate) fn enabled(&self) -> bool {
+ self.enabled
+ }
+
pub(crate) fn add_global_comment<S: Into<String>>(&mut self, comment: S) {
+ debug_assert!(self.enabled);
self.global_comments.push(comment.into());
}
entity: E,
comment: S,
) {
+ debug_assert!(self.enabled);
+
use std::collections::hash_map::Entry;
match self.entity_comments.entry(entity.into()) {
Entry::Occupied(mut occ) => {
}
}
-#[cfg(debug_assertions)]
impl FunctionCx<'_, '_, '_> {
pub(crate) fn add_global_comment<S: Into<String>>(&mut self, comment: S) {
self.clif_comments.add_global_comment(comment);
tcx.sess.opts.output_types.contains_key(&OutputType::LlvmAssembly)
}
-pub(crate) fn write_ir_file<'tcx>(
- tcx: TyCtxt<'tcx>,
+pub(crate) fn write_ir_file(
+ tcx: TyCtxt<'_>,
name: &str,
write: impl FnOnce(&mut dyn Write) -> std::io::Result<()>,
) {
let clif_file_name = clif_output_dir.join(name);
- let res: std::io::Result<()> = try {
- let mut file = std::fs::File::create(clif_file_name)?;
- write(&mut file)?;
- };
+ let res = std::fs::File::create(clif_file_name).and_then(|mut file| write(&mut file));
if let Err(err) = res {
tcx.sess.warn(&format!("error writing ir file: {}", err));
}
)
.unwrap();
let puts = fx.cx.module.declare_func_in_func(puts, &mut fx.bcx.func);
- #[cfg(debug_assertions)]
- {
+ if fx.clif_comments.enabled() {
fx.add_comment(puts, "puts");
}
use crate::prelude::*;
-use cranelift_codegen::entity::EntityRef;
use cranelift_codegen::ir::immediates::Offset32;
fn codegen_field<'tcx>(
self,
fx: &mut FunctionCx<'_, '_, 'tcx>,
from: CValue<'tcx>,
- #[cfg_attr(not(debug_assertions), allow(unused_variables))] method: &'static str,
+ method: &'static str,
) {
fn transmute_value<'tcx>(
fx: &mut FunctionCx<'_, '_, 'tcx>,
assert_eq!(self.layout().size, from.layout().size);
- #[cfg(debug_assertions)]
- {
+ if fx.clif_comments.enabled() {
use cranelift_codegen::cursor::{Cursor, CursorPosition};
let cur_block = match fx.bcx.cursor().position() {
CursorPosition::After(block) => block,
}
// dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
}
+ (&ty::Adt(adt_def_a, substs_a), &ty::Adt(adt_def_b, substs_b))
+ if adt_def_a.did == adt_def_b.did =>
+ {
+ let mut types_a = substs_a.types();
+ let mut types_b = substs_b.types();
+ loop {
+ match (types_a.next(), types_b.next()) {
+ (Some(a), Some(b)) => assert_assignable(fx, a, b),
+ (None, None) => return,
+ (Some(_), None) | (None, Some(_)) => panic!("{:#?}/{:#?}", from_ty, to_ty),
+ }
+ }
+ }
_ => {
assert_eq!(
from_ty, to_ty,
use std::ffi::{CStr, CString};
use std::fs::File;
use std::io;
+use std::iter;
use std::path::Path;
use std::ptr;
use std::slice;
modules: &[llvm::ThinLTOModule],
names: &[CString],
) -> Self {
- let keys = modules
- .iter()
- .zip(names.iter())
+ let keys = iter::zip(modules, names)
.map(|(module, name)| {
let key = build_string(|rust_str| unsafe {
llvm::LLVMRustComputeLTOCacheKey(rust_str, module.identifier, data.0);
use rustc_target::spec::{HasTargetSpec, Target};
use std::borrow::Cow;
use std::ffi::CStr;
+use std::iter;
use std::ops::{Deref, Range};
use std::ptr;
use tracing::debug;
let param_tys = self.cx.func_params_types(fn_ty);
- let all_args_match = param_tys
- .iter()
- .zip(args.iter().map(|&v| self.val_ty(v)))
+ let all_args_match = iter::zip(¶m_tys, args.iter().map(|&v| self.val_ty(v)))
.all(|(expected_ty, actual_ty)| *expected_ty == actual_ty);
if all_args_match {
return Cow::Borrowed(args);
}
- let casted_args: Vec<_> = param_tys
- .into_iter()
- .zip(args.iter())
+ let casted_args: Vec<_> = iter::zip(param_tys, args)
.enumerate()
.map(|(i, (expected_ty, &actual_val))| {
let actual_ty = self.val_ty(actual_val);
let discriminant_type_metadata = |discr: Primitive| {
let enumerators_metadata: Vec<_> = match enum_type.kind() {
- ty::Adt(def, _) => def
- .discriminants(tcx)
- .zip(&def.variants)
+ ty::Adt(def, _) => iter::zip(def.discriminants(tcx), &def.variants)
.map(|((_, discr), v)| {
let name = v.ident.as_str();
let is_unsigned = match discr.ty.kind() {
if substs.types().next().is_some() {
let generics = cx.tcx.generics_of(def.did);
let names = get_parameter_names(cx, generics);
- let template_params: Vec<_> = substs
- .iter()
- .zip(names)
+ let template_params: Vec<_> = iter::zip(substs, names)
.filter_map(|(kind, name)| {
if let GenericArgKind::Type(ty) = kind.unpack() {
let actual_type =
use libc::c_uint;
use smallvec::SmallVec;
use std::cell::RefCell;
+use std::iter;
use tracing::debug;
mod create_scope_map;
// Again, only create type information if full debuginfo is enabled
let template_params: Vec<_> = if cx.sess().opts.debuginfo == DebugInfo::Full {
let names = get_parameter_names(cx, generics);
- substs
- .iter()
- .zip(names)
+ iter::zip(substs, names)
.filter_map(|(kind, name)| {
if let GenericArgKind::Type(ty) = kind.unpack() {
let actual_type =
#![feature(extended_key_value_attributes)]
#![feature(extern_types)]
#![feature(in_band_lifetimes)]
+#![feature(iter_zip)]
#![feature(nll)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![recursion_limit = "256"]
RealPredicateTrue = 15,
}
-impl RealPredicate {
- pub fn from_generic(realpred: rustc_codegen_ssa::common::RealPredicate) -> Self {
- match realpred {
- rustc_codegen_ssa::common::RealPredicate::RealPredicateFalse => {
- RealPredicate::RealPredicateFalse
- }
- rustc_codegen_ssa::common::RealPredicate::RealOEQ => RealPredicate::RealOEQ,
- rustc_codegen_ssa::common::RealPredicate::RealOGT => RealPredicate::RealOGT,
- rustc_codegen_ssa::common::RealPredicate::RealOGE => RealPredicate::RealOGE,
- rustc_codegen_ssa::common::RealPredicate::RealOLT => RealPredicate::RealOLT,
- rustc_codegen_ssa::common::RealPredicate::RealOLE => RealPredicate::RealOLE,
- rustc_codegen_ssa::common::RealPredicate::RealONE => RealPredicate::RealONE,
- rustc_codegen_ssa::common::RealPredicate::RealORD => RealPredicate::RealORD,
- rustc_codegen_ssa::common::RealPredicate::RealUNO => RealPredicate::RealUNO,
- rustc_codegen_ssa::common::RealPredicate::RealUEQ => RealPredicate::RealUEQ,
- rustc_codegen_ssa::common::RealPredicate::RealUGT => RealPredicate::RealUGT,
- rustc_codegen_ssa::common::RealPredicate::RealUGE => RealPredicate::RealUGE,
- rustc_codegen_ssa::common::RealPredicate::RealULT => RealPredicate::RealULT,
- rustc_codegen_ssa::common::RealPredicate::RealULE => RealPredicate::RealULE,
- rustc_codegen_ssa::common::RealPredicate::RealUNE => RealPredicate::RealUNE,
- rustc_codegen_ssa::common::RealPredicate::RealPredicateTrue => {
- RealPredicate::RealPredicateTrue
- }
- }
- }
-}
-
/// LLVMTypeKind
#[derive(Copy, Clone, PartialEq, Debug)]
#[repr(C)]
}
impl CounterMappingRegion {
- pub fn code_region(
+ crate fn code_region(
counter: coverage_map::Counter,
file_id: u32,
start_line: u32,
}
}
- pub fn expansion_region(
+ // This function might be used in the future; the LLVM API is still evolving, as is coverage
+ // support.
+ #[allow(dead_code)]
+ crate fn expansion_region(
file_id: u32,
expanded_file_id: u32,
start_line: u32,
}
}
- pub fn skipped_region(
+ // This function might be used in the future; the LLVM API is still evolving, as is coverage
+ // support.
+ #[allow(dead_code)]
+ crate fn skipped_region(
file_id: u32,
start_line: u32,
start_col: u32,
}
}
- pub fn gap_region(
+ // This function might be used in the future; the LLVM API is still evolving, as is coverage
+ // support.
+ #[allow(dead_code)]
+ crate fn gap_region(
counter: coverage_map::Counter,
file_id: u32,
start_line: u32,
cmd.add_eh_frame_header();
}
+ // NO-OPT-OUT, OBJECT-FILES-NO, AUDIT-ORDER
+ // Make the binary compatible with data execution prevention schemes.
+ cmd.add_no_exec();
+
+ // NO-OPT-OUT, OBJECT-FILES-NO
+ // Avoid linking to dynamic libraries unless they satisfy some undefined symbols
+ // at the point at which they are specified on the command line.
+ // Must be passed before any dynamic libraries.
+ cmd.add_as_needed();
+
// NO-OPT-OUT, OBJECT-FILES-NO
if crt_objects_fallback {
cmd.no_crt_objects();
fn group_end(&mut self);
fn linker_plugin_lto(&mut self);
fn add_eh_frame_header(&mut self) {}
+ fn add_no_exec(&mut self) {}
+ fn add_as_needed(&mut self) {}
fn finalize(&mut self);
}
fn add_eh_frame_header(&mut self) {
self.linker_arg("--eh-frame-hdr");
}
+
+ fn add_no_exec(&mut self) { // request data-execution-prevention from GNU-style linkers
+ if self.sess.target.is_like_windows {
+ self.linker_arg("--nxcompat"); // mark the PE image as NX/DEP compatible
+ } else if self.sess.target.linker_is_gnu {
+ self.linker_arg("-znoexecstack"); // mark the stack non-executable in the ELF output
+ }
+ }
+
+ fn add_as_needed(&mut self) { // only actually link dynamic libraries that resolve some undefined symbol
+ if self.sess.target.linker_is_gnu {
+ self.linker_arg("--as-needed"); // GNU ld: affects libraries listed after this flag on the command line
+ }
+ }
}
pub struct MsvcLinker<'a> {
fn linker_plugin_lto(&mut self) {
// Do nothing
}
+
+ fn add_no_exec(&mut self) { // MSVC equivalent of the GNU no-exec flags above
+ self.cmd.arg("/NXCOMPAT"); // mark the executable as compatible with data execution prevention
+ }
}
pub struct EmLinker<'a> {
#![feature(nll)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![feature(associated_type_bounds)]
+#![feature(iter_zip)]
#![recursion_limit = "256"]
#![feature(box_syntax)]
IndexVec<mir::BasicBlock, Option<Bx::BasicBlock>>,
IndexVec<mir::BasicBlock, Option<Bx::Funclet>>,
) {
- block_bxs
- .iter_enumerated()
- .zip(cleanup_kinds)
+ iter::zip(block_bxs.iter_enumerated(), cleanup_kinds)
.map(|((bb, &llbb), cleanup_kind)| {
match *cleanup_kind {
CleanupKind::Funclet if base::wants_msvc_seh(bx.sess()) => {}
pub fn pointer_ref(&self) -> &P::Target {
self.raw.pointer_ref()
}
- pub fn pointer_mut(&mut self) -> &mut P::Target
- where
- P: std::ops::DerefMut,
- {
- self.raw.pointer_mut()
- }
pub fn tag(&self) -> T {
self.raw.tag()
}
- pub fn set_tag(&mut self, tag: T) {
- self.raw.set_tag(tag);
- }
}
impl<P, T, const COMPARE_PACKED: bool> std::ops::Deref for TaggedPtr<P, T, COMPARE_PACKED>
None
}
}
-
- /// Returns `true` if nothing is enqueued.
- #[inline]
- pub fn is_empty(&self) -> bool {
- self.deque.is_empty()
- }
}
pub fn new(at_args: &'a [String], callbacks: &'b mut (dyn Callbacks + Send)) -> Self {
Self { at_args, callbacks, file_loader: None, emitter: None, make_codegen_backend: None }
}
+ /// Used by cg_clif.
pub fn set_make_codegen_backend(
&mut self,
make_codegen_backend: Option<
self.make_codegen_backend = make_codegen_backend;
self
}
+ /// Used by RLS.
pub fn set_emitter(&mut self, emitter: Option<Box<dyn Write + Send>>) -> &mut Self {
self.emitter = emitter;
self
}
+ /// Used by RLS.
pub fn set_file_loader(
&mut self,
file_loader: Option<Box<dyn FileLoader + Send + Sync>>,
/// (Rust does not yet support upcasting from a trait object to
/// an object for one of its super-traits.)
fn pp_ann(&self) -> &dyn pprust_hir::PpAnn;
-
- /// Computes an user-readable representation of a path, if possible.
- fn node_path(&self, id: hir::HirId) -> Option<String> {
- self.hir_map().and_then(|map| map.def_path_from_hir_id(id)).map(|path| {
- path.data.into_iter().map(|elem| elem.data.to_string()).collect::<Vec<_>>().join("::")
- })
- }
}
struct NoAnn<'hir> {
fn pp_ann(&self) -> &dyn pprust_hir::PpAnn {
self
}
-
- fn node_path(&self, id: hir::HirId) -> Option<String> {
- Some(self.tcx.def_path_str(self.tcx.hir().local_def_id(id).to_def_id()))
- }
}
impl<'tcx> pprust_hir::PpAnn for TypedAnnotation<'tcx> {
pub fn highlighted<S: Into<String>>(t: S) -> DiagnosticStyledString {
DiagnosticStyledString(vec![StringPart::Highlighted(t.into())])
}
-
- pub fn content(&self) -> String {
- self.0.iter().map(|x| x.content()).collect::<String>()
- }
}
#[derive(Debug, PartialEq, Eq)]
Highlighted(String),
}
-impl StringPart {
- pub fn content(&self) -> &str {
- match self {
- &StringPart::Normal(ref s) | &StringPart::Highlighted(ref s) => s,
- }
- }
-}
-
impl Diagnostic {
pub fn new(level: Level, message: &str) -> Self {
Diagnostic::new_with_code(level, None, message)
self
}
- pub fn note_expected_found(
+ crate fn note_expected_found(
&mut self,
expected_label: &dyn fmt::Display,
expected: DiagnosticStyledString,
self.note_expected_found_extra(expected_label, expected, found_label, found, &"", &"")
}
- pub fn note_unsuccessful_coercion(
+ crate fn note_unsuccessful_coercion(
&mut self,
expected: DiagnosticStyledString,
found: DiagnosticStyledString,
/// Prints the span with a note above it.
/// This is like [`Diagnostic::note()`], but it gets its own span.
- pub fn span_note<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self {
+ crate fn span_note<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self {
self.sub(Level::Note, msg, sp.into(), None);
self
}
/// Add a warning attached to this diagnostic.
- pub fn warn(&mut self, msg: &str) -> &mut Self {
+ crate fn warn(&mut self, msg: &str) -> &mut Self {
self.sub(Level::Warning, msg, MultiSpan::new(), None);
self
}
/// Prints the span with a warning above it.
/// This is like [`Diagnostic::warn()`], but it gets its own span.
- pub fn span_warn<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self {
+ crate fn span_warn<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self {
self.sub(Level::Warning, msg, sp.into(), None);
self
}
/// Add a help message attached to this diagnostic.
- pub fn help(&mut self, msg: &str) -> &mut Self {
+ crate fn help(&mut self, msg: &str) -> &mut Self {
self.sub(Level::Help, msg, MultiSpan::new(), None);
self
}
/// Prints the span with some help above it.
/// This is like [`Diagnostic::help()`], but it gets its own span.
- pub fn span_help<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self {
+ crate fn span_help<S: Into<MultiSpan>>(&mut self, sp: S, msg: &str) -> &mut Self {
self.sub(Level::Help, msg, sp.into(), None);
self
}
self
}
- /// Show multiple suggestions that have multiple parts.
- /// See also [`Diagnostic::multipart_suggestion()`].
- pub fn multipart_suggestions(
- &mut self,
- msg: &str,
- suggestions: Vec<Vec<(Span, String)>>,
- applicability: Applicability,
- ) -> &mut Self {
- assert!(!suggestions.is_empty());
- for s in &suggestions {
- assert!(!s.is_empty());
- }
- self.suggestions.push(CodeSuggestion {
- substitutions: suggestions
- .into_iter()
- .map(|suggestion| Substitution {
- parts: suggestion
- .into_iter()
- .map(|(span, snippet)| SubstitutionPart { snippet, span })
- .collect(),
- })
- .collect(),
- msg: msg.to_owned(),
- style: SuggestionStyle::ShowCode,
- applicability,
- tool_metadata: Default::default(),
- });
- self
- }
-
/// Prints out a message with for a multipart suggestion without showing the suggested code.
///
/// This is intended to be used for suggestions that are obvious in what the changes need to
self.code.clone()
}
- pub fn set_primary_message<M: Into<String>>(&mut self, msg: M) -> &mut Self {
+ crate fn set_primary_message<M: Into<String>>(&mut self, msg: M) -> &mut Self {
self.message[0] = (msg.into(), Style::NoStyle);
self
}
/// Convenience function for internal use, clients should use one of the
/// public methods above.
+ ///
+ /// Used by `proc_macro_server` for implementing `server::Diagnostic`.
pub fn sub(
&mut self,
level: Level,
pub fn $n:ident(&self, $($name:ident: $ty:ty),* $(,)?) -> &Self
) => {
$(#[$attrs])*
+ // we always document with --document-private-items
+ #[cfg_attr(not(bootstrap), allow(rustdoc::private_intra_doc_links))]
+ #[cfg_attr(bootstrap, allow(private_intra_doc_links))]
#[doc = concat!("See [`Diagnostic::", stringify!($n), "()`].")]
pub fn $n(&self, $($name: $ty),*) -> &Self {
self.diagnostic.$n($($name),*);
) => {
$(#[$attrs])*
#[doc = concat!("See [`Diagnostic::", stringify!($n), "()`].")]
+ // we always document with --document-private-items
+ #[cfg_attr(not(bootstrap), allow(rustdoc::private_intra_doc_links))]
+ #[cfg_attr(bootstrap, allow(private_intra_doc_links))]
pub fn $n(&mut self, $($name: $ty),*) -> &mut Self {
self.0.diagnostic.$n($($name),*);
self
) => {
$(#[$attrs])*
#[doc = concat!("See [`Diagnostic::", stringify!($n), "()`].")]
+ // we always document with --document-private-items
+ #[cfg_attr(not(bootstrap), allow(rustdoc::private_intra_doc_links))]
+ #[cfg_attr(bootstrap, allow(private_intra_doc_links))]
pub fn $n<$($generic: $bound),*>(&mut self, $($name: $ty),*) -> &mut Self {
self.0.diagnostic.$n($($name),*);
self
buffered_diagnostics.extend(self.into_diagnostic().map(|(diag, _)| diag));
}
- /// Convenience function for internal use, clients should use one of the
- /// span_* methods instead.
- pub fn sub<S: Into<MultiSpan>>(
- &mut self,
- level: Level,
- message: &str,
- span: Option<S>,
- ) -> &mut Self {
- let span = span.map(|s| s.into()).unwrap_or_else(MultiSpan::new);
- self.0.diagnostic.sub(level, message, span, None);
- self
- }
-
/// Delay emission of this diagnostic as a bug.
///
/// This can be useful in contexts where an error indicates a bug but
self
}
- /// See [`Diagnostic::multipart_suggestions()`].
- pub fn multipart_suggestions(
- &mut self,
- msg: &str,
- suggestions: Vec<Vec<(Span, String)>>,
- applicability: Applicability,
- ) -> &mut Self {
- if !self.0.allow_suggestions {
- return self;
- }
- self.0.diagnostic.multipart_suggestions(msg, suggestions, applicability);
- self
- }
-
/// See [`Diagnostic::tool_only_multipart_suggestion()`].
pub fn tool_only_multipart_suggestion(
&mut self,
};
let ascii_confusables = &['c', 'f', 'i', 'k', 'o', 's', 'u', 'v', 'w', 'x', 'y', 'z'];
// All the chars that differ in capitalization are confusable (above):
- let confusable = found
- .chars()
- .zip(suggested.chars())
+ let confusable = iter::zip(found.chars(), suggested.chars())
.filter(|(f, s)| f != s)
.all(|(f, s)| (ascii_confusables.contains(&f) || ascii_confusables.contains(&s)));
confusable && found.to_lowercase() == suggested.to_lowercase()
h_end: usize,
) -> DiagnosticSpanLine {
DiagnosticSpanLine {
- text: sf.get_line(index).map_or(String::new(), |l| l.into_owned()),
+ text: sf.get_line(index).map_or_else(String::new, |l| l.into_owned()),
highlight_start: h_start,
highlight_end: h_end,
}
#![feature(crate_visibility_modifier)]
#![feature(backtrace)]
#![feature(extended_key_value_attributes)]
+#![feature(iter_zip)]
#![feature(nll)]
#[macro_use]
db
}
- pub fn failure(&self, msg: &str) {
- self.inner.borrow_mut().failure(msg);
- }
-
pub fn fatal(&self, msg: &str) -> FatalError {
self.inner.borrow_mut().fatal(msg)
}
Registry { long_descriptions: long_descriptions.iter().copied().collect() }
}
- /// This will panic if an invalid error code is passed in
- pub fn find_description(&self, code: &str) -> Option<&'static str> {
- self.long_descriptions[code]
- }
/// Returns `InvalidErrorCode` if the code requested does not exist in the
/// registry. Otherwise, returns an `Option` where `None` means the error
/// code is valid but has no extended information.
matches!(self.annotation_type, AnnotationType::MultilineLine(_))
}
- pub fn is_multiline(&self) -> bool {
- matches!(
- self.annotation_type,
- AnnotationType::Multiline(_)
- | AnnotationType::MultilineStart(_)
- | AnnotationType::MultilineLine(_)
- | AnnotationType::MultilineEnd(_)
- )
- }
-
pub fn len(&self) -> usize {
// Account for usize underflows
if self.end_col > self.start_col {
// Code for creating styled buffers
use crate::snippet::{Style, StyledString};
+use std::iter;
#[derive(Debug)]
pub struct StyledBuffer {
let mut output: Vec<Vec<StyledString>> = vec![];
let mut styled_vec: Vec<StyledString> = vec![];
- for (row, row_style) in self.text.iter().zip(&self.styles) {
+ for (row, row_style) in iter::zip(&self.text, &self.styles) {
let mut current_style = Style::NoStyle;
let mut current_text = String::new();
- for (&c, &s) in row.iter().zip(row_style) {
+ for (&c, &s) in iter::zip(row, row_style) {
if s != current_style {
if !current_text.is_empty() {
styled_vec.push(StyledString { text: current_text, style: current_style });
let pathexpr = self.expr_path(self.path_global(sp, fn_path));
self.expr_call(sp, pathexpr, args)
}
- pub fn expr_method_call(
- &self,
- span: Span,
- expr: P<ast::Expr>,
- ident: Ident,
- mut args: Vec<P<ast::Expr>>,
- ) -> P<ast::Expr> {
- args.insert(0, expr);
- let segment = ast::PathSegment::from_ident(ident.with_span_pos(span));
- self.expr(span, ast::ExprKind::MethodCall(segment, args, span))
- }
pub fn expr_block(&self, b: P<ast::Block>) -> P<ast::Expr> {
self.expr(b.span, ast::ExprKind::Block(b, None))
}
#![feature(crate_visibility_modifier)]
#![feature(decl_macro)]
#![feature(destructuring_assignment)]
+#![feature(iter_zip)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![feature(proc_macro_diagnostic)]
#![feature(proc_macro_internals)]
use smallvec::SmallVec;
+use std::iter;
+
/// Stack represented as linked list.
///
/// Those are used for environments because they grow incrementally and are not mutable.
sess.span_diagnostic.span_bug(span, "length mismatch between LHSes and RHSes")
}
let mut valid = true;
- for (lhs, rhs) in lhses.iter().zip(rhses.iter()) {
+ for (lhs, rhs) in iter::zip(lhses, rhses) {
let mut binders = Binders::default();
check_binders(sess, node_id, lhs, &Stack::Empty, &mut binders, &Stack::Empty, &mut valid);
check_occurrences(sess, node_id, rhs, &Stack::Empty, &binders, &Stack::Empty, &mut valid);
use crate::tests::string_to_stream;
use rustc_ast::token;
-use rustc_ast::tokenstream::{TokenStream, TokenStreamBuilder, TokenTree};
+use rustc_ast::tokenstream::{Spacing, TokenStream, TokenStreamBuilder, TokenTree};
use rustc_span::with_default_session_globals;
use rustc_span::{BytePos, Span, Symbol};
use smallvec::smallvec;
Span::with_root_ctxt(BytePos(a), BytePos(b))
}
+fn joint(tree: TokenTree) -> TokenStream {
+ TokenStream::new(vec![(tree, Spacing::Joint)])
+}
+
#[test]
fn test_concat() {
with_default_session_globals(|| {
fn test_dotdotdot() {
with_default_session_globals(|| {
let mut builder = TokenStreamBuilder::new();
- builder.push(TokenTree::token(token::Dot, sp(0, 1)).joint());
- builder.push(TokenTree::token(token::Dot, sp(1, 2)).joint());
+ builder.push(joint(TokenTree::token(token::Dot, sp(0, 1))));
+ builder.push(joint(TokenTree::token(token::Dot, sp(1, 2))));
builder.push(TokenTree::token(token::Dot, sp(2, 3)));
let stream = builder.build();
assert!(stream.eq_unspanned(&string_to_ts("...")));
pub fn as_slice(&'a self) -> &'a str {
&*self.name
}
-
- pub fn name(self) -> Cow<'a, str> {
- self.name
- }
}
/// Each instance of a type that implements `Label<C>` maps to a
LabelStr(s.into())
}
- pub fn escaped<S: Into<Cow<'a, str>>>(s: S) -> LabelText<'a> {
- EscStr(s.into())
- }
-
pub fn html<S: Into<Cow<'a, str>>>(s: S) -> LabelText<'a> {
HtmlStr(s.into())
}
}
}
- /// Puts `prefix` on a line above this label, with a blank line separator.
- pub fn prefix_line(self, prefix: LabelText<'_>) -> LabelText<'static> {
- prefix.suffix_line(self)
- }
-
/// Puts `suffix` on a line below this label, with a blank line separator.
pub fn suffix_line(self, suffix: LabelText<'_>) -> LabelText<'static> {
let mut prefix = self.pre_escaped_content().into_owned();
DarkTheme,
}
-/// Returns vec holding all the default render options.
-pub fn default_options() -> Vec<RenderOption> {
- vec![]
-}
-
/// Renders directed graph `g` into the writer `w` in DOT syntax.
/// (Simple wrapper around `render_opts` that passes a default set of options.)
pub fn render<'a, N, E, G, W>(g: &'a G, w: &mut W) -> io::Result<()>
fn node_label(&'a self, n: &Node) -> LabelText<'a> {
match self.node_labels[*n] {
Some(l) => LabelStr(l.into()),
- None => LabelStr(id_name(n).name()),
+ None => LabelStr(id_name(n).name),
}
}
fn edge_label(&'a self, e: &&'a Edge) -> LabelText<'a> {
hash
}
+ /// Used by librustdoc for fake DefIds.
pub fn num_def_ids(&self) -> usize {
self.index_to_key.len()
}
self.table.def_path_hash(id.local_def_index)
}
- #[inline]
- pub fn def_path_hash_to_def_id(&self, def_path_hash: DefPathHash) -> LocalDefId {
- let local_def_index = self.table.def_path_hash_to_index[&def_path_hash];
- LocalDefId { local_def_index }
- }
-
/// Returns the path from the crate root to `index`. The root
/// nodes are not included in the path (i.e., this will be an
/// empty vector for the crate root). For an inlined item, this
// ignore-tidy-filelength
-use crate::def::{CtorKind, DefKind, Namespace, Res};
+use crate::def::{CtorKind, DefKind, Res};
use crate::def_id::DefId;
crate use crate::hir_id::HirId;
use crate::{itemlikevisit, LangItem};
TyAlias(&'hir Ty<'hir>),
}
-impl ImplItemKind<'_> {
- pub fn namespace(&self) -> Namespace {
- match self {
- ImplItemKind::TyAlias(..) => Namespace::TypeNS,
- ImplItemKind::Const(..) | ImplItemKind::Fn(..) => Namespace::ValueNS,
- }
- }
-}
-
// The name of the associated type for `Fn` return types.
pub const FN_OUTPUT_NAME: Symbol = sym::Output;
Self::Str,
];
+ /// Like [`PrimTy::name`], but returns a &str instead of a symbol.
+ ///
+ /// Used by rustdoc.
pub fn name_str(self) -> &'static str {
match self {
PrimTy::Int(i) => i.name_str(),
local_id: ItemLocalId::from_u32(0),
};
+/// N.B. This collection is currently unused, but will be used by #72015 and future PRs.
#[derive(Clone, Default, Debug, Encodable, Decodable)]
pub struct HirIdVec<T> {
map: IndexVec<LocalDefId, IndexVec<ItemLocalId, T>>,
})
}
- /// Checks if the pattern contains any patterns that bind something to
- /// an ident, e.g., `foo`, or `Foo(foo)` or `foo @ Bar(..)`.
- pub fn contains_bindings(&self) -> bool {
- self.satisfies(|p| matches!(p.kind, PatKind::Binding(..)))
- }
-
- /// Checks if the pattern satisfies the given predicate on some sub-pattern.
- fn satisfies(&self, pred: impl Fn(&hir::Pat<'_>) -> bool) -> bool {
- let mut satisfies = false;
- self.walk_short(|p| {
- if pred(p) {
- satisfies = true;
- false // Found one, can short circuit now.
- } else {
- true
- }
- });
- satisfies
- }
-
pub fn simple_ident(&self) -> Option<Ident> {
match self.kind {
PatKind::Binding(
to_string(NO_ANN, |s| s.print_bounds("", bounds))
}
-pub fn param_to_string(arg: &hir::Param<'_>) -> String {
- to_string(NO_ANN, |s| s.print_param(arg))
-}
-
pub fn ty_to_string(ty: &hir::Ty<'_>) -> String {
to_string(NO_ANN, |s| s.print_type(ty))
}
}
}
- pub fn print_usize(&mut self, i: usize) {
- self.s.word(i.to_string())
- }
-
pub fn print_name(&mut self, name: Symbol) {
self.print_ident(Ident::with_dummy_span(name))
}
- pub fn print_for_decl(&mut self, loc: &hir::Local<'_>, coll: &hir::Expr<'_>) {
- self.print_local_decl(loc);
- self.s.space();
- self.word_space("in");
- self.print_expr(coll)
- }
-
pub fn print_path(&mut self, path: &hir::Path<'_>, colons_before_params: bool) {
self.maybe_print_comment(path.span.lo());
}
}
- pub fn print_opt_abi_and_extern_if_nondefault(&mut self, opt_abi: Option<Abi>) {
- match opt_abi {
- Some(Abi::Rust) => {}
- Some(abi) => {
- self.word_nbsp("extern");
- self.word_nbsp(abi.to_string())
- }
- None => {}
- }
- }
-
- pub fn print_extern_opt_abi(&mut self, opt_abi: Option<Abi>) {
- if let Some(abi) = opt_abi {
- self.word_nbsp("extern");
- self.word_nbsp(abi.to_string())
- }
- }
-
pub fn print_fn_header_info(&mut self, header: hir::FnHeader, vis: &hir::Visibility<'_>) {
self.s.word(visibility_qualified(vis, ""));
&[label_strs::generics_of, label_strs::predicates_of, label_strs::type_of];
/// Trait definition `DepNode`s.
-const BASE_TRAIT_DEF: &[&str] = &[
- label_strs::associated_item_def_ids,
- label_strs::generics_of,
- label_strs::object_safety_violations,
- label_strs::predicates_of,
- label_strs::specialization_graph_of,
- label_strs::trait_def,
- label_strs::trait_impls_of,
-];
-
/// Extra `DepNode`s for functions and methods.
const EXTRA_ASSOCIATED: &[&str] = &[label_strs::associated_item];
/// Abstract data type (struct, enum, union) `DepNode`s.
const LABELS_ADT: &[&[&str]] = &[BASE_HIR, BASE_STRUCT];
-/// Trait definition `DepNode`s.
-#[allow(dead_code)]
-const LABELS_TRAIT: &[&[&str]] = &[BASE_HIR, BASE_TRAIT_DEF];
-
// FIXME: Struct/Enum/Unions Fields (there is currently no way to attach these)
//
// Fields are kind of separate from their containers, as they can change independently from
{
assert_eq!(out_vec.len(), in_vec.len());
let mut changed = false;
- for (out_elem, in_elem) in out_vec.iter_mut().zip(in_vec.iter()) {
+ for (out_elem, in_elem) in iter::zip(out_vec, in_vec) {
let old_val = *out_elem;
let new_val = op(old_val, *in_elem);
*out_elem = new_val;
let (write_start, write_end) = self.range(write);
let words = &mut self.words[..];
let mut changed = false;
- for (read_index, write_index) in (read_start..read_end).zip(write_start..write_end) {
+ for (read_index, write_index) in iter::zip(read_start..read_end, write_start..write_end) {
let word = words[write_index];
let new_word = word | words[read_index];
words[write_index] = new_word;
assert_eq!(with.domain_size(), self.num_columns);
let (write_start, write_end) = self.range(write);
let mut changed = false;
- for (read_index, write_index) in (0..with.words().len()).zip(write_start..write_end) {
+ for (read_index, write_index) in iter::zip(0..with.words().len(), write_start..write_end) {
let word = self.words[write_index];
let new_word = word | with.words()[read_index];
self.words[write_index] = new_word;
#![feature(const_fn)]
#![feature(const_panic)]
#![feature(extend_one)]
+#![feature(iter_zip)]
#![feature(unboxed_closures)]
#![feature(test)]
#![feature(fn_traits)]
use rustc_middle::ty::subst::{GenericArg, GenericArgKind};
use rustc_middle::ty::{self, BoundVar, Const, ToPredicate, Ty, TyCtxt};
use std::fmt::Debug;
+use std::iter;
impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
/// This method is meant to be invoked as the final step of a canonical query
// In terms of our example above, we are iterating over pairs like:
// [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]
- for (original_value, result_value) in original_values.var_values.iter().zip(result_values) {
+ for (original_value, result_value) in iter::zip(&original_values.var_values, result_values)
+ {
match result_value.unpack() {
GenericArgKind::Type(result_value) => {
// e.g., here `result_value` might be `?0` in the example above...
true
}
- fn visit_ct_substs(&self) -> bool {
- true
- }
-
fn binders<T>(
&mut self,
a: ty::Binder<T>,
}
}
}
+ ty::ConstKind::Unevaluated(ty::Unevaluated { def, substs, promoted })
+ if self.tcx().lazy_normalization() =>
+ {
+ assert_eq!(promoted, None);
+ let substs = self.relate_with_variance(ty::Variance::Invariant, substs, substs)?;
+ Ok(self.tcx().mk_const(ty::Const {
+ ty: c.ty,
+ val: ty::ConstKind::Unevaluated(ty::Unevaluated { def, substs, promoted }),
+ }))
+ }
_ => relate::super_relate_consts(self, c, c),
}
}
true
}
- fn visit_ct_substs(&self) -> bool {
- true
- }
-
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
_variance: ty::Variance,
}
}
}
+ ty::ConstKind::Unevaluated(ty::Unevaluated { def, substs, promoted })
+ if self.tcx().lazy_normalization() =>
+ {
+ assert_eq!(promoted, None);
+ let substs = self.relate_with_variance(ty::Variance::Invariant, substs, substs)?;
+ Ok(self.tcx().mk_const(ty::Const {
+ ty: c.ty,
+ val: ty::ConstKind::Unevaluated(ty::Unevaluated { def, substs, promoted }),
+ }))
+ }
_ => relate::super_relate_consts(self, c, c),
}
}
use rustc_span::{sym, BytePos, DesugaringKind, Pos, Span};
use rustc_target::spec::abi;
use std::ops::ControlFlow;
-use std::{cmp, fmt};
+use std::{cmp, fmt, iter};
mod note;
ty::GenericParamDefKind::Const { has_default: true } => Some(param.def_id),
_ => None,
});
- for (def_id, actual) in default_params.zip(substs.iter().rev()) {
+ for (def_id, actual) in iter::zip(default_params, substs.iter().rev()) {
match actual.unpack() {
GenericArgKind::Const(c) => {
if self.tcx.const_param_default(def_id).subst(self.tcx, substs) != c {
let len1 = sig1.inputs().len();
let len2 = sig2.inputs().len();
if len1 == len2 {
- for (i, (l, r)) in sig1.inputs().iter().zip(sig2.inputs().iter()).enumerate() {
+ for (i, (l, r)) in iter::zip(sig1.inputs(), sig2.inputs()).enumerate() {
let (x1, x2) = self.cmp(l, r);
(values.0).0.extend(x1.0);
(values.1).0.extend(x2.0);
let common_len = cmp::min(len1, len2);
let remainder1: Vec<_> = sub1.types().skip(common_len).collect();
let remainder2: Vec<_> = sub2.types().skip(common_len).collect();
- let common_default_params = remainder1
- .iter()
- .rev()
- .zip(remainder2.iter().rev())
- .filter(|(a, b)| a == b)
- .count();
+ let common_default_params =
+ iter::zip(remainder1.iter().rev(), remainder2.iter().rev())
+ .filter(|(a, b)| a == b)
+ .count();
let len = sub1.len() - common_default_params;
let consts_offset = len - sub1.consts().count();
const SEPARATOR: &str = "::";
let separator_len = SEPARATOR.len();
- let split_idx: usize = t1_str
- .split(SEPARATOR)
- .zip(t2_str.split(SEPARATOR))
- .take_while(|(mod1_str, mod2_str)| mod1_str == mod2_str)
- .map(|(mod_str, _)| mod_str.len() + separator_len)
- .sum();
+ let split_idx: usize =
+ iter::zip(t1_str.split(SEPARATOR), t2_str.split(SEPARATOR))
+ .take_while(|(mod1_str, mod2_str)| mod1_str == mod2_str)
+ .map(|(mod_str, _)| mod_str.len() + separator_len)
+ .sum();
debug!(
"cmp: separator_len={}, split_idx={}, min_len={}",
.find_map(|(path, msg)| (&path_str == path).then_some(msg))
{
let mut show_suggestion = true;
- for (exp_ty, found_ty) in exp_substs.types().zip(found_substs.types()) {
+ for (exp_ty, found_ty) in
+ iter::zip(exp_substs.types(), found_substs.types())
+ {
match *exp_ty.kind() {
ty::Ref(_, exp_ty, _) => {
match (exp_ty.kind(), found_ty.kind()) {
self.resolve_vars_if_possible(t).to_string()
}
- pub fn tys_to_string(&self, ts: &[Ty<'tcx>]) -> String {
- let tstrs: Vec<String> = ts.iter().map(|t| self.ty_to_string(*t)).collect();
- format!("({})", tstrs.join(", "))
- }
-
- pub fn trait_ref_to_string(&self, t: ty::TraitRef<'tcx>) -> String {
- self.resolve_vars_if_possible(t).print_only_trait_path().to_string()
- }
-
/// If `TyVar(vid)` resolves to a type, return that type. Else, return the
/// universe index of `TyVar(vid)`.
pub fn probe_ty_var(&self, vid: TyVid) -> Result<Ty<'tcx>, ty::UniverseIndex> {
) -> TypeTrace<'tcx> {
TypeTrace { cause: cause.clone(), values: Consts(ExpectedFound::new(a_is_expected, a, b)) }
}
-
- pub fn dummy(tcx: TyCtxt<'tcx>) -> TypeTrace<'tcx> {
- let err = tcx.ty_error();
- TypeTrace {
- cause: ObligationCause::dummy(),
- values: Types(ExpectedFound { expected: err, found: err }),
- }
- }
}
impl<'tcx> SubregionOrigin<'tcx> {
&self.region_bound_pairs_map
}
- /// Returns ownership of the `free_region_map`.
- pub fn into_free_region_map(self) -> FreeRegionMap<'tcx> {
- self.free_region_map
- }
-
/// This is a hack to support the old-skool regionck, which
/// processes region constraints from the main function and the
/// closure together. In that context, when we enter a closure, we
}
}
}
-
- /// Processes a single ad-hoc region obligation that was not
- /// registered in advance.
- pub fn type_must_outlive(
- &self,
- region_bound_pairs: &RegionBoundPairs<'tcx>,
- implicit_region_bound: Option<ty::Region<'tcx>>,
- param_env: ty::ParamEnv<'tcx>,
- origin: infer::SubregionOrigin<'tcx>,
- ty: Ty<'tcx>,
- region: ty::Region<'tcx>,
- ) {
- let outlives = &mut TypeOutlives::new(
- self,
- self.tcx,
- region_bound_pairs,
- implicit_region_bound,
- param_env,
- );
- let ty = self.resolve_vars_if_possible(ty);
- outlives.type_must_outlive(origin, ty, region);
- }
}
/// The `TypeOutlives` struct has the job of "lowering" a `T: 'a`
any_unifications: bool,
}
-/// When working with placeholder regions, we often wish to find all of
-/// the regions that are either reachable from a placeholder region, or
-/// which can reach a placeholder region, or both. We call such regions
-/// *tainted* regions. This struct allows you to decide what set of
-/// tainted regions you want.
-#[derive(Debug)]
-pub struct TaintDirections {
- incoming: bool,
- outgoing: bool,
-}
-
-impl TaintDirections {
- pub fn incoming() -> Self {
- TaintDirections { incoming: true, outgoing: false }
- }
-
- pub fn outgoing() -> Self {
- TaintDirections { incoming: false, outgoing: true }
- }
-
- pub fn both() -> Self {
- TaintDirections { incoming: true, outgoing: true }
- }
-}
-
impl<'tcx> RegionConstraintStorage<'tcx> {
pub fn new() -> Self {
Self::default()
self.var_infos[vid].universe
}
- /// Returns the origin for the given variable.
- pub fn var_origin(&self, vid: RegionVid) -> RegionVariableOrigin {
- self.var_infos[vid].origin
- }
-
fn add_constraint(&mut self, constraint: Constraint<'tcx>, origin: SubregionOrigin<'tcx>) {
// cannot add constraints once regions are resolved
debug!("RegionConstraintCollector: add_constraint({:?})", constraint);
VerifyBound::AnyBound(vec![self, vb])
}
}
-
- pub fn and(self, vb: VerifyBound<'tcx>) -> VerifyBound<'tcx> {
- if self.must_hold() && vb.must_hold() {
- self
- } else if self.cannot_hold() && vb.cannot_hold() {
- self
- } else {
- VerifyBound::AllBounds(vec![self, vb])
- }
- }
}
impl<'tcx> RegionConstraintData<'tcx> {
}
}
-pub(crate) struct Instantiate {
- vid: ty::TyVid,
-}
+pub(crate) struct Instantiate;
pub(crate) struct Delegate;
// Hack: we only need this so that `types_escaping_snapshot`
// can see what has been unified; see the Delegate impl for
// more details.
- self.undo_log.push(Instantiate { vid });
+ self.undo_log.push(Instantiate);
}
/// Creates a new type variable.
)
}
- /// Finds the set of type variables that existed *before* `s`
- /// but which have only been unified since `s` started, and
- /// return the types with which they were unified. So if we had
- /// a type variable `V0`, then we started the snapshot, then we
- /// created a type variable `V1`, unified `V0` with `T0`, and
- /// unified `V1` with `T1`, this function would return `{T0}`.
- pub fn types_escaping_snapshot(&mut self, s: &super::Snapshot<'tcx>) -> Vec<Ty<'tcx>> {
- let mut new_elem_threshold = u32::MAX;
- let mut escaping_types = Vec::new();
- let actions_since_snapshot = self.undo_log.actions_since_snapshot(s);
- debug!("actions_since_snapshot.len() = {}", actions_since_snapshot.len());
- for i in 0..actions_since_snapshot.len() {
- let actions_since_snapshot = self.undo_log.actions_since_snapshot(s);
- match actions_since_snapshot[i] {
- super::UndoLog::TypeVariables(UndoLog::Values(sv::UndoLog::NewElem(index))) => {
- // if any new variables were created during the
- // snapshot, remember the lower index (which will
- // always be the first one we see). Note that this
- // action must precede those variables being
- // specified.
- new_elem_threshold = cmp::min(new_elem_threshold, index as u32);
- debug!("NewElem({}) new_elem_threshold={}", index, new_elem_threshold);
- }
-
- super::UndoLog::TypeVariables(UndoLog::Values(sv::UndoLog::Other(
- Instantiate { vid, .. },
- ))) => {
- if vid.index < new_elem_threshold {
- // quick check to see if this variable was
- // created since the snapshot started or not.
- let mut eq_relations = ut::UnificationTable::with_log(
- &mut self.storage.eq_relations,
- &mut *self.undo_log,
- );
- let escaping_type = match eq_relations.probe_value(vid) {
- TypeVariableValue::Unknown { .. } => bug!(),
- TypeVariableValue::Known { value } => value,
- };
- escaping_types.push(escaping_type);
- }
- debug!("SpecifyVar({:?}) new_elem_threshold={}", vid, new_elem_threshold);
- }
-
- _ => {}
- }
- }
-
- escaping_types
- }
-
/// Returns indices of all variables that are not yet
/// instantiated.
pub fn unsolved_variables(&mut self) -> Vec<ty::TyVid> {
}
impl<'tcx> InferCtxtUndoLogs<'tcx> {
- pub fn actions_since_snapshot(&self, snapshot: &Snapshot<'tcx>) -> &[UndoLog<'tcx>] {
- &self.logs[snapshot.undo_len..]
- }
-
pub fn start_snapshot(&mut self) -> Snapshot<'tcx> {
self.num_open_snapshots += 1;
Snapshot { undo_len: self.logs.len(), _marker: PhantomData }
#![feature(const_fn)]
#![feature(const_panic)]
#![feature(extend_one)]
+#![feature(iter_zip)]
#![feature(never_type)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![feature(in_band_lifetimes)]
use rustc_span::symbol::Symbol;
use rustc_span::{MultiSpan, Span};
use std::fmt;
+use std::iter;
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn report_extra_impl_obligation(
note_span
.push_span_label(trait_span, "this trait cannot be made into an object...".to_string());
}
- for (span, msg) in multi_span.into_iter().zip(messages.into_iter()) {
+ for (span, msg) in iter::zip(multi_span, messages) {
note_span.push_span_label(span, msg);
}
err.span_note(
use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc_hir::definitions::Definitions;
use rustc_hir::Crate;
-use rustc_index::vec::IndexVec;
use rustc_lint::LintStore;
use rustc_middle::arena::Arena;
use rustc_middle::dep_graph::DepGraph;
callback(sess, &mut local_providers, &mut extern_providers);
}
- let queries = {
- let crates = resolver_outputs.cstore.crates_untracked();
- let max_cnum = crates.iter().map(|c| c.as_usize()).max().unwrap_or(0);
- let mut providers = IndexVec::from_elem_n(extern_providers, max_cnum + 1);
- providers[LOCAL_CRATE] = local_providers;
- queries.get_or_init(|| TcxQueries::new(providers, extern_providers))
- };
+ let queries = queries.get_or_init(|| TcxQueries::new(local_providers, extern_providers));
let gcx = sess.time("setup_global_ctxt", || {
global_ctxt.get_or_init(|| {
untracked!(ast_json, true);
untracked!(ast_json_noexpand, true);
untracked!(borrowck, String::from("other"));
- untracked!(borrowck_stats, true);
untracked!(deduplicate_diagnostics, true);
untracked!(dep_tasks, true);
untracked!(dont_buffer_diagnostics, true);
return false;
}
- if attr.is_value_str() {
+ if attr.value_str().is_some() {
return true;
}
use tracing::debug;
use std::cell::Cell;
+use std::iter;
use std::slice;
/// Information about the registered lints.
self.early_passes.push(Box::new(pass));
}
+ /// Used by clippy.
pub fn register_pre_expansion_pass(
&mut self,
pass: impl Fn() -> EarlyLintPassObject + 'static + sync::Send + sync::Sync,
/// // The given `def_id` is that of an `Option` type
/// }
/// ```
+ ///
+ /// Used by clippy, but should be replaced by diagnostic items eventually.
pub fn match_def_path(&self, def_id: DefId, path: &[Symbol]) -> bool {
let names = self.get_def_path(def_id);
- names.len() == path.len() && names.into_iter().zip(path.iter()).all(|(a, &b)| a == b)
+ names.len() == path.len() && iter::zip(names, path).all(|(a, &b)| a == b)
}
/// Gets the absolute path of `def_id` as a vector of `Symbol`.
self.id_to_set.insert(id, self.cur);
}
- pub fn build(self) -> LintLevelSets {
- self.sets
- }
-
pub fn build_map(self) -> LintLevelMap {
LintLevelMap { sets: self.sets, id_to_set: self.id_to_set }
}
#![feature(box_patterns)]
#![feature(crate_visibility_modifier)]
#![feature(iter_order_by)]
+#![feature(iter_zip)]
#![feature(never_type)]
#![feature(nll)]
#![cfg_attr(bootstrap, feature(or_patterns))]
store.register_renamed("exceeding_bitshifts", "arithmetic_overflow");
store.register_renamed("redundant_semicolon", "redundant_semicolons");
store.register_renamed("overlapping_patterns", "overlapping_range_endpoints");
+ store.register_renamed("safe_packed_borrows", "unaligned_references");
// These were moved to tool lints, but rustc still sees them when compiling normally, before
// tool lints are registered, so `check_tool_name_for_backwards_compat` doesn't work. Use
use rustc_target::spec::abi::Abi as SpecAbi;
use std::cmp;
+use std::iter;
use std::ops::ControlFlow;
use tracing::debug;
let sig = self.cx.tcx.fn_sig(def_id);
let sig = self.cx.tcx.erase_late_bound_regions(sig);
- for (input_ty, input_hir) in sig.inputs().iter().zip(decl.inputs) {
+ for (input_ty, input_hir) in iter::zip(sig.inputs(), decl.inputs) {
self.check_type_for_ffi_and_report_errors(input_hir.span, input_ty, false, false);
}
layout
);
- let (largest, slargest, largest_index) = enum_definition
- .variants
- .iter()
- .zip(variants)
+ let (largest, slargest, largest_index) = iter::zip(enum_definition.variants, variants)
.map(|(variant, variant_layout)| {
// Subtract the size of the enum tag.
let bytes = variant_layout.size.bytes().saturating_sub(tag_size);
/// unsafe {
/// let foo = Foo { field1: 0, field2: 0 };
/// let _ = &foo.field1;
+ /// println!("{}", foo.field1); // An implicit `&` is added here, triggering the lint.
/// }
/// }
/// ```
///
/// ### Explanation
///
- /// Creating a reference to an insufficiently aligned packed field is
- /// [undefined behavior] and should be disallowed.
- ///
- /// This lint is "allow" by default because there is no stable
- /// alternative, and it is not yet certain how widespread existing code
- /// will trigger this lint.
- ///
- /// See [issue #27060] for more discussion.
+ /// Creating a reference to an insufficiently aligned packed field is [undefined behavior] and
+ /// should be disallowed. Using an `unsafe` block does not change anything about this. Instead,
+ /// the code should do a copy of the data in the packed field or use raw pointers and unaligned
+ /// accesses. See [issue #82523] for more information.
///
/// [undefined behavior]: https://doc.rust-lang.org/reference/behavior-considered-undefined.html
- /// [issue #27060]: https://github.com/rust-lang/rust/issues/27060
+ /// [issue #82523]: https://github.com/rust-lang/rust/issues/82523
pub UNALIGNED_REFERENCES,
- Allow,
+ Warn,
"detects unaligned references to fields of packed structs",
+ @future_incompatible = FutureIncompatibleInfo {
+ reference: "issue #82523 <https://github.com/rust-lang/rust/issues/82523>",
+ edition: None,
+ };
report_in_external_macro
}
"detects attempts to mutate a `const` item",
}
-declare_lint! {
- /// The `safe_packed_borrows` lint detects borrowing a field in the
- /// interior of a packed structure with alignment other than 1.
- ///
- /// ### Example
- ///
- /// ```rust
- /// #[repr(packed)]
- /// pub struct Unaligned<T>(pub T);
- ///
- /// pub struct Foo {
- /// start: u8,
- /// data: Unaligned<u32>,
- /// }
- ///
- /// fn main() {
- /// let x = Foo { start: 0, data: Unaligned(1) };
- /// let y = &x.data.0;
- /// }
- /// ```
- ///
- /// {{produces}}
- ///
- /// ### Explanation
- ///
- /// This type of borrow is unsafe and can cause errors on some platforms
- /// and violates some assumptions made by the compiler. This was
- /// previously allowed unintentionally. This is a [future-incompatible]
- /// lint to transition this to a hard error in the future. See [issue
- /// #46043] for more details, including guidance on how to solve the
- /// problem.
- ///
- /// [issue #46043]: https://github.com/rust-lang/rust/issues/46043
- /// [future-incompatible]: ../index.md#future-incompatible-lints
- pub SAFE_PACKED_BORROWS,
- Warn,
- "safe borrows of fields of packed structs were erroneously allowed",
- @future_incompatible = FutureIncompatibleInfo {
- reference: "issue #46043 <https://github.com/rust-lang/rust/issues/46043>",
- edition: None,
- };
-}
-
declare_lint! {
/// The `patterns_in_fns_without_body` lint detects `mut` identifier
/// patterns as a parameter in functions without a body.
Warn,
"detects proc macro derives using inaccessible names from parent modules",
@future_incompatible = FutureIncompatibleInfo {
- reference: "issue #50504 <https://github.com/rust-lang/rust/issues/50504>",
+ reference: "issue #83583 <https://github.com/rust-lang/rust/issues/83583>",
edition: None,
};
}
RENAMED_AND_REMOVED_LINTS,
UNALIGNED_REFERENCES,
CONST_ITEM_MUTATION,
- SAFE_PACKED_BORROWS,
PATTERNS_IN_FNS_WITHOUT_BODY,
MISSING_FRAGMENT_SPECIFIER,
LATE_BOUND_LIFETIME_ARGUMENTS,
"Macro generated a different number of errors than expected"
);
- for (found_error, &expected_error) in found_errors.iter().zip(expected_errors.iter()) {
+ for (found_error, &expected_error) in found_errors.iter().zip(expected_errors) {
let found_error_str = format!("{}", found_error);
assert_eq!(found_error_str, expected_error);
}
ParentHirIterator { current_id, map: self }
}
- /// Checks if the node is an argument. An argument is a local variable whose
- /// immediate parent is an item or a closure.
- pub fn is_argument(&self, id: HirId) -> bool {
- match self.find(id) {
- Some(Node::Binding(_)) => (),
- _ => return false,
- }
- matches!(
- self.find(self.get_parent_node(id)),
- Some(
- Node::Item(_)
- | Node::TraitItem(_)
- | Node::ImplItem(_)
- | Node::Expr(Expr { kind: ExprKind::Closure(..), .. }),
- )
- )
- }
-
/// Checks if the node is left-hand side of an assignment.
pub fn is_lhs(&self, id: HirId) -> bool {
match self.find(self.get_parent_node(id)) {
}
}
- pub fn expect_variant_data(&self, id: HirId) -> &'hir VariantData<'hir> {
- match self.find(id) {
- Some(
- Node::Ctor(vd)
- | Node::Item(Item { kind: ItemKind::Struct(vd, _) | ItemKind::Union(vd, _), .. }),
- ) => vd,
- Some(Node::Variant(variant)) => &variant.data,
- _ => bug!("expected struct or variant, found {}", self.node_to_string(id)),
- }
- }
-
pub fn expect_variant(&self, id: HirId) -> &'hir Variant<'hir> {
match self.find(id) {
Some(Node::Variant(variant)) => variant,
Self::new_with_or_without_spans(sess, krate, definitions, cstore, always_ignore_spans)
}
- #[inline]
- pub fn sess(&self) -> &'a Session {
- self.sess
- }
-
#[inline]
pub fn while_hashing_hir_bodies<F: FnOnce(&mut Self)>(&mut self, hash_bodies: bool, f: F) {
let prev_hash_bodies = self.hash_bodies;
use rustc_index::vec::IndexVec;
use rustc_macros::HashStable;
use smallvec::SmallVec;
+use std::iter;
use std::ops::Index;
/// A "canonicalized" type `V` is one where all free inference
Certainty::Ambiguous => false,
}
}
-
- pub fn is_ambiguous(&self) -> bool {
- !self.is_proven()
- }
}
impl<'tcx, R> QueryResponse<'tcx, R> {
pub fn is_proven(&self) -> bool {
self.certainty.is_proven()
}
-
- pub fn is_ambiguous(&self) -> bool {
- !self.is_proven()
- }
}
impl<'tcx, R> Canonical<'tcx, QueryResponse<'tcx, R>> {
use crate::ty::subst::GenericArgKind;
CanonicalVarValues {
- var_values: self
- .var_values
- .iter()
- .zip(0..)
+ var_values: iter::zip(&self.var_values, 0..)
.map(|(kind, i)| match kind.unpack() {
GenericArgKind::Type(..) => {
tcx.mk_ty(ty::Bound(ty::INNERMOST, ty::BoundVar::from_u32(i).into())).into()
ConstVariableValue::Known { value } => Some(value),
}
}
-
- pub fn is_unknown(&self) -> bool {
- match *self {
- ConstVariableValue::Unknown { .. } => true,
- ConstVariableValue::Known { .. } => false,
- }
- }
}
#[derive(Copy, Clone, Debug)]
#![feature(exclusive_range_pattern)]
#![feature(control_flow_enum)]
#![feature(associated_type_defaults)]
+#![feature(iter_zip)]
#![recursion_limit = "512"]
#[macro_use]
/// Returns `true` if `subscope` is equal to or is lexically nested inside `superscope`, and
/// `false` otherwise.
+ ///
+ /// Used by clippy.
pub fn is_subscope_of(&self, subscope: Scope, superscope: Scope) -> bool {
let mut s = subscope;
debug!("is_subscope_of({:?}, {:?})", subscope, superscope);
Stable,
}
-impl StabilityLevel {
- pub fn from_attr_level(level: &attr::StabilityLevel) -> Self {
- if level.is_stable() { Stable } else { Unstable }
- }
-}
-
/// An entry in the `depr_map`.
#[derive(Clone, HashStable, Debug)]
pub struct DeprecationEntry {
}
}
- pub fn is_counter(&self) -> bool {
- matches!(self, Self::Counter { .. })
- }
-
pub fn is_expression(&self) -> bool {
matches!(self, Self::Expression { .. })
}
-
- pub fn is_unreachable(&self) -> bool {
- *self == Self::Unreachable
- }
}
impl Debug for CoverageKind {
.unwrap_or_else(|| bug!("Signed value {:#x} does not fit in {} bits", i, size.bits()))
}
- #[inline]
- pub fn from_i8(i: i8) -> Self {
- Self::from_int(i, Size::from_bits(8))
- }
-
- #[inline]
- pub fn from_i16(i: i16) -> Self {
- Self::from_int(i, Size::from_bits(16))
- }
-
#[inline]
pub fn from_i32(i: i32) -> Self {
Self::from_int(i, Size::from_bits(32))
}
}
- /// Returns an iterator over all temporaries.
- #[inline]
- pub fn temps_iter<'a>(&'a self) -> impl Iterator<Item = Local> + 'a {
- (self.arg_count + 1..self.local_decls.len()).filter_map(move |index| {
- let local = Local::new(index);
- if self.local_decls[local].is_user_variable() { None } else { Some(local) }
- })
- }
-
- /// Returns an iterator over all user-declared locals.
- #[inline]
- pub fn vars_iter<'a>(&'a self) -> impl Iterator<Item = Local> + 'a {
- (self.arg_count + 1..self.local_decls.len()).filter_map(move |index| {
- let local = Local::new(index);
- self.local_decls[local].is_user_variable().then_some(local)
- })
- }
-
/// Returns an iterator over all user-declared mutable locals.
#[inline]
pub fn mut_vars_iter<'a>(&'a self) -> impl Iterator<Item = Local> + 'a {
CtorKind::Fn => fmt_tuple(fmt, &name),
CtorKind::Fictive => {
let mut struct_fmt = fmt.debug_struct(&name);
- for (field, place) in variant_def.fields.iter().zip(places) {
+ for (field, place) in iter::zip(&variant_def.fields, places) {
struct_fmt.field(&field.ident.as_str(), place);
}
struct_fmt.finish()
let mut struct_fmt = fmt.debug_struct(&name);
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
- for (&var_id, place) in upvars.keys().zip(places) {
+ for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(&var_name.as_str(), place);
}
let mut struct_fmt = fmt.debug_struct(&name);
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
- for (&var_id, place) in upvars.keys().zip(places) {
+ for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(&var_name.as_str(), place);
}
General,
/// Permitted both in `const fn`s and regular `fn`s.
GeneralAndConstFn,
- /// Borrow of packed field.
- /// Has to be handled as a lint for backwards compatibility.
- BorrowPacked,
/// Unsafe operation in an `unsafe fn` but outside an `unsafe` block.
/// Has to be handled as a lint for backwards compatibility.
UnsafeFn,
- /// Borrow of packed field in an `unsafe fn` but outside an `unsafe` block.
- /// Has to be handled as a lint for backwards compatibility.
- UnsafeFnBorrowPacked,
}
#[derive(Copy, Clone, PartialEq, TyEncodable, TyDecodable, HashStable, Debug)]
///
/// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory.
pub fn iter(&self) -> SwitchTargetsIter<'_> {
- SwitchTargetsIter { inner: self.values.iter().zip(self.targets.iter()) }
+ SwitchTargetsIter { inner: iter::zip(&self.values, &self.targets) }
}
/// Returns a slice with all possible jump targets (including the fallback target).
ReversePostorder { body, blocks, idx: len }
}
-
- pub fn reset(&mut self) {
- self.idx = self.blocks.len();
- }
}
pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorder<'a, 'tcx> {
matches!(self, PlaceContext::MutatingUse(..))
}
- /// Returns `true` if this place context represents a use that does not change the value.
- #[inline]
- pub fn is_nonmutating_use(&self) -> bool {
- matches!(self, PlaceContext::NonMutatingUse(..))
- }
-
/// Returns `true` if this place context represents a use.
#[inline]
pub fn is_use(&self) -> bool {
pub b: Ty<'tcx>,
}
- impl<'tcx> Eq<'tcx> {
- pub fn new(a: Ty<'tcx>, b: Ty<'tcx>) -> Self {
- Self { a, b }
- }
- }
-
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, HashStable, TypeFoldable, Lift)]
pub struct Subtype<'tcx> {
pub sub: Ty<'tcx>,
pub sup: Ty<'tcx>,
}
- impl<'tcx> Subtype<'tcx> {
- pub fn new(sub: Ty<'tcx>, sup: Ty<'tcx>) -> Self {
- Self { sub, sup }
- }
- }
-
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq, HashStable, TypeFoldable, Lift)]
pub struct ProvePredicate<'tcx> {
pub predicate: Predicate<'tcx>,
use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LocalDefId};
-use rustc_hir::lang_items::LangItem;
use rustc_span::Span;
use super::{Ty, TyCtxt};
// This is the initial value used when doing upvar inference.
pub const LATTICE_BOTTOM: ClosureKind = ClosureKind::Fn;
- pub fn trait_did(&self, tcx: TyCtxt<'tcx>) -> DefId {
- match *self {
- ClosureKind::Fn => tcx.require_lang_item(LangItem::Fn, None),
- ClosureKind::FnMut => tcx.require_lang_item(LangItem::FnMut, None),
- ClosureKind::FnOnce => tcx.require_lang_item(LangItem::FnOnce, None),
- }
- }
-
/// Returns `true` if a type that impls this closure kind
/// must also implement `other`.
pub fn extends(self, other: ty::ClosureKind) -> bool {
UniqueImmBorrow => hir::Mutability::Mut,
}
}
-
- pub fn to_user_str(&self) -> &'static str {
- match *self {
- MutBorrow => "mutable",
- ImmBorrow => "immutable",
- UniqueImmBorrow => "uniquely immutable",
- }
- }
}
}
#[inline]
-pub fn decode_arena_allocable<'tcx, D, T: ArenaAllocatable<'tcx> + Decodable<D>>(
+fn decode_arena_allocable<'tcx, D, T: ArenaAllocatable<'tcx> + Decodable<D>>(
decoder: &mut D,
) -> Result<&'tcx T, D::Error>
where
}
#[inline]
-pub fn decode_arena_allocable_slice<'tcx, D, T: ArenaAllocatable<'tcx> + Decodable<D>>(
+fn decode_arena_allocable_slice<'tcx, D, T: ArenaAllocatable<'tcx> + Decodable<D>>(
decoder: &mut D,
) -> Result<&'tcx [T], D::Error>
where
return false;
}
- user_substs.substs.iter().zip(BoundVar::new(0)..).all(|(kind, cvar)| {
+ iter::zip(user_substs.substs, BoundVar::new(0)..).all(|(kind, cvar)| {
match kind.unpack() {
GenericArgKind::Type(ty) => match ty.kind() {
ty::Bound(debruijn, b) => {
self.mk_ptr(TypeAndMut { ty, mutbl: hir::Mutability::Not })
}
- #[inline]
- pub fn mk_nil_ptr(self) -> Ty<'tcx> {
- self.mk_imm_ptr(self.mk_unit())
- }
-
#[inline]
pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> {
self.mk_ty(Array(ty, ty::Const::from_usize(self, n)))
// drops of `Option::None` before LTO. We also respect the intent of
// `#[inline]` on `Drop::drop` implementations.
return ty.ty_adt_def().map_or(true, |adt_def| {
- adt_def.destructor(tcx).map_or(adt_def.is_enum(), |dtor| {
- tcx.codegen_fn_attrs(dtor.did).requests_inline()
- })
+ adt_def.destructor(tcx).map_or_else(
+ || adt_def.is_enum(),
+ |dtor| tcx.codegen_fn_attrs(dtor.did).requests_inline(),
+ )
});
}
tcx.codegen_fn_attrs(self.def_id()).requests_inline()
} else {
// Try to use a ScalarPair for all tagged enums.
let mut common_prim = None;
- for (field_layouts, layout_variant) in variants.iter().zip(&layout_variants) {
+ for (field_layouts, layout_variant) in iter::zip(&variants, &layout_variants) {
let offsets = match layout_variant.fields {
FieldsShape::Arbitrary { ref offsets, .. } => offsets,
_ => bug!(),
};
let mut fields =
- field_layouts.iter().zip(offsets).filter(|p| !p.0.is_zst());
+ iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst());
let (field, offset) = match (fields.next(), fields.next()) {
(None, None) => continue,
(Some(pair), None) => pair,
const INVALID_FIELD_IDX: u32 = !0;
let mut combined_inverse_memory_index =
vec![INVALID_FIELD_IDX; promoted_memory_index.len() + memory_index.len()];
- let mut offsets_and_memory_index = offsets.into_iter().zip(memory_index);
+ let mut offsets_and_memory_index = iter::zip(offsets, memory_index);
let combined_offsets = variant_fields
.iter()
.enumerate()
pub type PolyProjectionPredicate<'tcx> = Binder<ProjectionPredicate<'tcx>>;
impl<'tcx> PolyProjectionPredicate<'tcx> {
- /// Returns the `DefId` of the associated item being projected.
- pub fn item_def_id(&self) -> DefId {
- self.skip_binder().projection_ty.item_def_id
- }
-
/// Returns the `DefId` of the trait of the associated item being projected.
#[inline]
pub fn trait_def_id(&self, tcx: TyCtxt<'tcx>) -> DefId {
self.skip_binder().projection_ty.trait_def_id(tcx)
}
- #[inline]
- pub fn projection_self_ty(&self) -> Binder<Ty<'tcx>> {
- self.map_bound(|predicate| predicate.projection_ty.self_ty())
- }
-
/// Get the [PolyTraitRef] required for this projection to be well formed.
/// Note that for generic associated types the predicates of the associated
/// type also need to be checked.
None
}
- pub fn expect_local(self) -> WithOptConstParam<LocalDefId> {
- self.as_local().unwrap()
- }
-
pub fn is_local(self) -> bool {
self.did.is_local()
}
use std::collections::BTreeMap;
use std::convert::TryFrom;
use std::fmt::{self, Write as _};
+use std::iter;
use std::ops::{ControlFlow, Deref, DerefMut};
// `pretty` is a separate module only for organization.
CtorKind::Fictive => {
p!(" {{ ");
let mut first = true;
- for (field_def, field) in variant_def.fields.iter().zip(fields) {
+ for (field_def, field) in iter::zip(&variant_def.fields, fields) {
if !first {
p!(", ");
}
) {
let mut current_diagnostics = self.current_diagnostics.borrow_mut();
- let x = current_diagnostics.entry(dep_node_index).or_insert(Vec::new());
+ let x = current_diagnostics.entry(dep_node_index).or_default();
x.extend(Into::<Vec<_>>::into(diagnostics));
}
/// relation. Just affects error messages.
fn a_is_expected(&self) -> bool;
- /// Whether we should look into the substs of unevaluated constants
- /// even if `feature(const_evaluatable_checked)` is active.
- ///
- /// This is needed in `combine` to prevent accidentially creating
- /// infinite types as we abuse `TypeRelation` to walk a type there.
- fn visit_ct_substs(&self) -> bool {
- false
- }
-
fn with_cause<F, R>(&mut self, _cause: Cause, f: F) -> R
where
F: FnOnce(&mut Self) -> R,
) -> RelateResult<'tcx, SubstsRef<'tcx>> {
let tcx = relation.tcx();
- let params = a_subst.iter().zip(b_subst).enumerate().map(|(i, (a, b))| {
+ let params = iter::zip(a_subst, b_subst).enumerate().map(|(i, (a, b))| {
let variance = variances.map_or(ty::Invariant, |v| v[i]);
relation.relate_with_variance(variance, a, b)
});
return Err(TypeError::ArgCount);
}
- let inputs_and_output = a
- .inputs()
- .iter()
- .cloned()
- .zip(b.inputs().iter().cloned())
- .map(|x| (x, false))
+ let inputs_and_output = iter::zip(a.inputs(), b.inputs())
+ .map(|(&a, &b)| ((a, b), false))
.chain(iter::once(((a.output(), b.output()), true)))
.map(|((a, b), is_output)| {
if is_output {
) -> RelateResult<'tcx, GeneratorWitness<'tcx>> {
assert_eq!(a.0.len(), b.0.len());
let tcx = relation.tcx();
- let types = tcx.mk_type_list(a.0.iter().zip(b.0).map(|(a, b)| relation.relate(a, b)))?;
+ let types = tcx.mk_type_list(iter::zip(a.0, b.0).map(|(a, b)| relation.relate(a, b)))?;
Ok(GeneratorWitness(types))
}
}
let sz_a = sz_a.try_eval_usize(tcx, relation.param_env());
let sz_b = sz_b.try_eval_usize(tcx, relation.param_env());
match (sz_a, sz_b) {
- (Some(sz_a_val), Some(sz_b_val)) => Err(TypeError::FixedArraySize(
- expected_found(relation, sz_a_val, sz_b_val),
- )),
+ (Some(sz_a_val), Some(sz_b_val)) if sz_a_val != sz_b_val => Err(
+ TypeError::FixedArraySize(expected_found(relation, sz_a_val, sz_b_val)),
+ ),
_ => Err(err),
}
}
(&ty::Tuple(as_), &ty::Tuple(bs)) => {
if as_.len() == bs.len() {
Ok(tcx.mk_tup(
- as_.iter().zip(bs).map(|(a, b)| relation.relate(a.expect_ty(), b.expect_ty())),
+ iter::zip(as_, bs).map(|(a, b)| relation.relate(a.expect_ty(), b.expect_ty())),
)?)
} else if !(as_.is_empty() || bs.is_empty()) {
Err(TypeError::TupleSize(expected_found(relation, as_.len(), bs.len())))
}
(ty::ConstKind::Unevaluated(au), ty::ConstKind::Unevaluated(bu))
- if tcx.features().const_evaluatable_checked && !relation.visit_ct_substs() =>
+ if tcx.features().const_evaluatable_checked =>
{
tcx.try_unify_abstract_consts(((au.def, au.substs), (bu.def, bu.substs)))
}
// Both the variant and each field have to be equal.
if a_destructured.variant == b_destructured.variant {
- for (a_field, b_field) in
- a_destructured.fields.iter().zip(b_destructured.fields.iter())
- {
+ for (a_field, b_field) in iter::zip(a_destructured.fields, b_destructured.fields) {
relation.consts(a_field, b_field)?;
}
return Err(TypeError::ExistentialMismatch(expected_found(relation, a, b)));
}
- let v = a_v.into_iter().zip(b_v.into_iter()).map(|(ep_a, ep_b)| {
+ let v = iter::zip(a_v, b_v).map(|(ep_a, ep_b)| {
use crate::ty::ExistentialPredicate::*;
match (ep_a.skip_binder(), ep_b.skip_binder()) {
(Trait(a), Trait(b)) => Ok(ty::Binder::bind(Trait(
Binder(value)
}
- /// Wraps `value` in a binder without actually binding any currently
- /// unbound variables.
- ///
- /// Note that this will shift all debrujin indices of escaping bound variables
- /// by 1 to avoid accidential captures.
- pub fn wrap_nonbinding(tcx: TyCtxt<'tcx>, value: T) -> Binder<T>
- where
- T: TypeFoldable<'tcx>,
- {
- if value.has_escaping_bound_vars() {
- Binder::bind(super::fold::shift_vars(tcx, value, 1))
- } else {
- Binder::dummy(value)
- }
- }
-
/// Skips the binder and returns the "bound" value. This is a
/// risky thing to do because it's easy to get confused about
/// De Bruijn indices and the like. It is usually better to
pub type PolyGenSig<'tcx> = Binder<GenSig<'tcx>>;
-impl<'tcx> PolyGenSig<'tcx> {
- pub fn resume_ty(&self) -> ty::Binder<Ty<'tcx>> {
- self.map_bound_ref(|sig| sig.resume_ty)
- }
- pub fn yield_ty(&self) -> ty::Binder<Ty<'tcx>> {
- self.map_bound_ref(|sig| sig.yield_ty)
- }
- pub fn return_ty(&self) -> ty::Binder<Ty<'tcx>> {
- self.map_bound_ref(|sig| sig.return_ty)
- }
-}
-
/// Signature of a function type, which we have arbitrarily
/// decided to use to refer to the input/output types.
///
ParamTy { index, name }
}
- pub fn for_self() -> ParamTy {
- ParamTy::new(0, kw::SelfUpper)
- }
-
pub fn for_def(def: &ty::GenericParamDef) -> ParamTy {
ParamTy::new(def.index, def.name)
}
pub name: Symbol,
}
-impl<'tcx> ParamConst {
+impl ParamConst {
pub fn new(index: u32, name: Symbol) -> ParamConst {
ParamConst { index, name }
}
pub fn for_def(def: &ty::GenericParamDef) -> ParamConst {
ParamConst::new(def.index, def.name)
}
-
- pub fn to_const(self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> &'tcx ty::Const<'tcx> {
- tcx.mk_const_param(self.index, self.name, ty)
- }
}
pub type Region<'tcx> = &'tcx RegionKind;
}
}
- /// Adjusts any De Bruijn indices so as to make `to_binder` the
- /// innermost binder. That is, if we have something bound at `to_binder`,
- /// it will now be bound at INNERMOST. This is an appropriate thing to do
- /// when moving a region out from inside binders:
- ///
- /// ```
- /// for<'a> fn(for<'b> for<'c> fn(&'a u32), _)
- /// // Binder: D3 D2 D1 ^^
- /// ```
- ///
- /// Here, the region `'a` would have the De Bruijn index D3,
- /// because it is the bound 3 binders out. However, if we wanted
- /// to refer to that region `'a` in the second argument (the `_`),
- /// those two binders would not be in scope. In that case, we
- /// might invoke `shift_out_to_binder(D3)`. This would adjust the
- /// De Bruijn index of `'a` to D1 (the innermost binder).
- ///
- /// If we invoke `shift_out_to_binder` and the region is in fact
- /// bound by one of the binders we are shifting out of, that is an
- /// error (and should fail an assertion failure).
- pub fn shifted_out_to_binder(&self, to_binder: ty::DebruijnIndex) -> RegionKind {
- match *self {
- ty::ReLateBound(debruijn, r) => {
- ty::ReLateBound(debruijn.shifted_out_to_binder(to_binder), r)
- }
- r => r,
- }
- }
-
pub fn type_flags(&self) -> TypeFlags {
let mut flags = TypeFlags::empty();
use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi::{Integer, Size, TargetDataLayout};
use smallvec::SmallVec;
-use std::{cmp, fmt};
+use std::{cmp, fmt, iter};
#[derive(Copy, Clone, Debug)]
pub struct Discr<'tcx> {
_ => bug!(),
};
- let result = item_substs
- .iter()
- .zip(impl_substs.iter())
+ let result = iter::zip(item_substs, impl_substs)
.filter(|&(_, k)| {
match k.unpack() {
GenericArgKind::Lifetime(&ty::RegionKind::ReEarlyBound(ref ebr)) => {
/// optimization as well as the rules around static values. Note
/// that the `Freeze` trait is not exposed to end users and is
/// effectively an implementation detail.
- // FIXME: use `TyCtxtAt` instead of separate `Span`.
pub fn is_freeze(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
self.is_trivially_freeze() || tcx_at.is_freeze_raw(param_env.and(self))
}
Span,
};
use rustc_target::abi::VariantIdx;
+use std::iter;
use super::borrow_set::BorrowData;
use super::MirBorrowckCtxt;
let expr = &self.infcx.tcx.hir().expect_expr(hir_id).kind;
debug!("closure_span: hir_id={:?} expr={:?}", hir_id, expr);
if let hir::ExprKind::Closure(.., body_id, args_span, _) = expr {
- for (captured_place, place) in self
- .infcx
- .tcx
- .typeck(def_id.expect_local())
- .closure_min_captures_flattened(def_id)
- .zip(places)
- {
+ for (captured_place, place) in iter::zip(
+ self.infcx.tcx.typeck(def_id.expect_local()).closure_min_captures_flattened(def_id),
+ places,
+ ) {
let upvar_hir_id = captured_place.get_root_variable();
//FIXME(project-rfc-2229#8): Use better span from captured_place
let span = self.infcx.tcx.upvars_mentioned(local_did)?[&upvar_hir_id].span;
debug!("Collected {:?}: {:?}", fr, outlived_fr);
// Add to set of constraints for final help note.
- self.constraints_to_add.entry(fr).or_insert(Vec::new()).push(outlived_fr);
+ self.constraints_to_add.entry(fr).or_default().push(outlived_fr);
}
/// Emit an intermediate note on the given `Diagnostic` if the involved regions are
use std::fmt::{self, Display};
+use std::iter;
use rustc_errors::DiagnosticBuilder;
use rustc_hir as hir;
// just worry about trying to match up the rustc type
// with the HIR types:
(ty::Tuple(elem_tys), hir::TyKind::Tup(elem_hir_tys)) => {
- search_stack.extend(elem_tys.iter().map(|k| k.expect_ty()).zip(*elem_hir_tys));
+ search_stack
+ .extend(iter::zip(elem_tys.iter().map(|k| k.expect_ty()), *elem_hir_tys));
}
(ty::Slice(elem_ty), hir::TyKind::Slice(elem_hir_ty))
args: &'hir hir::GenericArgs<'hir>,
search_stack: &mut Vec<(Ty<'tcx>, &'hir hir::Ty<'hir>)>,
) -> Option<&'hir hir::Lifetime> {
- for (kind, hir_arg) in substs.iter().zip(args.args) {
+ for (kind, hir_arg) in iter::zip(substs, args.args) {
match (kind.unpack(), hir_arg) {
(GenericArgKind::Lifetime(r), hir::GenericArg::Lifetime(lt)) => {
if r.to_region_vid() == needle_fr {
use rustc_middle::mir::{InlineAsmOperand, Terminator, TerminatorKind};
use rustc_middle::mir::{Statement, StatementKind};
use rustc_middle::ty::TyCtxt;
+use std::iter;
use crate::dataflow::indexes::BorrowIndex;
self.mutate_place(location, **place, Shallow(None), JustWrite);
}
StatementKind::LlvmInlineAsm(asm) => {
- for (o, output) in asm.asm.outputs.iter().zip(asm.outputs.iter()) {
+ for (o, output) in iter::zip(&asm.asm.outputs, &*asm.outputs) {
if o.is_indirect {
// FIXME(eddyb) indirect inline asm outputs should
// be encoded through MIR place derefs instead.
use smallvec::SmallVec;
use std::cell::RefCell;
use std::collections::BTreeMap;
+use std::iter;
use std::mem;
use std::rc::Rc;
self.mutate_place(location, (**place, span), Shallow(None), JustWrite, flow_state);
}
StatementKind::LlvmInlineAsm(ref asm) => {
- for (o, output) in asm.asm.outputs.iter().zip(asm.outputs.iter()) {
+ for (o, output) in iter::zip(&asm.asm.outputs, &*asm.outputs) {
if o.is_indirect {
// FIXME(eddyb) indirect inline asm outputs should
// be encoded through MIR place derefs instead.
use rustc_middle::mir::{Body, BorrowKind, Local, Place, PlaceElem, PlaceRef, ProjectionElem};
use rustc_middle::ty::{self, TyCtxt};
use std::cmp::max;
+use std::iter;
/// When checking if a place conflicts with another place, this enum is used to influence decisions
/// where a place might be equal or disjoint with another place, such as if `a[i] == a[j]`.
// loop invariant: borrow_c is always either equal to access_c or disjoint from it.
for (i, (borrow_c, &access_c)) in
- borrow_place.projection.iter().zip(access_place.projection.iter()).enumerate()
+ iter::zip(borrow_place.projection, access_place.projection).enumerate()
{
debug!("borrow_conflicts_with_place: borrow_c = {:?}", borrow_c);
let borrow_proj_base = &borrow_place.projection[..i];
if args.len() < sig.inputs().len() || (args.len() > sig.inputs().len() && !sig.c_variadic) {
span_mirbug!(self, term, "call to {:?} with wrong # of args", sig);
}
- for (n, (fn_arg, op_arg)) in sig.inputs().iter().zip(args).enumerate() {
+ for (n, (fn_arg, op_arg)) in iter::zip(sig.inputs(), args).enumerate() {
let op_arg_ty = op_arg.ty(body, self.tcx());
let op_arg_ty = self.normalize(op_arg_ty, term_location);
let category = if from_hir_call {
let global_mapping = iter::once((tcx.lifetimes.re_static, fr_static));
let subst_mapping =
- identity_substs.regions().zip(fr_substs.regions().map(|r| r.to_region_vid()));
+ iter::zip(identity_substs.regions(), fr_substs.regions().map(|r| r.to_region_vid()));
UniversalRegionIndices { indices: global_mapping.chain(subst_mapping).collect() }
}
}
}
- pub fn body(&self) -> &'mir mir::Body<'tcx> {
- self.body
- }
-
/// Returns the underlying `Results`.
pub fn results(&self) -> &Results<'tcx, A> {
&self.results.borrow()
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
+use std::iter;
/// A [partially ordered set][poset] that has a [least upper bound][lub] for any pair of elements
/// in the set.
assert_eq!(self.len(), other.len());
let mut changed = false;
- for (a, b) in self.iter_mut().zip(other.iter()) {
+ for (a, b) in iter::zip(self, other) {
changed |= a.join(b);
}
changed
assert_eq!(self.len(), other.len());
let mut changed = false;
- for (a, b) in self.iter_mut().zip(other.iter()) {
+ for (a, b) in iter::zip(self, other) {
changed |= a.meet(b);
}
changed
use crate::dataflow::{self, fmt::DebugWithContext, GenKill};
use std::fmt;
+use std::iter;
rustc_index::newtype_index! {
pub struct BorrowIndex {
}
mir::StatementKind::LlvmInlineAsm(ref asm) => {
- for (output, kind) in asm.outputs.iter().zip(&asm.asm.outputs) {
+ for (output, kind) in iter::zip(&*asm.outputs, &asm.asm.outputs) {
if !kind.is_indirect && !kind.is_rw {
self.kill_borrows_on_place(trans, *output);
}
use rustc_middle::ty::{self, TyCtxt};
use smallvec::{smallvec, SmallVec};
+use std::iter;
use std::mem;
use super::abs_domain::Lift;
self.create_move_path(**place);
}
StatementKind::LlvmInlineAsm(ref asm) => {
- for (output, kind) in asm.outputs.iter().zip(&asm.asm.outputs) {
+ for (output, kind) in iter::zip(&*asm.outputs, &asm.asm.outputs) {
if !kind.is_indirect {
self.gather_init(output.as_ref(), InitKind::Deep);
}
/// this frame (can happen e.g. during frame initialization, and during unwinding on
/// frames without cleanup code).
/// We basically abuse `Result` as `Either`.
+ ///
+ /// Used by priroda.
pub fn current_loc(&self) -> Result<mir::Location, Span> {
self.loc
}
ty.size.truncate(value)
}
- #[inline]
- pub fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
- ty.is_sized(self.tcx, self.param_env)
- }
-
#[inline]
pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
ty.is_freeze(self.tcx, self.param_env)
pub fn to_scalar(self) -> InterpResult<'tcx, Scalar<Tag>> {
self.to_scalar_or_uninit().check_init()
}
-
- #[inline]
- pub fn to_scalar_pair(self) -> InterpResult<'tcx, (Scalar<Tag>, Scalar<Tag>)> {
- match self {
- Immediate::Scalar(..) => bug!("Got a thin pointer where a scalar pair was expected"),
- Immediate::ScalarPair(a, b) => Ok((a.check_init()?, b.check_init()?)),
- }
- }
}
// ScalarPair needs a type to interpret, so we often have an immediate and a type together
#![feature(decl_macro)]
#![feature(exact_size_is_empty)]
#![feature(exhaustive_patterns)]
+#![feature(iter_zip)]
#![feature(never_type)]
#![feature(map_try_insert)]
#![feature(min_specialization)]
+use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_middle::mir::visit::{PlaceContext, Visitor};
use rustc_middle::mir::*;
+use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, TyCtxt};
use rustc_session::lint::builtin::UNALIGNED_REFERENCES;
+use rustc_span::symbol::sym;
use crate::transform::MirPass;
use crate::util;
+/// Installs this module's `unsafe_derive_on_repr_packed` query provider into
+/// the global `Providers` table, leaving every other provider entry untouched.
+pub(crate) fn provide(providers: &mut Providers) {
+ *providers = Providers { unsafe_derive_on_repr_packed, ..*providers };
+}
+
pub struct CheckPackedRef;
impl<'tcx> MirPass<'tcx> for CheckPackedRef {
source_info: SourceInfo,
}
+/// Query provider: emits the `UNALIGNED_REFERENCES` lint at the span of the
+/// given `#[automatically_derived]` impl (found via `builtin_derive_def_id`),
+/// because the derived code takes references to fields of a `#[repr(packed)]`
+/// type. The message varies with whether the impl has its own generic
+/// (type or const) parameters, since that changes what the user can do about it.
+fn unsafe_derive_on_repr_packed(tcx: TyCtxt<'_>, def_id: LocalDefId) {
+ // The lint machinery wants a `HirId`, not a `LocalDefId`.
+ let lint_hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
+
+ tcx.struct_span_lint_hir(UNALIGNED_REFERENCES, lint_hir_id, tcx.def_span(def_id), |lint| {
+ // FIXME: when we make this a hard error, this should have its
+ // own error code.
+ let message = if tcx.generics_of(def_id).own_requires_monomorphization() {
+ "`#[derive]` can't be used on a `#[repr(packed)]` struct with \
+ type or const parameters (error E0133)"
+ .to_string()
+ } else {
+ "`#[derive]` can't be used on a `#[repr(packed)]` struct that \
+ does not derive Copy (error E0133)"
+ .to_string()
+ };
+ lint.build(&message).emit()
+ });
+}
+
+/// If `def_id` is a method belonging to an impl marked
+/// `#[automatically_derived]` (i.e. an impl generated by `#[derive]`),
+/// returns the `DefId` of that impl; otherwise returns `None`.
+fn builtin_derive_def_id(tcx: TyCtxt<'_>, def_id: DefId) -> Option<DefId> {
+ debug!("builtin_derive_def_id({:?})", def_id);
+ if let Some(impl_def_id) = tcx.impl_of_method(def_id) {
+ // `#[automatically_derived]` is the marker `#[derive]` expansion puts
+ // on the impls it generates.
+ if tcx.has_attr(impl_def_id, sym::automatically_derived) {
+ debug!("builtin_derive_def_id({:?}) - is {:?}", def_id, impl_def_id);
+ Some(impl_def_id)
+ } else {
+ debug!("builtin_derive_def_id({:?}) - not automatically derived", def_id);
+ None
+ }
+ } else {
+ // `def_id` is not a method of any impl at all.
+ debug!("builtin_derive_def_id({:?}) - not a method", def_id);
+ None
+ }
+}
+
impl<'a, 'tcx> Visitor<'tcx> for PackedRefChecker<'a, 'tcx> {
fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
// Make sure we know where in the MIR we are.
fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, _location: Location) {
if context.is_borrow() {
if util::is_disaligned(self.tcx, self.body, self.param_env, *place) {
-                let source_info = self.source_info;
-                let lint_root = self.body.source_scopes[source_info.scope]
-                    .local_data
-                    .as_ref()
-                    .assert_crate_local()
-                    .lint_root;
-                self.tcx.struct_span_lint_hir(
-                    UNALIGNED_REFERENCES,
-                    lint_root,
-                    source_info.span,
-                    |lint| {
-                        lint.build("reference to packed field is unaligned")
-                            .note(
-                                "fields of packed structs are not properly aligned, and creating \
-                                 a misaligned reference is undefined behavior (even if that \
-                                 reference is never dereferenced)",
-                            )
-                            .emit()
-                    },
-                );
+                // Borrows inside a builtin-derive impl are reported through the
+                // `unsafe_derive_on_repr_packed` query at the impl level instead
+                // of at each individual borrow site.
+                let def_id = self.body.source.instance.def_id();
+                if let Some(impl_def_id) = builtin_derive_def_id(self.tcx, def_id) {
+                    // If a method is defined in the local crate,
+                    // the impl containing that method should also be.
+                    self.tcx.ensure().unsafe_derive_on_repr_packed(impl_def_id.expect_local());
+                } else {
+                    // Hand-written code: lint directly at the borrow, using the
+                    // lint root of the enclosing source scope.
+                    let source_info = self.source_info;
+                    let lint_root = self.body.source_scopes[source_info.scope]
+                        .local_data
+                        .as_ref()
+                        .assert_crate_local()
+                        .lint_root;
+                    self.tcx.struct_span_lint_hir(
+                        UNALIGNED_REFERENCES,
+                        lint_root,
+                        source_info.span,
+                        |lint| {
+                            lint.build("reference to packed field is unaligned")
+                                .note(
+                                    "fields of packed structs are not properly aligned, and creating \
+                                     a misaligned reference is undefined behavior (even if that \
+                                     reference is never dereferenced)",
+                                )
+                                .emit()
+                        },
+                    );
+                }
}
}
}
use rustc_middle::ty::cast::CastTy;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{self, TyCtxt};
-use rustc_session::lint::builtin::{SAFE_PACKED_BORROWS, UNSAFE_OP_IN_UNSAFE_FN, UNUSED_UNSAFE};
+use rustc_session::lint::builtin::{UNSAFE_OP_IN_UNSAFE_FN, UNUSED_UNSAFE};
use rustc_session::lint::Level;
-use rustc_span::symbol::sym;
use std::ops::Bound;
use crate::const_eval::is_min_const_fn;
-use crate::util;
pub struct UnsafetyChecker<'a, 'tcx> {
body: &'a Body<'tcx>,
self.check_mut_borrowing_layout_constrained_field(*place, context.is_mutating_use());
}
- // Check for borrows to packed fields.
- // `is_disaligned` already traverses the place to consider all projections after the last
- // `Deref`, so this only needs to be called once at the top level.
- if context.is_borrow() {
- if util::is_disaligned(self.tcx, self.body, self.param_env, *place) {
- self.require_unsafe(
- UnsafetyViolationKind::BorrowPacked,
- UnsafetyViolationDetails::BorrowOfPackedField,
- );
- }
- }
-
// Some checks below need the extra metainfo of the local declaration.
let decl = &self.body.local_decls[place.local];
// `unsafe` blocks are required in safe code
Safety::Safe => {
for violation in violations {
- let mut violation = *violation;
match violation.kind {
UnsafetyViolationKind::GeneralAndConstFn
| UnsafetyViolationKind::General => {}
- UnsafetyViolationKind::BorrowPacked => {
- if self.min_const_fn {
- // const fns don't need to be backwards compatible and can
- // emit these violations as a hard error instead of a backwards
- // compat lint
- violation.kind = UnsafetyViolationKind::General;
- }
- }
- UnsafetyViolationKind::UnsafeFn
- | UnsafetyViolationKind::UnsafeFnBorrowPacked => {
+ UnsafetyViolationKind::UnsafeFn => {
bug!("`UnsafetyViolationKind::UnsafeFn` in an `Safe` context")
}
}
- if !self.violations.contains(&violation) {
- self.violations.push(violation)
+ if !self.violations.contains(violation) {
+ self.violations.push(*violation)
}
}
false
for violation in violations {
let mut violation = *violation;
- if violation.kind == UnsafetyViolationKind::BorrowPacked {
- violation.kind = UnsafetyViolationKind::UnsafeFnBorrowPacked;
- } else {
- violation.kind = UnsafetyViolationKind::UnsafeFn;
- }
+ violation.kind = UnsafetyViolationKind::UnsafeFn;
if !self.violations.contains(&violation) {
self.violations.push(violation)
}
// these unsafe things are stable in const fn
UnsafetyViolationKind::GeneralAndConstFn => {}
// these things are forbidden in const fns
- UnsafetyViolationKind::General
- | UnsafetyViolationKind::BorrowPacked => {
+ UnsafetyViolationKind::General => {
let mut violation = *violation;
// const fns don't need to be backwards compatible and can
// emit these violations as a hard error instead of a backwards
self.violations.push(violation)
}
}
- UnsafetyViolationKind::UnsafeFn
- | UnsafetyViolationKind::UnsafeFnBorrowPacked => bug!(
+ UnsafetyViolationKind::UnsafeFn => bug!(
"`UnsafetyViolationKind::UnsafeFn` in an `ExplicitUnsafe` context"
),
}
ty::WithOptConstParam { did, const_param_did: Some(param_did) },
)
},
- unsafe_derive_on_repr_packed,
..*providers
};
}
})
}
-fn unsafe_derive_on_repr_packed(tcx: TyCtxt<'_>, def_id: LocalDefId) {
- let lint_hir_id = tcx.hir().local_def_id_to_hir_id(def_id);
-
- tcx.struct_span_lint_hir(SAFE_PACKED_BORROWS, lint_hir_id, tcx.def_span(def_id), |lint| {
- // FIXME: when we make this a hard error, this should have its
- // own error code.
- let message = if tcx.generics_of(def_id).own_requires_monomorphization() {
- "`#[derive]` can't be used on a `#[repr(packed)]` struct with \
- type or const parameters (error E0133)"
- .to_string()
- } else {
- "`#[derive]` can't be used on a `#[repr(packed)]` struct that \
- does not derive Copy (error E0133)"
- .to_string()
- };
- lint.build(&message).emit()
- });
-}
-
/// Returns the `HirId` for an enclosing scope that is also `unsafe`.
fn is_enclosed(
tcx: TyCtxt<'_>,
});
}
-fn builtin_derive_def_id(tcx: TyCtxt<'_>, def_id: DefId) -> Option<DefId> {
- debug!("builtin_derive_def_id({:?})", def_id);
- if let Some(impl_def_id) = tcx.impl_of_method(def_id) {
- if tcx.has_attr(impl_def_id, sym::automatically_derived) {
- debug!("builtin_derive_def_id({:?}) - is {:?}", def_id, impl_def_id);
- Some(impl_def_id)
- } else {
- debug!("builtin_derive_def_id({:?}) - not automatically derived", def_id);
- None
- }
- } else {
- debug!("builtin_derive_def_id({:?}) - not a method", def_id);
- None
- }
-}
-
pub fn check_unsafety(tcx: TyCtxt<'_>, def_id: LocalDefId) {
debug!("check_unsafety({:?})", def_id);
.note(note)
.emit();
}
- UnsafetyViolationKind::BorrowPacked => {
- if let Some(impl_def_id) = builtin_derive_def_id(tcx, def_id.to_def_id()) {
- // If a method is defined in the local crate,
- // the impl containing that method should also be.
- tcx.ensure().unsafe_derive_on_repr_packed(impl_def_id.expect_local());
- } else {
- tcx.struct_span_lint_hir(
- SAFE_PACKED_BORROWS,
- lint_root,
- source_info.span,
- |lint| {
- lint.build(&format!(
- "{} is unsafe and requires unsafe{} block (error E0133)",
- description, unsafe_fn_msg,
- ))
- .note(note)
- .emit()
- },
- )
- }
- }
UnsafetyViolationKind::UnsafeFn => tcx.struct_span_lint_hir(
UNSAFE_OP_IN_UNSAFE_FN,
lint_root,
.emit();
},
),
- UnsafetyViolationKind::UnsafeFnBorrowPacked => {
- // When `unsafe_op_in_unsafe_fn` is disallowed, the behavior of safe and unsafe functions
- // should be the same in terms of warnings and errors. Therefore, with `#[warn(safe_packed_borrows)]`,
- // a safe packed borrow should emit a warning *but not an error* in an unsafe function,
- // just like in a safe function, even if `unsafe_op_in_unsafe_fn` is `deny`.
- //
- // Also, `#[warn(unsafe_op_in_unsafe_fn)]` can't cause any new errors. Therefore, with
- // `#[deny(safe_packed_borrows)]` and `#[warn(unsafe_op_in_unsafe_fn)]`, a packed borrow
- // should only issue a warning for the sake of backwards compatibility.
- //
- // The solution those 2 expectations is to always take the minimum of both lints.
- // This prevent any new errors (unless both lints are explicitly set to `deny`).
- let lint = if tcx.lint_level_at_node(SAFE_PACKED_BORROWS, lint_root).0
- <= tcx.lint_level_at_node(UNSAFE_OP_IN_UNSAFE_FN, lint_root).0
- {
- SAFE_PACKED_BORROWS
- } else {
- UNSAFE_OP_IN_UNSAFE_FN
- };
- tcx.struct_span_lint_hir(&lint, lint_root, source_info.span, |lint| {
- lint.build(&format!(
- "{} is unsafe and requires unsafe block (error E0133)",
- description,
- ))
- .span_label(source_info.span, description)
- .note(note)
- .emit();
- })
- }
}
}
use rustc_middle::mir::{self, BasicBlock, TerminatorKind};
use rustc_middle::ty::TyCtxt;
+use std::iter;
use std::lazy::SyncOnceCell;
pub const NESTED_INDENT: &str = " ";
let edge_counters = from_terminator
.successors()
.map(|&successor_bb| graphviz_data.get_edge_counter(from_bcb, successor_bb));
- edge_labels
- .iter()
- .zip(edge_counters)
+ iter::zip(&edge_labels, edge_counters)
.map(|(label, some_counter)| {
if let Some(counter) = some_counter {
format!("{}\n{}", label, debug_counters.format_counter(counter))
//! This pass finds basic blocks that are completely equal,
//! and replaces all uses with just one of them.
-use std::{collections::hash_map::Entry, hash::Hash, hash::Hasher};
+use std::{collections::hash_map::Entry, hash::Hash, hash::Hasher, iter};
use crate::transform::MirPass;
fn eq(&self, other: &Self) -> bool {
self.basic_block_data.statements.len() == other.basic_block_data.statements.len()
&& &self.basic_block_data.terminator().kind == &other.basic_block_data.terminator().kind
- && self
- .basic_block_data
- .statements
- .iter()
- .zip(&other.basic_block_data.statements)
+ && iter::zip(&self.basic_block_data.statements, &other.basic_block_data.statements)
.all(|(x, y)| statement_eq(&x.kind, &y.kind))
}
}
use crate::transform::MirPass;
use rustc_middle::mir::*;
use rustc_middle::ty::TyCtxt;
+use std::iter;
use super::simplify::simplify_cfg;
if first_stmts.len() != scnd_stmts.len() {
continue;
}
- for (f, s) in first_stmts.iter().zip(scnd_stmts.iter()) {
+ for (f, s) in iter::zip(first_stmts, scnd_stmts) {
match (&f.kind, &s.kind) {
// If two statements are exactly the same, we can optimize.
(f_s, s_s) if f_s == s_s => {}
// and bb_idx has a different terminator from both of them.
let (from, first, second) = bbs.pick3_mut(bb_idx, first, second);
- let new_stmts = first.statements.iter().zip(second.statements.iter()).map(|(f, s)| {
+ let new_stmts = iter::zip(&first.statements, &second.statements).map(|(f, s)| {
match (&f.kind, &s.kind) {
(f_s, s_s) if f_s == s_s => (*f).clone(),
pub(crate) fn provide(providers: &mut Providers) {
self::check_unsafety::provide(providers);
+ self::check_packed_ref::provide(providers);
*providers = Providers {
mir_keys,
mir_const,
use rustc_middle::mir::*;
use rustc_middle::ty::{self, TyCtxt};
+use rustc_target::abi::Align;
/// Returns `true` if this place is allowed to be less aligned
/// than its containing struct (because it is within a packed
L: HasLocalDecls<'tcx>,
{
debug!("is_disaligned({:?})", place);
- if !is_within_packed(tcx, local_decls, place) {
- debug!("is_disaligned({:?}) - not within packed", place);
- return false;
- }
+ let pack = match is_within_packed(tcx, local_decls, place) {
+ None => {
+ debug!("is_disaligned({:?}) - not within packed", place);
+ return false;
+ }
+ Some(pack) => pack,
+ };
let ty = place.ty(local_decls, tcx).ty;
match tcx.layout_raw(param_env.and(ty)) {
- Ok(layout) if layout.align.abi.bytes() == 1 => {
- // if the alignment is 1, the type can't be further
- // disaligned.
- debug!("is_disaligned({:?}) - align = 1", place);
+ Ok(layout) if layout.align.abi <= pack => {
+ // If the packed alignment is greater or equal to the field alignment, the type won't be
+ // further disaligned.
+ debug!(
+ "is_disaligned({:?}) - align = {}, packed = {}; not disaligned",
+ place,
+ layout.align.abi.bytes(),
+ pack.bytes()
+ );
false
}
_ => {
}
}
-fn is_within_packed<'tcx, L>(tcx: TyCtxt<'tcx>, local_decls: &L, place: Place<'tcx>) -> bool
+fn is_within_packed<'tcx, L>(
+ tcx: TyCtxt<'tcx>,
+ local_decls: &L,
+ place: Place<'tcx>,
+) -> Option<Align>
where
L: HasLocalDecls<'tcx>,
{
ProjectionElem::Field(..) => {
let ty = place_base.ty(local_decls, tcx).ty;
match ty.kind() {
- ty::Adt(def, _) if def.repr.packed() => return true,
+ ty::Adt(def, _) => return def.repr.pack,
_ => {}
}
}
}
}
- false
+ None
}
}
}
- pub fn new_subgraph(
- graph: &'a G,
- graphviz_name: &str,
- node_content_fn: NodeContentFn,
- edge_labels_fn: EdgeLabelsFn,
- ) -> Self {
- Self {
- graph,
- is_subgraph: true,
- graphviz_name: graphviz_name.to_owned(),
- graph_label: None,
- node_content_fn,
- edge_labels_fn,
- }
- }
-
pub fn set_graph_label(&mut self, graph_label: &str) {
self.graph_label = Some(graph_label.to_owned());
}
new_locals: Vec<LocalDecl<'tcx>>,
resume_block: BasicBlock,
next_local: usize,
- make_nop: Vec<Location>,
}
impl<'tcx> MirPatch<'tcx> {
new_locals: vec![],
next_local: body.local_decls.len(),
resume_block: START_BLOCK,
- make_nop: vec![],
};
// make sure the MIR we create has a resume block. It is
self.add_statement(loc, StatementKind::Assign(box (place, rv)));
}
- pub fn make_nop(&mut self, loc: Location) {
- self.make_nop.push(loc);
- }
-
pub fn apply(self, body: &mut Body<'tcx>) {
- debug!("MirPatch: make nops at: {:?}", self.make_nop);
- for loc in self.make_nop {
- body.make_statement_nop(loc);
- }
debug!(
"MirPatch: {:?} new temps, starting from index {}: {:?}",
self.new_locals.len(),
use rustc_index::vec::Idx;
+use std::iter;
+
/// The "outermost" place that holds this value.
#[derive(Copy, Clone, Debug, PartialEq)]
crate enum PlaceBase {
return false;
}
- proj_possible_ancestor.iter().zip(proj_capture).all(|(a, b)| a == b)
+ iter::zip(proj_possible_ancestor, proj_capture).all(|(a, b)| a == b)
}
/// Computes the index of a capture within the desugared closure provided the closure's
use rustc_index::vec::Idx;
use rustc_middle::mir::*;
use rustc_middle::ty::{self, CanonicalUserTypeAnnotation};
+use std::iter;
impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Compile `expr`, storing the result into `destination`, which
// MIR does not natively support FRU, so for each
// base-supplied field, generate an operand that
// reads it from the base.
- field_names
- .into_iter()
- .zip(field_types.into_iter())
+ iter::zip(field_names, *field_types)
.map(|(n, ty)| match fields_map.get(&n) {
Some(v) => v.clone(),
None => {
#![feature(control_flow_enum)]
#![feature(crate_visibility_modifier)]
#![feature(bool_to_option)]
+#![feature(iter_zip)]
#![feature(once_cell)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![recursion_limit = "256"]
}
impl CreateTokenStream for LazyTokenStreamImpl {
fn create_token_stream(&self) -> TokenStream {
-        // The token produced by the final call to `next` or `next_desugared`
-        // was not actually consumed by the callback. The combination
-        // of chaining the initial token and using `take` produces the desired
-        // result - we produce an empty `TokenStream` if no calls were made,
-        // and omit the final token otherwise.
+        // No calls were made, so the captured stream is empty.
+        if self.num_calls == 0 {
+            return TokenStream::new(vec![]);
+        }
+
let mut cursor_snapshot = self.cursor_snapshot.clone();
+        // Don't skip `None` delimiters, since we want to pass them to
+        // proc macros. Normally, we'll end up capturing `TokenKind::Interpolated`,
+        // which gets converted to a `None`-delimited group when we invoke
+        // a proc-macro. However, it's possible to already have a `None`-delimited
+        // group in the stream (such as when parsing the output of a proc-macro,
+        // or in certain unusual cases with cross-crate `macro_rules!` macros).
+        cursor_snapshot.skip_none_delims = false;
+
+        // The token produced by the final call to `next` or `next_desugared`
+        // was not actually consumed by the callback.
+        let num_calls = self.num_calls - 1;
+        let mut i = 0;
+        let tokens =
+            std::iter::once(self.start_token.clone()).chain(std::iter::from_fn(|| {
+                if i >= num_calls {
+                    return None;
+                }
+
+                let token = if self.desugar_doc_comments {
cursor_snapshot.next_desugared()
} else {
cursor_snapshot.next()
+                };
+
+                // When the `LazyTokenStreamImpl` was originally produced, we did *not*
+                // include `NoDelim` tokens in `num_calls`, since they are normally ignored
+                // by the parser. Therefore, we only increment our counter for other types of tokens.
+                if !matches!(
+                    token.0.kind,
+                    token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
+                ) {
+                    i += 1;
}
-            }))
-            .take(self.num_calls);
+                Some(token)
+            }));
make_token_stream(tokens, self.append_unglued_token.clone())
}
// appended to the captured stream when
// we evaluate a `LazyTokenStream`
append_unglued_token: Option<TreeAndSpacing>,
+ // If `true`, skip the delimiters for `None`-delimited groups,
+ // and just yield the inner tokens. This is `true` during
+ // normal parsing, since the parser code is not currently prepared
+ // to handle `None` delimiters. When capturing a `TokenStream`,
+ // however, we want to handle `None`-delimiters, since
+ // proc-macros always see `None`-delimited groups.
+ skip_none_delims: bool,
}
#[derive(Clone)]
}
impl TokenCursorFrame {
-    fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self {
+    // When `skip_none_delims` is true, the open/close markers of a
+    // `None`-delimited group are pre-marked as consumed, so the cursor
+    // yields only the inner tokens; when false, they are yielded as real
+    // delimiter tokens (needed when capturing a `TokenStream` for proc macros).
+    fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream, skip_none_delims: bool) -> Self {
TokenCursorFrame {
delim,
span,
-            open_delim: delim == token::NoDelim,
+            open_delim: delim == token::NoDelim && skip_none_delims,
tree_cursor: tts.into_trees(),
-            close_delim: delim == token::NoDelim,
+            close_delim: delim == token::NoDelim && skip_none_delims,
}
}
}
return (token, spacing);
}
TokenTree::Delimited(sp, delim, tts) => {
- let frame = TokenCursorFrame::new(sp, delim, tts);
+ let frame = TokenCursorFrame::new(sp, delim, tts, self.skip_none_delims);
self.stack.push(mem::replace(&mut self.frame, frame));
}
}
.cloned()
.collect::<TokenStream>()
},
+ self.skip_none_delims,
),
));
prev_token: Token::dummy(),
restrictions: Restrictions::empty(),
expected_tokens: Vec::new(),
+ // Skip over the delimiters for `None`-delimited groups
token_cursor: TokenCursor {
- frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens),
+ frame: TokenCursorFrame::new(
+ DelimSpan::dummy(),
+ token::NoDelim,
+ tokens,
+ /* skip_none_delims */ true,
+ ),
stack: Vec::new(),
num_next_calls: 0,
desugar_doc_comments,
append_unglued_token: None,
+ skip_none_delims: true,
},
desugar_doc_comments,
unmatched_angle_bracket_count: 0,
Some(String(self.string(pos + 1)))
} else {
let arg = self.argument();
- if let Some(end) = self.must_consume('}') {
- let start = self.to_span_index(pos);
- let end = self.to_span_index(end + 1);
+ if let Some(rbrace_byte_idx) = self.must_consume('}') {
+ let lbrace_inner_offset = self.to_span_index(pos);
+ let rbrace_inner_offset = self.to_span_index(rbrace_byte_idx);
if self.is_literal {
- self.arg_places.push(start.to(end));
+ self.arg_places.push(
+ lbrace_inner_offset.to(InnerOffset(rbrace_inner_offset.0 + 1)),
+ );
}
}
Some(NextArgument(arg))
#![feature(const_panic)]
#![feature(crate_visibility_modifier)]
#![feature(in_band_lifetimes)]
+#![feature(iter_zip)]
#![feature(nll)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![recursion_limit = "256"]
use std::collections::VecDeque;
use std::io;
use std::io::prelude::*;
+use std::iter;
use std::rc::Rc;
mod rwu_table;
let ia = &asm.inner;
let outputs = asm.outputs_exprs;
let inputs = asm.inputs_exprs;
- let succ = ia.outputs.iter().zip(outputs).rev().fold(succ, |succ, (o, output)| {
+ let succ = iter::zip(&ia.outputs, outputs).rev().fold(succ, |succ, (o, output)| {
// see comment on places
// in propagate_through_place_components()
if o.is_indirect {
}
// Output operands must be places
- for (o, output) in asm.inner.outputs.iter().zip(asm.outputs_exprs) {
+ for (o, output) in iter::zip(&asm.inner.outputs, asm.outputs_exprs) {
if !o.is_indirect {
this.check_place(output);
}
use crate::liveness::{LiveNode, Variable};
+use std::iter;
#[derive(Clone, Copy)]
pub(super) struct RWU {
let mut changed = false;
let (dst_row, src_row) = self.pick2_rows_mut(dst, src);
- for (dst_word, src_word) in dst_row.iter_mut().zip(src_row.iter()) {
+ for (dst_word, src_word) in iter::zip(dst_row, &*src_row) {
let old = *dst_word;
let new = *dst_word | src_word;
*dst_word = new;
use rustc_span::{Span, DUMMY_SP};
use std::cmp::Ordering;
+use std::iter;
use std::mem::replace;
use std::num::NonZeroU32;
{
// Explicit version of iter::order::lt to handle parse errors properly
for (dep_v, stab_v) in
- dep_since.as_str().split('.').zip(stab_since.as_str().split('.'))
+ iter::zip(dep_since.as_str().split('.'), stab_since.as_str().split('.'))
{
match stab_v.parse::<u64>() {
Err(_) => {
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_errors::{DiagnosticBuilder, Handler};
-use rustc_hir::def_id::CrateNum;
-use rustc_index::vec::IndexVec;
+use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::dep_graph;
use rustc_middle::ich::StableHashingContext;
use rustc_middle::ty::query::{query_keys, query_storage, query_stored, query_values};
#[inline]
fn compute(tcx: QueryCtxt<'tcx>, key: Self::Key) -> Self::Value {
- let provider = tcx.queries.providers.get(key.query_crate())
- // HACK(eddyb) it's possible crates may be loaded after
- // the query engine is created, and because crate loading
- // is not yet integrated with the query engine, such crates
- // would be missing appropriate entries in `providers`.
- .unwrap_or(&tcx.queries.fallback_extern_providers)
- .$name;
+ let is_local = key.query_crate() == LOCAL_CRATE;
+ let provider = if is_local {
+ tcx.queries.local_providers.$name
+ } else {
+ tcx.queries.extern_providers.$name
+ };
provider(*tcx, key)
}
(tcx: $tcx:tt,
input: ($(([$($modifiers:tt)*] [$($attr:tt)*] [$name:ident]))*)) => {
pub struct Queries<$tcx> {
- providers: IndexVec<CrateNum, Providers>,
- fallback_extern_providers: Box<Providers>,
+ local_providers: Box<Providers>,
+ extern_providers: Box<Providers>,
$($(#[$attr])* $name: QueryState<
crate::dep_graph::DepKind,
impl<$tcx> Queries<$tcx> {
pub fn new(
- providers: IndexVec<CrateNum, Providers>,
- fallback_extern_providers: Providers,
+ local_providers: Providers,
+ extern_providers: Providers,
) -> Self {
Queries {
- providers,
- fallback_extern_providers: Box::new(fallback_extern_providers),
+ local_providers: Box::new(local_providers),
+ extern_providers: Box::new(extern_providers),
$($name: Default::default()),*
}
}
use crate::dep_graph::{DepContext, DepNodeIndex};
use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::sync::HashMapExt;
use rustc_data_structures::sync::Lock;
use std::hash::Hash;
pub fn insert(&self, key: Key, dep_node: DepNodeIndex, value: Value) {
self.hashmap.borrow_mut().insert(key, WithDepNode::new(dep_node, value));
}
-
- pub fn insert_same(&self, key: Key, dep_node: DepNodeIndex, value: Value)
- where
- Value: Eq,
- {
- self.hashmap.borrow_mut().insert_same(key, WithDepNode::new(dep_node, value));
- }
}
#[derive(Clone, Eq, PartialEq)]
}
}
+ #[cfg(debug_assertions)]
pub fn test<K: DepKind>(&self, source: &DepNode<K>, target: &DepNode<K>) -> bool {
self.source.test(source) && self.target.test(target)
}
self.data.is_some() && self.dep_node_index_of_opt(dep_node).is_some()
}
- #[inline]
- pub fn dep_node_of(&self, dep_node_index: DepNodeIndex) -> DepNode<K> {
+ #[cfg(debug_assertions)]
+ fn dep_node_of(&self, dep_node_index: DepNodeIndex) -> DepNode<K> {
let data = self.data.as_ref().unwrap();
let previous = &data.previous;
let data = data.current.data.lock();
self.data.nodes[dep_node_index]
}
- #[inline]
- pub fn node_to_index(&self, dep_node: &DepNode<K>) -> SerializedDepNodeIndex {
- self.index[dep_node]
- }
-
#[inline]
pub fn node_to_index_opt(&self, dep_node: &DepNode<K>) -> Option<SerializedDepNodeIndex> {
self.index.get(dep_node).cloned()
DepGraphQuery { graph, indices }
}
- pub fn contains_node(&self, node: &DepNode<K>) -> bool {
- self.indices.contains_key(&node)
- }
-
pub fn nodes(&self) -> Vec<&DepNode<K>> {
self.graph.all_nodes().iter().map(|n| &n.data).collect()
}
#![feature(const_panic)]
#![feature(core_intrinsics)]
#![feature(hash_raw_entry)]
+#![feature(iter_zip)]
#![feature(min_specialization)]
#![feature(stmt_expr_attributes)]
rustc_data_structures::{jobserver, OnDrop},
rustc_rayon_core as rayon_core,
rustc_span::DUMMY_SP,
- std::iter::FromIterator,
+ std::iter::{self, FromIterator},
std::{mem, process},
};
spans.rotate_right(1);
// Zip them back together
- let mut stack: Vec<_> = spans.into_iter().zip(queries).collect();
+ let mut stack: Vec<_> = iter::zip(spans, queries).collect();
// Remove the queries in our cycle from the list of jobs to look at
for r in &stack {
/// Lookup typo candidate in scope for a macro or import.
fn early_lookup_typo_candidate(
&mut self,
- scope_set: ScopeSet,
+ scope_set: ScopeSet<'a>,
parent_scope: &ParentScope<'a>,
ident: Ident,
filter_fn: &impl Fn(Res) -> bool,
let root_module = this.resolve_crate_root(root_ident);
this.add_module_candidates(root_module, &mut suggestions, filter_fn);
}
- Scope::Module(module) => {
+ Scope::Module(module, _) => {
this.add_module_candidates(module, &mut suggestions, filter_fn);
}
Scope::RegisteredAttrs => {
ExprKind::Call(ref callee, ref arguments) => {
self.resolve_expr(callee, Some(expr));
- let const_args = self.r.legacy_const_generic_args(callee).unwrap_or(Vec::new());
+ let const_args = self.r.legacy_const_generic_args(callee).unwrap_or_default();
for (idx, argument) in arguments.iter().enumerate() {
// Constant arguments need to be treated as AnonConst since
// that is how they will be later lowered to HIR.
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{BytePos, MultiSpan, Span, DUMMY_SP};
+use std::iter;
+
use tracing::debug;
type Res = def::Res<ast::NodeId>;
PathResult::Module(ModuleOrUniformRoot::Module(module)) => module.res(),
_ => None,
}
- .map_or(String::new(), |res| format!("{} ", res.descr()));
+ .map_or_else(String::new, |res| format!("{} ", res.descr()));
(mod_prefix, format!("`{}`", Segment::names_to_string(mod_path)))
};
(
if let Some(spans) =
field_spans.filter(|spans| spans.len() > 0 && fields.len() == spans.len())
{
- let non_visible_spans: Vec<Span> = fields
- .iter()
- .zip(spans.iter())
+ let non_visible_spans: Vec<Span> = iter::zip(&fields, &spans)
.filter(|(vis, _)| {
!self.r.is_accessible_from(**vis, self.parent_scope.module)
})
if let Some(span) = self.def_span(def_id) {
err.span_label(span, &format!("`{}` defined here", path_str));
}
- let fields =
- self.r.field_names.get(&def_id).map_or("/* fields */".to_string(), |fields| {
- vec!["_"; fields.len()].join(", ")
- });
+ let fields = self.r.field_names.get(&def_id).map_or_else(
+ || "/* fields */".to_string(),
+ |fields| vec!["_"; fields.len()].join(", "),
+ );
err.span_suggestion(
span,
"use the tuple variant pattern syntax instead",
#![feature(control_flow_enum)]
#![feature(crate_visibility_modifier)]
#![feature(format_args_capture)]
+#![feature(iter_zip)]
#![feature(nll)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![recursion_limit = "256"]
use rustc_arena::{DroplessArena, TypedArena};
use rustc_ast::node_id::NodeMap;
use rustc_ast::ptr::P;
-use rustc_ast::unwrap_or;
use rustc_ast::visit::{self, Visitor};
use rustc_ast::{self as ast, NodeId};
use rustc_ast::{Crate, CRATE_NODE_ID};
use rustc_hir::def::{self, CtorOf, DefKind, NonMacroAttrKind, PartialRes};
use rustc_hir::def_id::{CrateNum, DefId, DefIdMap, LocalDefId, CRATE_DEF_INDEX};
use rustc_hir::definitions::{DefKey, DefPathData, Definitions};
-use rustc_hir::{PrimTy, TraitCandidate};
+use rustc_hir::TraitCandidate;
use rustc_index::vec::IndexVec;
use rustc_metadata::creader::{CStore, CrateLoader};
use rustc_middle::hir::exports::ExportMap;
DeriveHelpersCompat,
MacroRules(MacroRulesScopeRef<'a>),
CrateRoot,
- Module(Module<'a>),
+ // The node ID is for reporting the `PROC_MACRO_DERIVE_RESOLUTION_FALLBACK`
+ // lint if it should be reported.
+ Module(Module<'a>, Option<NodeId>),
RegisteredAttrs,
MacroUsePrelude,
BuiltinAttrs,
/// with different restrictions when looking up the resolution.
/// This enum is currently used only for early resolution (imports and macros),
/// but not for late resolution yet.
-enum ScopeSet {
+#[derive(Clone, Copy)]
+enum ScopeSet<'a> {
/// All scopes with the given namespace.
All(Namespace, /*is_import*/ bool),
/// Crate root, then extern prelude (used for mixed 2015-2018 mode in macros).
AbsolutePath(Namespace),
/// All scopes with macro namespace and the given macro kind restriction.
Macro(MacroKind),
+ /// All scopes with the given namespace, used for partially performing late resolution.
+ /// The node id enables lints and is used for reporting them.
+ Late(Namespace, Module<'a>, Option<NodeId>),
}
/// Everything you need to know about a name's location to resolve it.
self.visit_scopes(ScopeSet::All(TypeNS, false), parent_scope, ctxt, |this, scope, _, _| {
match scope {
- Scope::Module(module) => {
+ Scope::Module(module, _) => {
this.traits_in_module(module, assoc_item, &mut found_traits);
}
Scope::StdLibPrelude => {
/// If the callback returns `Some` result, we stop visiting scopes and return it.
fn visit_scopes<T>(
&mut self,
- scope_set: ScopeSet,
+ scope_set: ScopeSet<'a>,
parent_scope: &ParentScope<'a>,
ctxt: SyntaxContext,
mut visitor: impl FnMut(
ScopeSet::All(ns, _) => (ns, None, false),
ScopeSet::AbsolutePath(ns) => (ns, None, true),
ScopeSet::Macro(macro_kind) => (MacroNS, Some(macro_kind), false),
+ ScopeSet::Late(ns, ..) => (ns, None, false),
+ };
+ let module = match scope_set {
+ // Start with the specified module.
+ ScopeSet::Late(_, module, _) => module,
+ // Jump out of trait or enum modules, they do not act as scopes.
+ _ => parent_scope.module.nearest_item_scope(),
};
- // Jump out of trait or enum modules, they do not act as scopes.
- let module = parent_scope.module.nearest_item_scope();
let mut scope = match ns {
_ if is_absolute_path => Scope::CrateRoot,
- TypeNS | ValueNS => Scope::Module(module),
+ TypeNS | ValueNS => Scope::Module(module, None),
MacroNS => Scope::DeriveHelpers(parent_scope.expansion),
};
let mut ctxt = ctxt.normalize_to_macros_2_0();
MacroRulesScope::Invocation(invoc_id) => {
Scope::MacroRules(self.invocation_parent_scopes[&invoc_id].macro_rules)
}
- MacroRulesScope::Empty => Scope::Module(module),
+ MacroRulesScope::Empty => Scope::Module(module, None),
},
Scope::CrateRoot => match ns {
TypeNS => {
}
ValueNS | MacroNS => break,
},
- Scope::Module(module) => {
+ Scope::Module(module, prev_lint_id) => {
use_prelude = !module.no_implicit_prelude;
- match self.hygienic_lexical_parent(module, &mut ctxt) {
- Some(parent_module) => Scope::Module(parent_module),
+ let derive_fallback_lint_id = match scope_set {
+ ScopeSet::Late(.., lint_id) => lint_id,
+ _ => None,
+ };
+ match self.hygienic_lexical_parent(module, &mut ctxt, derive_fallback_lint_id) {
+ Some((parent_module, lint_id)) => {
+ Scope::Module(parent_module, lint_id.or(prev_lint_id))
+ }
None => {
ctxt.adjust(ExpnId::root());
match ns {
ribs: &[Rib<'a>],
) -> Option<LexicalScopeBinding<'a>> {
assert!(ns == TypeNS || ns == ValueNS);
+ let orig_ident = ident;
if ident.name == kw::Empty {
return Some(LexicalScopeBinding::Res(Res::Err));
}
_ => continue,
};
- let item = self.resolve_ident_in_module_unadjusted(
- ModuleOrUniformRoot::Module(module),
- ident,
- ns,
- parent_scope,
- record_used,
- path_span,
- );
- if let Ok(binding) = item {
- // The ident resolves to an item.
- return Some(LexicalScopeBinding::Item(binding));
- }
-
match module.kind {
ModuleKind::Block(..) => {} // We can see through blocks
_ => break,
}
- }
- ident = normalized_ident;
- let mut poisoned = None;
- loop {
- let mut span_data = ident.span.data();
- let opt_module = if let Some(node_id) = record_used_id {
- self.hygienic_lexical_parent_with_compatibility_fallback(
- module,
- &mut span_data.ctxt,
- node_id,
- &mut poisoned,
- )
- } else {
- self.hygienic_lexical_parent(module, &mut span_data.ctxt)
- };
- ident.span = span_data.span();
- module = unwrap_or!(opt_module, break);
- let adjusted_parent_scope = &ParentScope { module, ..*parent_scope };
- let result = self.resolve_ident_in_module_unadjusted(
+ let item = self.resolve_ident_in_module_unadjusted(
ModuleOrUniformRoot::Module(module),
ident,
ns,
- adjusted_parent_scope,
+ parent_scope,
record_used,
path_span,
);
-
- match result {
- Ok(binding) => {
- if let Some(node_id) = poisoned {
- self.lint_buffer.buffer_lint_with_diagnostic(
- lint::builtin::PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
- node_id,
- ident.span,
- &format!("cannot find {} `{}` in this scope", ns.descr(), ident),
- BuiltinLintDiagnostics::ProcMacroDeriveResolutionFallback(ident.span),
- );
- }
- return Some(LexicalScopeBinding::Item(binding));
- }
- Err(Determined) => continue,
- Err(Undetermined) => {
- span_bug!(ident.span, "undetermined resolution during main resolution pass")
- }
- }
- }
-
- if !module.no_implicit_prelude {
- ident.span.adjust(ExpnId::root());
- if ns == TypeNS {
- if let Some(binding) = self.extern_prelude_get(ident, !record_used) {
- return Some(LexicalScopeBinding::Item(binding));
- }
- if let Some(ident) = self.registered_tools.get(&ident) {
- let binding =
- (Res::ToolMod, ty::Visibility::Public, ident.span, ExpnId::root())
- .to_name_binding(self.arenas);
- return Some(LexicalScopeBinding::Item(binding));
- }
- }
- if let Some(prelude) = self.prelude {
- if let Ok(binding) = self.resolve_ident_in_module_unadjusted(
- ModuleOrUniformRoot::Module(prelude),
- ident,
- ns,
- parent_scope,
- false,
- path_span,
- ) {
- return Some(LexicalScopeBinding::Item(binding));
- }
- }
- }
-
- if ns == TypeNS {
- if let Some(prim_ty) = PrimTy::from_name(ident.name) {
- let binding =
- (Res::PrimTy(prim_ty), ty::Visibility::Public, DUMMY_SP, ExpnId::root())
- .to_name_binding(self.arenas);
+ if let Ok(binding) = item {
+ // The ident resolves to an item.
return Some(LexicalScopeBinding::Item(binding));
}
}
- None
+ self.early_resolve_ident_in_lexical_scope(
+ orig_ident,
+ ScopeSet::Late(ns, module, record_used_id),
+ parent_scope,
+ record_used,
+ record_used,
+ path_span,
+ )
+ .ok()
+ .map(LexicalScopeBinding::Item)
}
fn hygienic_lexical_parent(
&mut self,
module: Module<'a>,
ctxt: &mut SyntaxContext,
- ) -> Option<Module<'a>> {
+ derive_fallback_lint_id: Option<NodeId>,
+ ) -> Option<(Module<'a>, Option<NodeId>)> {
if !module.expansion.outer_expn_is_descendant_of(*ctxt) {
- return Some(self.macro_def_scope(ctxt.remove_mark()));
+ return Some((self.macro_def_scope(ctxt.remove_mark()), None));
}
if let ModuleKind::Block(..) = module.kind {
- return Some(module.parent.unwrap().nearest_item_scope());
- }
-
- None
- }
-
- fn hygienic_lexical_parent_with_compatibility_fallback(
- &mut self,
- module: Module<'a>,
- ctxt: &mut SyntaxContext,
- node_id: NodeId,
- poisoned: &mut Option<NodeId>,
- ) -> Option<Module<'a>> {
- if let module @ Some(..) = self.hygienic_lexical_parent(module, ctxt) {
- return module;
+ return Some((module.parent.unwrap().nearest_item_scope(), None));
}
// We need to support the next case under a deprecation warning
// ---- end
// ```
// So we have to fall back to the module's parent during lexical resolution in this case.
- if let Some(parent) = module.parent {
- // Inner module is inside the macro, parent module is outside of the macro.
- if module.expansion != parent.expansion
- && module.expansion.is_descendant_of(parent.expansion)
- {
- // The macro is a proc macro derive
- if let Some(def_id) = module.expansion.expn_data().macro_def_id {
- let ext = self.get_macro_by_def_id(def_id);
- if ext.builtin_name.is_none()
- && ext.macro_kind() == MacroKind::Derive
- && parent.expansion.outer_expn_is_descendant_of(*ctxt)
- {
- *poisoned = Some(node_id);
- return module.parent;
+ if derive_fallback_lint_id.is_some() {
+ if let Some(parent) = module.parent {
+ // Inner module is inside the macro, parent module is outside of the macro.
+ if module.expansion != parent.expansion
+ && module.expansion.is_descendant_of(parent.expansion)
+ {
+ // The macro is a proc macro derive
+ if let Some(def_id) = module.expansion.expn_data().macro_def_id {
+ let ext = self.get_macro_by_def_id(def_id);
+ if ext.builtin_name.is_none()
+ && ext.macro_kind() == MacroKind::Derive
+ && parent.expansion.outer_expn_is_descendant_of(*ctxt)
+ {
+ return Some((parent, derive_fallback_lint_id));
+ }
}
}
}
use rustc_hir::PrimTy;
use rustc_middle::middle::stability;
use rustc_middle::ty;
-use rustc_session::lint::builtin::{LEGACY_DERIVE_HELPERS, SOFT_UNSTABLE, UNUSED_MACROS};
+use rustc_session::lint::builtin::{LEGACY_DERIVE_HELPERS, PROC_MACRO_DERIVE_RESOLUTION_FALLBACK};
+use rustc_session::lint::builtin::{SOFT_UNSTABLE, UNUSED_MACROS};
use rustc_session::lint::BuiltinLintDiagnostics;
use rustc_session::parse::feature_err;
use rustc_session::Session;
crate fn early_resolve_ident_in_lexical_scope(
&mut self,
orig_ident: Ident,
- scope_set: ScopeSet,
+ scope_set: ScopeSet<'a>,
parent_scope: &ParentScope<'a>,
record_used: bool,
force: bool,
ScopeSet::All(ns, is_import) => (ns, None, is_import),
ScopeSet::AbsolutePath(ns) => (ns, None, false),
ScopeSet::Macro(macro_kind) => (MacroNS, Some(macro_kind), false),
+ ScopeSet::Late(ns, ..) => (ns, None, false),
};
// This is *the* result, resolution from the scope closest to the resolved identifier.
Err((Determinacy::Determined, _)) => Err(Determinacy::Determined),
}
}
- Scope::Module(module) => {
+ Scope::Module(module, derive_fallback_lint_id) => {
let adjusted_parent_scope = &ParentScope { module, ..*parent_scope };
let binding = this.resolve_ident_in_module_unadjusted_ext(
ModuleOrUniformRoot::Module(module),
ident,
ns,
adjusted_parent_scope,
- true,
+ !matches!(scope_set, ScopeSet::Late(..)),
record_used,
path_span,
);
match binding {
Ok(binding) => {
+ if let Some(lint_id) = derive_fallback_lint_id {
+ this.lint_buffer.buffer_lint_with_diagnostic(
+ PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
+ lint_id,
+ orig_ident.span,
+ &format!(
+ "cannot find {} `{}` in this scope",
+ ns.descr(),
+ ident
+ ),
+ BuiltinLintDiagnostics::ProcMacroDeriveResolutionFallback(
+ orig_ident.span,
+ ),
+ );
+ }
let misc_flags = if ptr::eq(module, this.graph_root) {
Flags::MISC_SUGGEST_CRATE
} else if module.is_normal() {
Ok((binding, flags))
if sub_namespace_match(binding.macro_kind(), macro_kind) =>
{
- if !record_used {
+ if !record_used || matches!(scope_set, ScopeSet::Late(..)) {
return Some(Ok(binding));
}
})
}
- pub fn get_trait_ref_data(&self, trait_ref: &hir::TraitRef<'_>) -> Option<Ref> {
- self.lookup_def_id(trait_ref.hir_ref_id).and_then(|def_id| {
- let span = trait_ref.path.span;
- if generated_code(span) {
- return None;
- }
- let sub_span = trait_ref.path.segments.last().unwrap().ident.span;
- filter!(self.span_utils, sub_span);
- let span = self.span_from_span(sub_span);
- Some(Ref { kind: RefKind::Type, span, ref_id: id_from_def_id(def_id) })
- })
- }
-
pub fn get_expr_data(&self, expr: &hir::Expr<'_>) -> Option<Data> {
let ty = self.typeck_results().expr_ty_adjusted_opt(expr)?;
if matches!(ty.kind(), ty::Error(_)) {
/// For a given piece of AST defined by the supplied Span and NodeId,
/// returns `None` if the node is not macro-generated or the span is malformed,
/// else uses the expansion callsite and callee to return some MacroRef.
- pub fn get_macro_use_data(&self, span: Span) -> Option<MacroRef> {
+ ///
+ /// FIXME: [`DumpVisitor::process_macro_use`] should actually dump this data
+ #[allow(dead_code)]
+ fn get_macro_use_data(&self, span: Span) -> Option<MacroRef> {
if !generated_code(span) {
return None;
}
}
impl Externs {
+ /// Used for testing.
pub fn new(data: BTreeMap<String, ExternEntry>) -> Externs {
Externs(data)
}
}
}
- pub fn get_input(&mut self) -> Option<&mut String> {
- match *self {
- Input::File(_) => None,
- Input::Str { ref mut input, .. } => Some(input),
- }
- }
-
pub fn source_name(&self) -> FileName {
match *self {
Input::File(ref ifile) => ifile.clone().into(),
|| self.debugging_opts.query_dep_graph
}
- #[inline(always)]
- pub fn enable_dep_node_debug_strs(&self) -> bool {
- cfg!(debug_assertions)
- && (self.debugging_opts.query_dep_graph || self.debugging_opts.incremental_info)
- }
-
pub fn file_path_mapping(&self) -> FilePathMapping {
FilePathMapping::new(self.remap_path_prefix.clone())
}
pub fn flag_s(a: S, b: S, c: S) -> R {
stable(longer(a, b), move |opts| opts.optflag(a, b, c))
}
- pub fn flagopt_s(a: S, b: S, c: S, d: S) -> R {
- stable(longer(a, b), move |opts| opts.optflagopt(a, b, c, d))
- }
pub fn flagmulti_s(a: S, b: S, c: S) -> R {
stable(longer(a, b), move |opts| opts.optflagmulti(a, b, c))
}
pub fn multi(a: S, b: S, c: S, d: S) -> R {
unstable(longer(a, b), move |opts| opts.optmulti(a, b, c, d))
}
- pub fn flag(a: S, b: S, c: S) -> R {
- unstable(longer(a, b), move |opts| opts.optflag(a, b, c))
- }
- pub fn flagopt(a: S, b: S, c: S, d: S) -> R {
- unstable(longer(a, b), move |opts| opts.optflagopt(a, b, c, d))
- }
- pub fn flagmulti(a: S, b: S, c: S) -> R {
- unstable(longer(a, b), move |opts| opts.optflagmulti(a, b, c))
- }
}
/// Returns the "short" subset of the rustc command line options,
}
// This is a stable hash because BTreeMap is a sorted container
- pub fn stable_hash(
+ crate fn stable_hash(
sub_hashes: BTreeMap<&'static str, &dyn DepTrackingHash>,
hasher: &mut DefaultHasher,
error_format: ErrorOutputType,
(default: no)"),
borrowck: String = ("migrate".to_string(), parse_string, [UNTRACKED],
"select which borrowck is used (`mir` or `migrate`) (default: `migrate`)"),
- borrowck_stats: bool = (false, parse_bool, [UNTRACKED],
- "gather borrowck statistics (default: no)"),
cgu_partitioning_strategy: Option<String> = (None, parse_opt_string, [TRACKED],
"the codegen unit partitioning strategy to use"),
chalk: bool = (false, parse_bool, [TRACKED],
}
impl ParseSess {
+ /// Used for testing.
pub fn new(file_path_mapping: FilePathMapping) -> Self {
let sm = Lrc::new(SourceMap::new(file_path_mapping));
let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, None, Some(sm.clone()));
use rustc_errors::emitter::{Emitter, EmitterWriter, HumanReadableErrorType};
use rustc_errors::json::JsonEmitter;
use rustc_errors::registry::Registry;
-use rustc_errors::{Applicability, Diagnostic, DiagnosticBuilder, DiagnosticId, ErrorReported};
+use rustc_errors::{Diagnostic, DiagnosticBuilder, DiagnosticId, ErrorReported};
use rustc_lint_defs::FutureBreakage;
pub use rustc_span::crate_disambiguator::CrateDisambiguator;
use rustc_span::edition::Edition;
enum DiagnosticBuilderMethod {
Note,
SpanNote,
- SpanSuggestion(String), // suggestion
- // Add more variants as needed to support one-time diagnostics.
+ // Add more variants as needed to support one-time diagnostics.
}
/// Trait implemented by error types. This should not be implemented manually. Instead, use
let span = span_maybe.expect("`span_note` needs a span");
diag_builder.span_note(span, message);
}
- DiagnosticBuilderMethod::SpanSuggestion(suggestion) => {
- let span = span_maybe.expect("`span_suggestion_*` needs a span");
- diag_builder.span_suggestion(
- span,
- message,
- suggestion,
- Applicability::Unspecified,
- );
- }
}
}
}
self.diag_once(diag_builder, DiagnosticBuilderMethod::Note, msg_id, message, None);
}
- pub fn diag_span_suggestion_once<'a, 'b>(
- &'a self,
- diag_builder: &'b mut DiagnosticBuilder<'a>,
- msg_id: DiagnosticMessageId,
- span: Span,
- message: &str,
- suggestion: String,
- ) {
- self.diag_once(
- diag_builder,
- DiagnosticBuilderMethod::SpanSuggestion(suggestion),
- msg_id,
- message,
- Some(span),
- );
- }
-
#[inline]
pub fn source_map(&self) -> &SourceMap {
self.parse_sess.source_map()
pub fn verify_llvm_ir(&self) -> bool {
self.opts.debugging_opts.verify_llvm_ir || option_env!("RUSTC_VERIFY_LLVM_IR").is_some()
}
- pub fn borrowck_stats(&self) -> bool {
- self.opts.debugging_opts.borrowck_stats
- }
pub fn print_llvm_passes(&self) -> bool {
self.opts.debugging_opts.print_llvm_passes
}
)
}
- pub fn set_incr_session_load_dep_graph(&self, load: bool) {
- let mut incr_comp_session = self.incr_comp_session.borrow_mut();
-
- if let IncrCompSession::Active { ref mut load_dep_graph, .. } = *incr_comp_session {
- *load_dep_graph = load;
- }
- }
-
- pub fn incr_session_load_dep_graph(&self) -> bool {
- let incr_comp_session = self.incr_comp_session.borrow();
- match *incr_comp_session {
- IncrCompSession::Active { load_dep_graph, .. } => load_dep_graph,
- _ => false,
- }
- }
-
pub fn init_incr_comp_session(
&self,
session_dir: PathBuf,
}
/// Returns the crate-local part of the [DefPathHash].
+ ///
+ /// Used for tests.
#[inline]
pub fn local_hash(&self) -> u64 {
self.0.as_value().1
let names: Vec<_> =
range_to_update.clone().map(|idx| get_name(SyntaxContext::from_u32(idx as u32))).collect();
HygieneData::with(|data| {
- range_to_update.zip(names.into_iter()).for_each(|(idx, name)| {
+ range_to_update.zip(names).for_each(|(idx, name)| {
data.syntax_context_data[idx].dollar_crate_name = name;
})
})
Ok(new_ctxt)
}
-pub fn num_syntax_ctxts() -> usize {
- HygieneData::with(|data| data.syntax_context_data.len())
-}
-
-pub fn for_all_ctxts_in<E, F: FnMut((u32, SyntaxContext, &SyntaxContextData)) -> Result<(), E>>(
+fn for_all_ctxts_in<E, F: FnMut((u32, SyntaxContext, &SyntaxContextData)) -> Result<(), E>>(
ctxts: impl Iterator<Item = SyntaxContext>,
mut f: F,
) -> Result<(), E> {
Ok(())
}
-pub fn for_all_expns_in<E, F: FnMut(u32, ExpnId, &ExpnData) -> Result<(), E>>(
+fn for_all_expns_in<E, F: FnMut(u32, ExpnId, &ExpnData) -> Result<(), E>>(
expns: impl Iterator<Item = ExpnId>,
mut f: F,
) -> Result<(), E> {
Ok(())
}
-pub fn for_all_data<E, F: FnMut((u32, SyntaxContext, &SyntaxContextData)) -> Result<(), E>>(
- mut f: F,
-) -> Result<(), E> {
- let all_data = HygieneData::with(|data| data.syntax_context_data.clone());
- for (i, data) in all_data.into_iter().enumerate() {
- f((i as u32, SyntaxContext(i as u32), &data))?;
- }
- Ok(())
-}
-
impl<E: Encoder> Encodable<E> for ExpnId {
default fn encode(&self, _: &mut E) -> Result<(), E::Error> {
panic!("cannot encode `ExpnId` with `{}`", std::any::type_name::<E>());
}
}
-pub fn for_all_expn_data<E, F: FnMut(u32, &ExpnData) -> Result<(), E>>(mut f: F) -> Result<(), E> {
- let all_data = HygieneData::with(|data| data.expn_data.clone());
- for (i, data) in all_data.into_iter().enumerate() {
- f(i as u32, &data.unwrap_or_else(|| panic!("Missing ExpnData!")))?;
- }
- Ok(())
-}
-
pub fn raw_encode_syntax_context<E: Encoder>(
ctxt: SyntaxContext,
context: &HygieneEncodeContext,
}
impl ExternalSource {
- pub fn is_absent(&self) -> bool {
- !matches!(self, ExternalSource::Foreign { kind: ExternalSourceKind::Present(_), .. })
- }
-
pub fn get_source(&self) -> Option<&Lrc<String>> {
match self {
ExternalSource::Foreign { kind: ExternalSourceKind::Present(ref src), .. } => Some(src),
self.src.is_none()
}
- pub fn byte_length(&self) -> u32 {
- self.end_pos.0 - self.start_pos.0
- }
pub fn count_lines(&self) -> usize {
self.lines.len()
}
// Many of the symbols defined in compiler-rt are also defined in libgcc.
// Android's linker doesn't like that by default.
base.pre_link_args
- .get_mut(&LinkerFlavor::Gcc)
- .unwrap()
+ .entry(LinkerFlavor::Gcc)
+ .or_default()
.push("-Wl,--allow-multiple-definition".to_string());
base.dwarf_version = Some(2);
base.position_independent_executables = true;
let mut base = super::android_base::opts();
base.features = "+v7,+thumb-mode,+thumb2,+vfp3,-d32,-neon".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-march=armv7-a".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-march=armv7-a".to_string());
Target {
llvm_target: "armv7-none-linux-android".to_string(),
has_rpath: false,
position_independent_executables: false,
eh_frame_header: false,
- pre_link_args: vec![(
- LinkerFlavor::Gcc,
- vec![
- format!("-mmcu={}", target_cpu),
- // We want to be able to strip as much executable code as possible
- // from the linker command line, and this flag indicates to the
- // linker that it can avoid linking in dynamic libraries that don't
- // actually satisfy any symbols up to that point (as with many other
- // resolutions the linker does). This option only applies to all
- // following libraries so we're sure to pass it as one of the first
- // arguments.
- "-Wl,--as-needed".to_string(),
- ],
- )]
- .into_iter()
- .collect(),
+ pre_link_args: vec![(LinkerFlavor::Gcc, vec![format!("-mmcu={}", target_cpu)])]
+ .into_iter()
+ .collect(),
late_link_args: vec![(LinkerFlavor::Gcc, vec!["-lgcc".to_owned()])]
.into_iter()
.collect(),
-use crate::spec::{LinkArgs, LinkerFlavor, RelroLevel, TargetOptions};
+use crate::spec::{RelroLevel, TargetOptions};
pub fn opts() -> TargetOptions {
- let mut args = LinkArgs::new();
- args.insert(
- LinkerFlavor::Gcc,
- vec![
- // GNU-style linkers will use this to omit linking to libraries
- // which don't actually fulfill any relocations, but only for
- // libraries which follow this flag. Thus, use it before
- // specifying libraries to link to.
- "-Wl,--as-needed".to_string(),
- // Always enable NX protection when it is available
- "-Wl,-z,noexecstack".to_string(),
- ],
- );
-
TargetOptions {
os: "dragonfly".to_string(),
dynamic_linking: true,
os_family: Some("unix".to_string()),
linker_is_gnu: true,
has_rpath: true,
- pre_link_args: args,
position_independent_executables: true,
relro_level: RelroLevel::Full,
dwarf_version: Some(2),
-use crate::spec::{LinkArgs, LinkerFlavor, RelroLevel, TargetOptions};
+use crate::spec::{RelroLevel, TargetOptions};
pub fn opts() -> TargetOptions {
- let mut args = LinkArgs::new();
- args.insert(
- LinkerFlavor::Gcc,
- vec![
- // GNU-style linkers will use this to omit linking to libraries
- // which don't actually fulfill any relocations, but only for
- // libraries which follow this flag. Thus, use it before
- // specifying libraries to link to.
- "-Wl,--as-needed".to_string(),
- // Always enable NX protection when it is available
- "-Wl,-z,noexecstack".to_string(),
- ],
- );
-
TargetOptions {
os: "freebsd".to_string(),
dynamic_linking: true,
os_family: Some("unix".to_string()),
linker_is_gnu: true,
has_rpath: true,
- pre_link_args: args,
position_independent_executables: true,
eliminate_frame_pointer: false, // FIXME 43575
relro_level: RelroLevel::Full,
// Mark all dynamic libraries and executables as compatible with the larger 4GiB address
// space available to x86 Windows binaries on x86_64.
base.pre_link_args
- .get_mut(&LinkerFlavor::Gcc)
- .unwrap()
+ .entry(LinkerFlavor::Gcc)
+ .or_default()
.push("-Wl,--large-address-aware".to_string());
Target {
// https://docs.microsoft.com/en-us/cpp/build/reference/safeseh-image-has-safe-exception-handlers
"/SAFESEH".to_string(),
];
- base.pre_link_args.get_mut(&LinkerFlavor::Msvc).unwrap().extend(pre_link_args_msvc.clone());
+ base.pre_link_args.entry(LinkerFlavor::Msvc).or_default().extend(pre_link_args_msvc.clone());
base.pre_link_args
- .get_mut(&LinkerFlavor::Lld(LldFlavor::Link))
- .unwrap()
+ .entry(LinkerFlavor::Lld(LldFlavor::Link))
+ .or_default()
.extend(pre_link_args_msvc);
Target {
let mut base = super::freebsd_base::opts();
base.cpu = "pentium4".to_string();
base.max_atomic_width = Some(64);
- let pre_link_args = base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap();
+ let pre_link_args = base.pre_link_args.entry(LinkerFlavor::Gcc).or_default();
pre_link_args.push("-m32".to_string());
pre_link_args.push("-Wl,-znotext".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
let mut base = super::linux_gnu_base::opts();
base.cpu = "pentium4".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m32".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
Target {
let mut base = super::linux_musl_base::opts();
base.cpu = "pentium4".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-Wl,-melf_i386".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m32".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-Wl,-melf_i386".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
// The unwinder used by i686-unknown-linux-musl, the LLVM libunwind
let mut base = super::netbsd_base::opts();
base.cpu = "pentium4".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m32".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
Target {
let mut base = super::openbsd_base::opts();
base.cpu = "pentium4".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-fuse-ld=lld".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m32".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-fuse-ld=lld".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
Target {
// Mark all dynamic libraries and executables as compatible with the larger 4GiB address
// space available to x86 Windows binaries on x86_64.
base.pre_link_args
- .get_mut(&LinkerFlavor::Gcc)
- .unwrap()
+ .entry(LinkerFlavor::Gcc)
+ .or_default()
.push("-Wl,--large-address-aware".to_string());
Target {
let mut base = super::vxworks_base::opts();
base.cpu = "pentium4".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m32".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
Target {
-use crate::spec::{LinkArgs, LinkerFlavor, RelroLevel, TargetOptions};
+use crate::spec::{RelroLevel, TargetOptions};
pub fn opts() -> TargetOptions {
- let mut args = LinkArgs::new();
- args.insert(
- LinkerFlavor::Gcc,
- vec![
- // We want to be able to strip as much executable code as possible
- // from the linker command line, and this flag indicates to the
- // linker that it can avoid linking in dynamic libraries that don't
- // actually satisfy any symbols up to that point (as with many other
- // resolutions the linker does). This option only applies to all
- // following libraries so we're sure to pass it as one of the first
- // arguments.
- "-Wl,--as-needed".to_string(),
- // Always enable NX protection when it is available
- "-Wl,-z,noexecstack".to_string(),
- ],
- );
-
TargetOptions {
os: "linux".to_string(),
dynamic_linking: true,
os_family: Some("unix".to_string()),
linker_is_gnu: true,
has_rpath: true,
- pre_link_args: args,
position_independent_executables: true,
relro_level: RelroLevel::Full,
has_elf_tls: true,
-use crate::spec::{
- LinkArgs, LinkerFlavor, PanicStrategy, RelocModel, RelroLevel, StackProbeType, TargetOptions,
-};
+use crate::spec::{PanicStrategy, RelocModel, RelroLevel, StackProbeType, TargetOptions};
pub fn opts() -> TargetOptions {
- let mut pre_link_args = LinkArgs::new();
- pre_link_args.insert(
- LinkerFlavor::Gcc,
- vec!["-Wl,--as-needed".to_string(), "-Wl,-z,noexecstack".to_string()],
- );
-
TargetOptions {
env: "gnu".to_string(),
disable_redzone: true,
needs_plt: true,
relro_level: RelroLevel::Full,
relocation_model: RelocModel::Static,
- pre_link_args,
..Default::default()
}
// Suppress the verbose logo and authorship debugging output, which would needlessly
// clog any log files.
"/NOLOGO".to_string(),
- // Tell the compiler that non-code sections can be marked as non-executable,
- // including stack pages.
- // UEFI is fully compatible to non-executable data pages.
- // In fact, firmware might enforce this, so we better let the linker know about this,
- // so it will fail if the compiler ever tries placing code on the stack
- // (e.g., trampoline constructs and alike).
- "/NXCOMPAT".to_string(),
];
let mut pre_link_args = LinkArgs::new();
pre_link_args.insert(LinkerFlavor::Msvc, pre_link_args_msvc.clone());
-use crate::spec::{LinkArgs, LinkerFlavor, RelroLevel, TargetOptions};
+use crate::spec::{RelroLevel, TargetOptions};
pub fn opts() -> TargetOptions {
- let mut args = LinkArgs::new();
- args.insert(
- LinkerFlavor::Gcc,
- vec![
- // GNU-style linkers will use this to omit linking to libraries
- // which don't actually fulfill any relocations, but only for
- // libraries which follow this flag. Thus, use it before
- // specifying libraries to link to.
- "-Wl,--as-needed".to_string(),
- ],
- );
-
TargetOptions {
os: "netbsd".to_string(),
dynamic_linking: true,
linker_is_gnu: true,
no_default_libraries: false,
has_rpath: true,
- pre_link_args: args,
position_independent_executables: true,
relro_level: RelroLevel::Full,
use_ctors_section: true,
-use crate::spec::{LinkArgs, LinkerFlavor, RelroLevel, TargetOptions};
+use crate::spec::{RelroLevel, TargetOptions};
pub fn opts() -> TargetOptions {
- let mut args = LinkArgs::new();
- args.insert(
- LinkerFlavor::Gcc,
- vec![
- // GNU-style linkers will use this to omit linking to libraries
- // which don't actually fulfill any relocations, but only for
- // libraries which follow this flag. Thus, use it before
- // specifying libraries to link to.
- "-Wl,--as-needed".to_string(),
- // Always enable NX protection when it is available
- "-Wl,-z,noexecstack".to_string(),
- ],
- );
-
TargetOptions {
os: "openbsd".to_string(),
dynamic_linking: true,
linker_is_gnu: true,
has_rpath: true,
abi_return_struct_as_int: true,
- pre_link_args: args,
position_independent_executables: true,
eliminate_frame_pointer: false, // FIXME 43575
relro_level: RelroLevel::Full,
pub fn target() -> Target {
let mut base = super::freebsd_base::opts();
base.cpu = "ppc64".to_string();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.max_atomic_width = Some(64);
Target {
pub fn target() -> Target {
let mut base = super::linux_gnu_base::opts();
base.cpu = "ppc64".to_string();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.max_atomic_width = Some(64);
// ld.so in at least RHEL6 on ppc64 has a bug related to BIND_NOW, so only enable partial RELRO
pub fn target() -> Target {
let mut base = super::linux_musl_base::opts();
base.cpu = "ppc64".to_string();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.max_atomic_width = Some(64);
Target {
pub fn target() -> Target {
let mut base = super::vxworks_base::opts();
base.cpu = "ppc64".to_string();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.max_atomic_width = Some(64);
Target {
pub fn target() -> Target {
let mut base = super::linux_gnu_base::opts();
base.cpu = "ppc64le".to_string();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.max_atomic_width = Some(64);
Target {
pub fn target() -> Target {
let mut base = super::linux_musl_base::opts();
base.cpu = "ppc64le".to_string();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.max_atomic_width = Some(64);
Target {
pub fn target() -> Target {
let mut base = super::linux_gnu_base::opts();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m32".to_string());
base.max_atomic_width = Some(32);
Target {
pub fn target() -> Target {
let mut base = super::linux_gnu_base::opts();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-mspe".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-mspe".to_string());
base.max_atomic_width = Some(32);
Target {
pub fn target() -> Target {
let mut base = super::linux_musl_base::opts();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m32".to_string());
base.max_atomic_width = Some(32);
Target {
pub fn target() -> Target {
let mut base = super::netbsd_base::opts();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m32".to_string());
base.max_atomic_width = Some(32);
Target {
pub fn target() -> Target {
let mut base = super::vxworks_base::opts();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m32".to_string());
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("--secure-plt".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m32".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("--secure-plt".to_string());
base.max_atomic_width = Some(32);
Target {
pub fn target() -> Target {
let mut base = super::vxworks_base::opts();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-mspe".to_string());
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("--secure-plt".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-mspe".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("--secure-plt".to_string());
base.max_atomic_width = Some(32);
Target {
-use crate::spec::{LinkArgs, LinkerFlavor, RelroLevel, TargetOptions};
+use crate::spec::{RelroLevel, TargetOptions};
pub fn opts() -> TargetOptions {
- let mut args = LinkArgs::new();
- args.insert(
- LinkerFlavor::Gcc,
- vec![
- // We want to be able to strip as much executable code as possible
- // from the linker command line, and this flag indicates to the
- // linker that it can avoid linking in dynamic libraries that don't
- // actually satisfy any symbols up to that point (as with many other
- // resolutions the linker does). This option only applies to all
- // following libraries so we're sure to pass it as one of the first
- // arguments.
- "-Wl,--as-needed".to_string(),
- // Always enable NX protection when it is available
- "-Wl,-z,noexecstack".to_string(),
- ],
- );
-
TargetOptions {
os: "redox".to_string(),
env: "relibc".to_string(),
os_family: Some("unix".to_string()),
linker_is_gnu: true,
has_rpath: true,
- pre_link_args: args,
position_independent_executables: true,
relro_level: RelroLevel::Full,
has_elf_tls: true,
pub fn target() -> Target {
let mut base = super::netbsd_base::opts();
base.cpu = "v9".to_string();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.max_atomic_width = Some(64);
Target {
let mut base = super::openbsd_base::opts();
base.endian = Endian::Big;
base.cpu = "v9".to_string();
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.max_atomic_width = Some(64);
Target {
base.endian = Endian::Big;
base.cpu = "v9".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-mv8plus".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-mv8plus".to_string());
Target {
llvm_target: "sparc-unknown-linux-gnu".to_string(),
// where necessary, but this is not the observed behavior.
// Disabling the LBR optimization works around the issue.
let pre_link_args_msvc = "/OPT:NOLBR".to_string();
- base.pre_link_args.get_mut(&LinkerFlavor::Msvc).unwrap().push(pre_link_args_msvc.clone());
+ base.pre_link_args.entry(LinkerFlavor::Msvc).or_default().push(pre_link_args_msvc.clone());
base.pre_link_args
- .get_mut(&LinkerFlavor::Lld(LldFlavor::Link))
- .unwrap()
+ .entry(LinkerFlavor::Lld(LldFlavor::Link))
+ .or_default()
.push(pre_link_args_msvc);
// FIXME(jordanrh): use PanicStrategy::Unwind when SEH is
let mut base = super::android_base::opts();
base.features = "+v7,+thumb-mode,+thumb2,+vfp3,+neon".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-march=armv7-a".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-march=armv7-a".to_string());
Target {
llvm_target: "armv7-none-linux-android".to_string(),
// exit (default for applications).
"/subsystem:efi_application".to_string(),
];
- base.pre_link_args.get_mut(&LinkerFlavor::Msvc).unwrap().extend(pre_link_args_msvc.clone());
+ base.pre_link_args.entry(LinkerFlavor::Msvc).or_default().extend(pre_link_args_msvc.clone());
base.pre_link_args
- .get_mut(&LinkerFlavor::Lld(LldFlavor::Link))
- .unwrap()
+ .entry(LinkerFlavor::Lld(LldFlavor::Link))
+ .or_default()
.extend(pre_link_args_msvc);
TargetOptions {
-use crate::spec::{LinkArgs, LinkerFlavor, TargetOptions};
+use crate::spec::TargetOptions;
pub fn opts() -> TargetOptions {
- let mut args = LinkArgs::new();
- args.insert(
- LinkerFlavor::Gcc,
- vec![
- // We want to be able to strip as much executable code as possible
- // from the linker command line, and this flag indicates to the
- // linker that it can avoid linking in dynamic libraries that don't
- // actually satisfy any symbols up to that point (as with many other
- // resolutions the linker does). This option only applies to all
- // following libraries so we're sure to pass it as one of the first
- // arguments.
- "-Wl,--as-needed".to_string(),
- ],
- );
-
TargetOptions {
os: "vxworks".to_string(),
env: "gnu".to_string(),
os_family: Some("unix".to_string()),
linker_is_gnu: true,
has_rpath: true,
- pre_link_args: args,
position_independent_executables: false,
has_elf_tls: true,
crt_static_default: true,
pub fn target() -> Target {
let mut options = wasm32_base::options();
- let clang_args = options.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap();
+ let clang_args = options.pre_link_args.entry(LinkerFlavor::Gcc).or_default();
// Rust really needs a way for users to specify exports and imports in
// the source code. --export-dynamic isn't the right tool for this job,
let mut options = wasm32_base::options();
options.os = "unknown".to_string();
options.linker_flavor = LinkerFlavor::Lld(LldFlavor::Wasm);
- let clang_args = options.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap();
+ let clang_args = options.pre_link_args.entry(LinkerFlavor::Gcc).or_default();
// Make sure clang uses LLD as its linker and is configured appropriately
// otherwise
clang_args.push("-Wl,--export-dynamic".to_string());
// Add the flags to wasm-ld's args too.
- let lld_args = options.pre_link_args.get_mut(&LinkerFlavor::Lld(LldFlavor::Wasm)).unwrap();
+ let lld_args = options.pre_link_args.entry(LinkerFlavor::Lld(LldFlavor::Wasm)).or_default();
lld_args.push("--no-entry".to_string());
lld_args.push("--export-dynamic".to_string());
// Tell GCC to avoid linker plugins, because we are not bundling
// them with Windows installer, and Rust does its own LTO anyways.
"-fno-use-linker-plugin".to_string(),
- // Always enable DEP (NX bit) when it is available
- "-Wl,--nxcompat".to_string(),
// Enable ASLR
"-Wl,--dynamicbase".to_string(),
// ASLR will rebase it anyway so leaving that option enabled only leads to confusion
opts.vendor = "uwp".to_string();
let pre_link_args_msvc = vec!["/APPCONTAINER".to_string(), "mincore.lib".to_string()];
- opts.pre_link_args.get_mut(&LinkerFlavor::Msvc).unwrap().extend(pre_link_args_msvc.clone());
+ opts.pre_link_args.entry(LinkerFlavor::Msvc).or_default().extend(pre_link_args_msvc.clone());
opts.pre_link_args
- .get_mut(&LinkerFlavor::Lld(LldFlavor::Link))
- .unwrap()
+ .entry(LinkerFlavor::Lld(LldFlavor::Link))
+ .or_default()
.extend(pre_link_args_msvc);
opts
pub fn target() -> Target {
const PRE_LINK_ARGS: &[&str] = &[
- "--as-needed",
- "-z",
- "noexecstack",
"-e",
"elf_entry",
"-Bstatic",
// https://developer.android.com/ndk/guides/abis.html#86-64
base.features = "+mmx,+sse,+sse2,+sse3,+ssse3,+sse4.1,+sse4.2,+popcnt".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
Target {
pub fn target() -> Target {
let mut base = super::windows_gnu_base::opts();
base.cpu = "x86-64".to_string();
- let gcc_pre_link_args = base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap();
+ let gcc_pre_link_args = base.pre_link_args.entry(LinkerFlavor::Gcc).or_default();
gcc_pre_link_args.push("-m64".to_string());
// Use high-entropy 64 bit address space for ASLR
gcc_pre_link_args.push("-Wl,--high-entropy-va".to_string());
let mut base = super::dragonfly_base::opts();
base.cpu = "x86-64".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
Target {
let mut base = super::freebsd_base::opts();
base.cpu = "x86-64".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
Target {
let mut base = super::linux_gnu_base::opts();
base.cpu = "x86-64".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
Target {
let mut base = super::linux_gnu_base::opts();
base.cpu = "x86-64".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-mx32".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-mx32".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
base.has_elf_tls = false;
// BUG(GabrielMajeri): disabling the PLT on x86_64 Linux with x32 ABI
let mut base = super::linux_musl_base::opts();
base.cpu = "x86-64".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
base.static_position_independent_executables = true;
let mut base = super::netbsd_base::opts();
base.cpu = "x86-64".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
Target {
"-mmx,-sse,-sse2,-sse3,-ssse3,-sse4.1,-sse4.2,-3dnow,-3dnowa,-avx,-avx2,+soft-float"
.to_string();
base.code_model = Some(CodeModel::Kernel);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
Target {
// FIXME: Some dispute, the linux-on-clang folks think this should use
let mut base = super::openbsd_base::opts();
base.cpu = "x86-64".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
Target {
let mut base = super::redox_base::opts();
base.cpu = "x86-64".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
Target {
pub fn target() -> Target {
let mut base = super::windows_uwp_gnu_base::opts();
base.cpu = "x86-64".to_string();
- let gcc_pre_link_args = base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap();
+ let gcc_pre_link_args = base.pre_link_args.entry(LinkerFlavor::Gcc).or_default();
gcc_pre_link_args.push("-m64".to_string());
// Use high-entropy 64 bit address space for ASLR
gcc_pre_link_args.push("-Wl,--high-entropy-va".to_string());
let mut base = super::vxworks_base::opts();
base.cpu = "x86-64".to_string();
base.max_atomic_width = Some(64);
- base.pre_link_args.get_mut(&LinkerFlavor::Gcc).unwrap().push("-m64".to_string());
+ base.pre_link_args.entry(LinkerFlavor::Gcc).or_default().push("-m64".to_string());
base.stack_probes = StackProbeType::InlineOrCall { min_llvm_version_for_inline: (11, 0, 1) };
base.disable_redzone = true;
#![feature(box_patterns)]
#![feature(drain_filter)]
#![feature(in_band_lifetimes)]
+#![feature(iter_zip)]
#![feature(never_type)]
#![feature(crate_visibility_modifier)]
#![cfg_attr(bootstrap, feature(or_patterns))]
use std::collections::hash_map::Entry;
use std::collections::VecDeque;
+use std::iter;
// FIXME(twk): this is obviously not nice to duplicate like that
#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)]
return true;
}
- for (new_region, old_region) in new_substs.regions().zip(old_substs.regions()) {
+ for (new_region, old_region) in
+ iter::zip(new_substs.regions(), old_substs.regions())
+ {
match (new_region, old_region) {
// If both predicates have an `ReLateBound` (a HRTB) in the
// same spot, we do nothing.
let impl2_ref = tcx.impl_trait_ref(impl2_def_id);
// Check if any of the input types definitely do not unify.
- if impl1_ref
- .iter()
- .flat_map(|tref| tref.substs.types())
- .zip(impl2_ref.iter().flat_map(|tref| tref.substs.types()))
- .any(|(ty1, ty2)| {
- let t1 = fast_reject::simplify_type(tcx, ty1, false);
- let t2 = fast_reject::simplify_type(tcx, ty2, false);
- if let (Some(t1), Some(t2)) = (t1, t2) {
- // Simplified successfully
- // Types cannot unify if they differ in their reference mutability or simplify to different types
- t1 != t2 || ty1.ref_mutability() != ty2.ref_mutability()
- } else {
- // Types might unify
- false
- }
- })
- {
+ if iter::zip(
+ impl1_ref.iter().flat_map(|tref| tref.substs.types()),
+ impl2_ref.iter().flat_map(|tref| tref.substs.types()),
+ )
+ .any(|(ty1, ty2)| {
+ let t1 = fast_reject::simplify_type(tcx, ty1, false);
+ let t2 = fast_reject::simplify_type(tcx, ty2, false);
+ if let (Some(t1), Some(t2)) = (t1, t2) {
+ // Simplified successfully
+ // Types cannot unify if they differ in their reference mutability or simplify to different types
+ t1 != t2 || ty1.ref_mutability() != ty2.ref_mutability()
+ } else {
+ // Types might unify
+ false
+ }
+ }) {
// Some types involved are definitely different, so the impls couldn't possibly overlap.
debug!("overlapping_impls: fast_reject early-exit");
return no_overlap();
use rustc_span::Span;
use std::cmp;
+use std::iter;
use std::ops::ControlFlow;
/// Check if a given constant can be evaluated.
if a_args.len() == b_args.len() =>
{
try_unify(tcx, a.subtree(a_f), b.subtree(b_f))
- && a_args
- .iter()
- .zip(b_args)
+ && iter::zip(a_args, b_args)
.all(|(&an, &bn)| try_unify(tcx, a.subtree(an), b.subtree(bn)))
}
_ => false,
use rustc_span::symbol::{kw, sym};
use rustc_span::{ExpnKind, MultiSpan, Span, DUMMY_SP};
use std::fmt;
+use std::iter;
use crate::traits::query::evaluate_obligation::InferCtxtExt as _;
use crate::traits::query::normalize::AtExt as _;
}
}
- for (error, suppressed) in errors.iter().zip(is_suppressed) {
+ for (error, suppressed) in iter::zip(errors, is_suppressed) {
if !suppressed {
self.report_fulfillment_error(error, body_id, fallback_has_occurred);
}
use rustc_middle::ty::subst::Subst;
use rustc_middle::ty::{self, GenericParamDefKind};
use rustc_span::symbol::sym;
+use std::iter;
use super::InferCtxtPrivExt;
if let Ok(..) = self.can_eq(param_env, trait_self_ty, impl_self_ty) {
self_match_impls.push(def_id);
- if trait_ref
- .substs
- .types()
- .skip(1)
- .zip(impl_trait_ref.substs.types().skip(1))
- .all(|(u, v)| self.fuzzy_match_tys(u, v))
+ if iter::zip(
+ trait_ref.substs.types().skip(1),
+ impl_trait_ref.substs.types().skip(1),
+ )
+ .all(|(u, v)| self.fuzzy_match_tys(u, v))
{
fuzzy_match_impls.push(def_id);
}
flags.push((sym::from_desugaring, None));
flags.push((sym::from_desugaring, Some(format!("{:?}", k))));
}
- let generics = self.tcx.generics_of(def_id);
- let self_ty = trait_ref.self_ty();
- // This is also included through the generics list as `Self`,
- // but the parser won't allow you to use it
- flags.push((sym::_Self, Some(self_ty.to_string())));
- if let Some(def) = self_ty.ty_adt_def() {
- // We also want to be able to select self's original
- // signature with no type arguments resolved
- flags.push((sym::_Self, Some(self.tcx.type_of(def.did).to_string())));
- }
- for param in generics.params.iter() {
- let value = match param.kind {
- GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => {
- trait_ref.substs[param.index as usize].to_string()
- }
- GenericParamDefKind::Lifetime => continue,
- };
- let name = param.name;
- flags.push((name, Some(value)));
- }
+ // Add all types without trimmed paths.
+ ty::print::with_no_trimmed_paths(|| {
+ let generics = self.tcx.generics_of(def_id);
+ let self_ty = trait_ref.self_ty();
+ // This is also included through the generics list as `Self`,
+ // but the parser won't allow you to use it
+ flags.push((sym::_Self, Some(self_ty.to_string())));
+ if let Some(def) = self_ty.ty_adt_def() {
+ // We also want to be able to select self's original
+ // signature with no type arguments resolved
+ flags.push((sym::_Self, Some(self.tcx.type_of(def.did).to_string())));
+ }
- if let Some(true) = self_ty.ty_adt_def().map(|def| def.did.is_local()) {
- flags.push((sym::crate_local, None));
- }
+ for param in generics.params.iter() {
+ let value = match param.kind {
+ GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => {
+ trait_ref.substs[param.index as usize].to_string()
+ }
+ GenericParamDefKind::Lifetime => continue,
+ };
+ let name = param.name;
+ flags.push((name, Some(value)));
+ }
- // Allow targeting all integers using `{integral}`, even if the exact type was resolved
- if self_ty.is_integral() {
- flags.push((sym::_Self, Some("{integral}".to_owned())));
- }
+ if let Some(true) = self_ty.ty_adt_def().map(|def| def.did.is_local()) {
+ flags.push((sym::crate_local, None));
+ }
- if let ty::Array(aty, len) = self_ty.kind() {
- flags.push((sym::_Self, Some("[]".to_owned())));
- flags.push((sym::_Self, Some(format!("[{}]", aty))));
- if let Some(def) = aty.ty_adt_def() {
- // We also want to be able to select the array's type's original
- // signature with no type arguments resolved
- let type_string = self.tcx.type_of(def.did).to_string();
- flags.push((sym::_Self, Some(format!("[{}]", type_string))));
+ // Allow targeting all integers using `{integral}`, even if the exact type was resolved
+ if self_ty.is_integral() {
+ flags.push((sym::_Self, Some("{integral}".to_owned())));
+ }
- let len = len.val.try_to_value().and_then(|v| v.try_to_machine_usize(self.tcx));
- let string = match len {
- Some(n) => format!("[{}; {}]", type_string, n),
- None => format!("[{}; _]", type_string),
- };
- flags.push((sym::_Self, Some(string)));
+ if let ty::Array(aty, len) = self_ty.kind() {
+ flags.push((sym::_Self, Some("[]".to_owned())));
+ flags.push((sym::_Self, Some(format!("[{}]", aty))));
+ if let Some(def) = aty.ty_adt_def() {
+ // We also want to be able to select the array's type's original
+ // signature with no type arguments resolved
+ let type_string = self.tcx.type_of(def.did).to_string();
+ flags.push((sym::_Self, Some(format!("[{}]", type_string))));
+
+ let len = len.val.try_to_value().and_then(|v| v.try_to_machine_usize(self.tcx));
+ let string = match len {
+ Some(n) => format!("[{}; {}]", type_string, n),
+ None => format!("[{}; _]", type_string),
+ };
+ flags.push((sym::_Self, Some(string)));
+ }
}
- }
- if let ty::Dynamic(traits, _) = self_ty.kind() {
- for t in traits.iter() {
- if let ty::ExistentialPredicate::Trait(trait_ref) = t.skip_binder() {
- flags.push((sym::_Self, Some(self.tcx.def_path_str(trait_ref.def_id))))
+ if let ty::Dynamic(traits, _) = self_ty.kind() {
+ for t in traits.iter() {
+ if let ty::ExistentialPredicate::Trait(trait_ref) = t.skip_binder() {
+ flags.push((sym::_Self, Some(self.tcx.def_path_str(trait_ref.def_id))))
+ }
}
}
- }
+ });
if let Ok(Some(command)) =
OnUnimplementedDirective::of_item(self.tcx, trait_ref.def_id, def_id)
pub ty: Ty<'tcx>,
}
-impl<'tcx> ImpliedOutlivesBounds<'tcx> {
- pub fn new(ty: Ty<'tcx>) -> Self {
- ImpliedOutlivesBounds { ty }
- }
-}
-
impl<'tcx> super::QueryTypeOp<'tcx> for ImpliedOutlivesBounds<'tcx> {
type QueryResponse = Vec<OutlivesBound<'tcx>>;
// substitution if we find that any of the input types, when
// simplified, do not match.
- obligation.predicate.skip_binder().trait_ref.substs.iter().zip(impl_trait_ref.substs).any(
+ iter::zip(obligation.predicate.skip_binder().trait_ref.substs, impl_trait_ref.substs).any(
|(obligation_arg, impl_arg)| {
match (obligation_arg.unpack(), impl_arg.unpack()) {
(GenericArgKind::Type(obligation_ty), GenericArgKind::Type(impl_ty)) => {
let predicates = predicates.instantiate(self.infcx.tcx, substs);
debug_assert_eq!(predicates.predicates.len(), origins.len());
- predicates
- .predicates
- .into_iter()
- .zip(predicates.spans.into_iter())
- .zip(origins.into_iter().rev())
+ iter::zip(iter::zip(predicates.predicates, predicates.spans), origins.into_iter().rev())
.map(|((pred, span), origin_def_id)| {
let cause = self.cause(traits::BindingObligation(origin_def_id, span));
traits::Obligation::with_depth(cause, self.recursion_depth, self.param_env, pred)
use rustc_span::{Span, DUMMY_SP};
use std::collections::BTreeSet;
+use std::iter;
impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
/// On missing type parameters, emit an E0393 error and provide a structured suggestion using
// that the user forgot to give the associated type's name. The canonical
// example would be trying to use `Iterator<isize>` instead of
// `Iterator<Item = isize>`.
- for (potential, item) in potential_assoc_types.iter().zip(assoc_items.iter()) {
+ for (potential, item) in iter::zip(&potential_assoc_types, assoc_items) {
if let Ok(snippet) = tcx.sess.source_map().span_to_snippet(*potential) {
suggestions.push((*potential, format!("{} = {}", item.ident, snippet)));
}
use rustc_span::Span;
use rustc_target::spec::abi;
use rustc_trait_selection::autoderef::Autoderef;
+use std::iter;
/// Checks that it is legal to call methods of the trait corresponding
/// to `trait_id` (this only cares about the trait, not the specific
debug!("attempt_resolution: method_callee={:?}", method_callee);
for (method_arg_ty, self_arg_ty) in
- method_sig.inputs().iter().skip(1).zip(self.fn_sig.inputs())
+ iter::zip(method_sig.inputs().iter().skip(1), self.fn_sig.inputs())
{
fcx.demand_eqtype(self.call_expr.span, &self_arg_ty, &method_arg_ty);
}
use rustc_trait_selection::traits::error_reporting::InferCtxtExt as _;
use rustc_trait_selection::traits::{self, ObligationCauseCode};
+use std::iter;
use std::ops::ControlFlow;
pub fn check_wf_new(tcx: TyCtxt<'_>) {
}
let mut disr_vals: Vec<Discr<'tcx>> = Vec::with_capacity(vs.len());
- for ((_, discr), v) in def.discriminants(tcx).zip(vs) {
+ for ((_, discr), v) in iter::zip(def.discriminants(tcx), vs) {
// Check for duplicate discriminant values
if let Some(i) = disr_vals.iter().position(|&x| x.val == discr.val) {
let variant_did = def.variants[VariantIdx::new(i)].def_id;
// The liberated version of this signature should be a subtype
// of the liberated form of the expectation.
- for ((hir_ty, &supplied_ty), expected_ty) in decl
- .inputs
- .iter()
- .zip(supplied_sig.inputs().skip_binder()) // binder moved to (*) below
- .zip(expected_sigs.liberated_sig.inputs())
- // `liberated_sig` is E'.
- {
+ for ((hir_ty, &supplied_ty), expected_ty) in iter::zip(
+ iter::zip(
+ decl.inputs,
+ supplied_sig.inputs().skip_binder(), // binder moved to (*) below
+ ),
+ expected_sigs.liberated_sig.inputs(), // `liberated_sig` is E'.
+ ) {
// Instantiate (this part of..) S to S', i.e., with fresh variables.
let (supplied_ty, _) = self.infcx.replace_bound_vars_with_fresh_vars(
hir_ty.span,
use rustc_span::Span;
use rustc_trait_selection::traits::error_reporting::InferCtxtExt;
use rustc_trait_selection::traits::{self, ObligationCause, ObligationCauseCode, Reveal};
+use std::iter;
use super::{potentially_plural_count, FnCtxt, Inherited};
_ => bug!("{:?} is not a TraitItemKind::Fn", trait_m),
};
- impl_m_iter
- .zip(trait_m_iter)
+ iter::zip(impl_m_iter, trait_m_iter)
.find(|&(ref impl_arg, ref trait_arg)| {
match (&impl_arg.kind, &trait_arg.kind) {
(
let impl_iter = impl_sig.inputs().iter();
let trait_iter = trait_sig.inputs().iter();
- impl_iter
- .zip(trait_iter)
- .zip(impl_m_iter)
- .zip(trait_m_iter)
- .find_map(|(((&impl_arg_ty, &trait_arg_ty), impl_arg), trait_arg)| match infcx
+ iter::zip(iter::zip(impl_iter, trait_iter), iter::zip(impl_m_iter, trait_m_iter))
+ .find_map(|((&impl_arg_ty, &trait_arg_ty), (impl_arg, trait_arg))| match infcx
.at(&cause, param_env)
.sub(trait_arg_ty, impl_arg_ty)
{
GenericParamDefKind::Lifetime | GenericParamDefKind::Const { .. } => None,
});
for ((impl_def_id, impl_synthetic), (trait_def_id, trait_synthetic)) in
- impl_m_type_params.zip(trait_m_type_params)
+ iter::zip(impl_m_type_params, trait_m_type_params)
{
if impl_synthetic != trait_synthetic {
let impl_hir_id = tcx.hir().local_def_id_to_hir_id(impl_def_id.expect_local());
};
use std::collections::hash_map::Entry;
+use std::iter;
use std::slice;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
(
hir::ItemKind::OpaqueTy(hir::OpaqueTy { bounds: last_bounds, .. }),
hir::ItemKind::OpaqueTy(hir::OpaqueTy { bounds: exp_bounds, .. }),
- ) if last_bounds.iter().zip(exp_bounds.iter()).all(|(left, right)| {
+ ) if iter::zip(*last_bounds, *exp_bounds).all(|(left, right)| {
match (left, right) {
(
hir::GenericBound::Trait(tl, ml),
use rustc_trait_selection::traits::{self, ObligationCauseCode, StatementAsExpression};
use crate::structured_errors::StructuredDiagnostic;
+use std::iter;
use std::slice;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// All the input types from the fn signature must outlive the call
// so as to validate implied bounds.
- for (&fn_input_ty, arg_expr) in fn_inputs.iter().zip(args.iter()) {
+ for (&fn_input_ty, arg_expr) in iter::zip(fn_inputs, args) {
self.register_wf_obligation(fn_input_ty.into(), arg_expr.span, traits::MiscObligation);
}
self.is_hir_id_from_struct_pattern_shorthand_field(expr.hir_id, expr.span);
let methods = self.get_conversion_methods(expr.span, expected, found, expr.hir_id);
if let Ok(expr_text) = self.sess().source_map().span_to_snippet(expr.span) {
- let mut suggestions = iter::repeat(&expr_text)
- .zip(methods.iter())
+ let mut suggestions = iter::zip(iter::repeat(&expr_text), &methods)
.filter_map(|(receiver, method)| {
let method_call = format!(".{}()", method.ident);
if receiver.ends_with(&method_call) {
use rustc_span::Span;
use rustc_trait_selection::traits;
+use std::iter;
use std::ops::Deref;
struct ConfirmContext<'a, 'tcx> {
// We don't care about regions here.
.filter_map(|obligation| match obligation.predicate.kind().skip_binder() {
ty::PredicateKind::Trait(trait_pred, _) if trait_pred.def_id() == sized_def_id => {
- let span = predicates
- .predicates
- .iter()
- .zip(predicates.spans.iter())
+ let span = iter::zip(&predicates.predicates, &predicates.spans)
.find_map(
|(p, span)| {
if *p == obligation.predicate { Some(*span) } else { None }
use rustc_index::vec::Idx;
use rustc_target::abi::VariantIdx;
+use std::iter;
+
/// Describe the relationship between the paths of two places
/// eg:
/// - `foo` is ancestor of `foo.bar.baz`
let projections_b = &place_b.projections;
let same_initial_projections =
- projections_a.iter().zip(projections_b.iter()).all(|(proj_a, proj_b)| proj_a == proj_b);
+ iter::zip(projections_a, projections_b).all(|(proj_a, proj_b)| proj_a == proj_b);
if same_initial_projections {
// First min(n, m) projections are the same
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
use rustc_trait_selection::traits::{self, ObligationCause, ObligationCauseCode};
+use std::iter;
use std::ops::ControlFlow;
/// Helper type of a temporary returned by `.for_item(...)`.
debug!("check_where_clauses: predicates={:?}", predicates.predicates);
assert_eq!(predicates.predicates.len(), predicates.spans.len());
let wf_obligations =
- predicates.predicates.iter().zip(predicates.spans.iter()).flat_map(|(&p, &sp)| {
+ iter::zip(&predicates.predicates, &predicates.spans).flat_map(|(&p, &sp)| {
traits::wf::predicate_obligations(fcx, fcx.param_env, fcx.body_id, p, sp)
});
let sig = fcx.normalize_associated_types_in(span, sig);
let sig = fcx.tcx.liberate_late_bound_regions(def_id, sig);
- for (&input_ty, span) in sig.inputs().iter().zip(hir_decl.inputs.iter().map(|t| t.span)) {
- fcx.register_wf_obligation(input_ty.into(), span, ObligationCauseCode::MiscObligation);
+ for (&input_ty, ty) in iter::zip(sig.inputs(), hir_decl.inputs) {
+ fcx.register_wf_obligation(input_ty.into(), ty.span, ObligationCauseCode::MiscObligation);
}
implied_bounds.extend(sig.inputs());
use rustc_span::{Span, DUMMY_SP};
use rustc_target::spec::abi;
use rustc_trait_selection::traits::error_reporting::suggestions::NextTypeParamName;
+use std::iter;
mod item_bounds;
mod type_of;
.emit();
}
};
- for (input, ty) in decl.inputs.iter().zip(fty.inputs().skip_binder()) {
+ for (input, ty) in iter::zip(decl.inputs, fty.inputs().skip_binder()) {
check(&input, ty)
}
if let hir::FnRetTy::Return(ref ty) = decl.output {
use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::{self, adjustment, TyCtxt};
use rustc_target::abi::VariantIdx;
+use std::iter;
use crate::mem_categorization as mc;
}
hir::ExprKind::LlvmInlineAsm(ref ia) => {
- for (o, output) in ia.inner.outputs.iter().zip(ia.outputs_exprs) {
+ for (o, output) in iter::zip(&ia.inner.outputs, ia.outputs_exprs) {
if o.is_indirect {
self.consume_expr(output);
} else {
#![feature(format_args_capture)]
#![feature(in_band_lifetimes)]
#![feature(is_sorted)]
+#![feature(iter_zip)]
#![feature(nll)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![feature(try_blocks)]
self.data.shrink_to(min_capacity)
}
+ /// Returns a slice of all values in the underlying vector, in arbitrary
+ /// order.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(binary_heap_as_slice)]
+ /// use std::collections::BinaryHeap;
+ /// use std::io::{self, Write};
+ ///
+ /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]);
+ ///
+ /// io::sink().write(heap.as_slice()).unwrap();
+ /// ```
+ #[unstable(feature = "binary_heap_as_slice", issue = "83659")]
+ pub fn as_slice(&self) -> &[T] {
+ self.data.as_slice()
+ }
+
/// Consumes the `BinaryHeap` and returns the underlying vector
/// in arbitrary order.
///
// that the feature-gate isn't enabled. Ideally, it wouldn't check for the feature gate for docs
// from other crates, but since this can only appear for lang items, it doesn't seem worth fixing.
#![feature(intra_doc_pointers)]
+#![feature(iter_zip)]
#![feature(lang_items)]
#![feature(layout_for_ptr)]
#![feature(maybe_uninit_ref)]
/// # vector.push_all(&[1, 3, 5, 7, 9]);
/// # }
/// ```
+ #[inline]
pub fn reserve(&mut self, len: usize, additional: usize) {
- handle_reserve(self.try_reserve(len, additional));
+ // Callers expect this function to be very cheap when there is already sufficient capacity.
+ // Therefore, we move all the resizing and error-handling logic from grow_amortized and
+ // handle_reserve behind a call, while making sure that this function is likely to be
+ // inlined as just a comparison and a call if the comparison fails.
+ #[cold]
+ fn do_reserve_and_handle<T, A: Allocator>(
+ slf: &mut RawVec<T, A>,
+ len: usize,
+ additional: usize,
+ ) {
+ handle_reserve(slf.grow_amortized(len, additional));
+ }
+
+ if self.needs_to_grow(len, additional) {
+ do_reserve_and_handle(self, len, additional);
+ }
}
/// The same as `reserve`, but returns on errors instead of panicking or aborting.
///
/// To uppercase the value in-place, use [`make_ascii_uppercase`].
///
- /// [`make_ascii_uppercase`]: u8::make_ascii_uppercase
+ /// [`make_ascii_uppercase`]: slice::make_ascii_uppercase
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
pub fn to_ascii_uppercase(&self) -> Vec<u8> {
///
/// To lowercase the value in-place, use [`make_ascii_lowercase`].
///
- /// [`make_ascii_lowercase`]: u8::make_ascii_lowercase
+ /// [`make_ascii_lowercase`]: slice::make_ascii_lowercase
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
pub fn to_ascii_lowercase(&self) -> Vec<u8> {
}
}
-macro_rules! spezialize_for_lengths {
- ($separator:expr, $target:expr, $iter:expr; $($num:expr),*) => {
+macro_rules! specialize_for_lengths {
+ ($separator:expr, $target:expr, $iter:expr; $($num:expr),*) => {{
let mut target = $target;
let iter = $iter;
let sep_bytes = $separator;
$num => {
for s in iter {
copy_slice_and_advance!(target, sep_bytes);
- copy_slice_and_advance!(target, s.borrow().as_ref());
+ let content_bytes = s.borrow().as_ref();
+ copy_slice_and_advance!(target, content_bytes);
}
},
)*
// arbitrary non-zero size fallback
for s in iter {
copy_slice_and_advance!(target, sep_bytes);
- copy_slice_and_advance!(target, s.borrow().as_ref());
+ let content_bytes = s.borrow().as_ref();
+ copy_slice_and_advance!(target, content_bytes);
}
}
}
- };
+ target
+ }}
}
macro_rules! copy_slice_and_advance {
// if the `len` calculation overflows, we'll panic
// we would have run out of memory anyway and the rest of the function requires
// the entire Vec pre-allocated for safety
- let len = sep_len
+ let reserved_len = sep_len
.checked_mul(iter.len())
.and_then(|n| {
slice.iter().map(|s| s.borrow().as_ref().len()).try_fold(n, usize::checked_add)
})
.expect("attempt to join into collection with len > usize::MAX");
- // crucial for safety
- let mut result = Vec::with_capacity(len);
- assert!(result.capacity() >= len);
+ // prepare an uninitialized buffer
+ let mut result = Vec::with_capacity(reserved_len);
+ debug_assert!(result.capacity() >= reserved_len);
result.extend_from_slice(first.borrow().as_ref());
unsafe {
- {
- let pos = result.len();
- let target = result.get_unchecked_mut(pos..len);
-
- // copy separator and slices over without bounds checks
- // generate loops with hardcoded offsets for small separators
- // massive improvements possible (~ x2)
- spezialize_for_lengths!(sep, target, iter; 0, 1, 2, 3, 4);
- }
- result.set_len(len);
+ let pos = result.len();
+ let target = result.get_unchecked_mut(pos..reserved_len);
+
+ // copy separator and slices over without bounds checks
+ // generate loops with hardcoded offsets for small separators
+ // massive improvements possible (~ x2)
+ let remain = specialize_for_lengths!(sep, target, iter; 0, 1, 2, 3, 4);
+
+ // A weird borrow implementation may return different
+ // slices for the length calculation and the actual copy.
+ // Make sure we don't expose uninitialized bytes to the caller.
+ let result_len = reserved_len - remain.len();
+ result.set_len(result_len);
}
result
}
use core::fmt;
use core::hash::{Hash, Hasher};
use core::intrinsics::{arith_offset, assume};
-use core::iter::FromIterator;
+use core::iter::{self, FromIterator};
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::ops::{self, Index, IndexMut, Range, RangeBounds};
/// unspecified, and you should use the appropriate methods to modify these.
/// The pointer will never be null, so this type is null-pointer-optimized.
///
-/// However, the pointer may not actually point to allocated memory. In particular,
+/// However, the pointer might not actually point to allocated memory. In particular,
/// if you construct a `Vec` with capacity 0 via [`Vec::new`], [`vec![]`][`vec!`],
/// [`Vec::with_capacity(0)`][`Vec::with_capacity`], or by calling [`shrink_to_fit`]
/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized
/// types inside a `Vec`, it will not allocate space for them. *Note that in this case
-/// the `Vec` may not report a [`capacity`] of 0*. `Vec` will allocate if and only
+/// the `Vec` might not report a [`capacity`] of 0*. `Vec` will allocate if and only
/// if [`mem::size_of::<T>`]`() * capacity() > 0`. In general, `Vec`'s allocation
/// details are very subtle — if you intend to allocate memory using a `Vec`
/// and use it for something else (either to pass to unsafe code, or to build your
/// whatever is most efficient or otherwise easy to implement. Do not rely on
/// removed data to be erased for security purposes. Even if you drop a `Vec`, its
/// buffer may simply be reused by another `Vec`. Even if you zero a `Vec`'s memory
-/// first, that may not actually happen because the optimizer does not consider
+/// first, that might not actually happen because the optimizer does not consider
/// this a side-effect that must be preserved. There is one case which we will
/// not break, however: using `unsafe` code to write to the excess capacity,
/// and then increasing the length to match, is always valid.
// - caller guarantees that src is a valid index
let to_clone = unsafe { this.get_unchecked(src) };
- to_clone
- .iter()
- .cloned()
- .zip(spare.iter_mut())
- .map(|(src, dst)| dst.write(src))
+ iter::zip(to_clone, spare)
+ .map(|(src, dst)| dst.write(src.clone()))
// Note:
// - Element was just initialized with `MaybeUninit::write`, so it's ok to increase len
// - len is increased after each element to prevent leaks (see issue #82533)
#![feature(binary_heap_drain_sorted)]
#![feature(slice_ptr_get)]
#![feature(binary_heap_retain)]
+#![feature(binary_heap_as_slice)]
#![feature(inplace_iteration)]
#![feature(iter_map_while)]
#![feature(vecdeque_binary_search)]
test_join!("~~~~~a~~~~~bc", ["", "a", "bc"], "~~~~~");
}
+#[test]
+fn test_join_issue_80335() {
+    use core::{borrow::Borrow, cell::Cell};
+
+    // A pathological `Borrow` impl that returns a long slice on the first
+    // call and a shorter one on every call after that. `join` measures
+    // lengths up front and then borrows again while copying, so it must not
+    // trust the initial measurement — otherwise it would expose
+    // uninitialized bytes (issue #80335).
+    struct WeirdBorrow {
+        state: Cell<bool>,
+    }
+
+    impl Default for WeirdBorrow {
+        fn default() -> Self {
+            WeirdBorrow { state: Cell::new(false) }
+        }
+    }
+
+    impl Borrow<str> for WeirdBorrow {
+        fn borrow(&self) -> &str {
+            let state = self.state.get();
+            if state {
+                "0"
+            } else {
+                self.state.set(true);
+                "123456"
+            }
+        }
+    }
+
+    // By copy time every element's state is flipped, so each borrow yields
+    // "0"; the result must be truncated to the bytes actually written.
+    let arr: [WeirdBorrow; 3] = Default::default();
+    test_join!("0-0-0", arr, "-");
+}
+
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn test_unsafe_slice() {
rotate!(rotate_16_usize_5, 16, |i| [i; 5]);
rotate!(rotate_64_usize_4, 64, |i| [i; 4]);
rotate!(rotate_64_usize_5, 64, |i| [i; 5]);
+
+#[bench]
+fn fill_byte_sized(b: &mut Bencher) {
+    // A `Copy` newtype around `u8`: byte-sized but not literally `u8`, so
+    // this exercises the generic 1-byte `spec_fill` path (which lowers to
+    // `write_bytes`/memset) rather than any `u8`-specific specialization.
+    #[derive(Copy, Clone)]
+    struct NewType(u8);
+
+    let mut ary = [NewType(0); 1024];
+
+    b.iter(|| {
+        let slice = &mut ary[..];
+        // `black_box` on both the fill value and the call result keeps the
+        // optimizer from constant-folding the fill away.
+        black_box(slice.fill(black_box(NewType(42))));
+    });
+}
use crate::{
fmt,
- iter::{ExactSizeIterator, FusedIterator, TrustedLen, TrustedRandomAccess},
+ iter::{self, ExactSizeIterator, FusedIterator, TrustedLen, TrustedRandomAccess},
mem::{self, MaybeUninit},
ops::Range,
ptr,
let mut new = Self { data: MaybeUninit::uninit_array(), alive: 0..0 };
// Clone all alive elements.
- for (src, dst) in self.as_slice().iter().zip(&mut new.data) {
+ for (src, dst) in iter::zip(self.as_slice(), &mut new.data) {
// Write a clone into the new array, then update its alive range.
// If cloning panics, we'll correctly drop the previous items.
dst.write(src.clone());
use crate::cell::{Cell, Ref, RefCell, RefMut, UnsafeCell};
use crate::char::EscapeDebugExtArgs;
+use crate::iter;
use crate::marker::PhantomData;
use crate::mem;
use crate::num::flt2dec;
match args.fmt {
None => {
// We can use default formatting parameters for all arguments.
- for (arg, piece) in args.args.iter().zip(args.pieces.iter()) {
+ for (arg, piece) in iter::zip(args.args, args.pieces) {
formatter.buf.write_str(*piece)?;
(arg.formatter)(arg.value, &mut formatter)?;
idx += 1;
Some(fmt) => {
// Every spec has a corresponding argument that is preceded by
// a string piece.
- for (arg, piece) in fmt.iter().zip(args.pieces.iter()) {
+ for (arg, piece) in iter::zip(fmt, args.pieces) {
formatter.buf.write_str(*piece)?;
// SAFETY: arg and args.args come from the same Arguments,
// which guarantees the indexes are always within bounds.
#[unstable(feature = "trusted_random_access", issue = "none")]
pub use self::zip::TrustedRandomAccess;
+#[unstable(feature = "iter_zip", issue = "83574")]
+pub use self::zip::zip;
+
/// This trait provides transitive access to source-stage in an iterator-adapter pipeline
/// under the conditions that
/// * the iterator source `S` itself implements `SourceIter<Source = S>`
/// An iterator that iterates two other iterators simultaneously.
///
-/// This `struct` is created by [`Iterator::zip`]. See its documentation
-/// for more.
+/// This `struct` is created by [`zip`] or [`Iterator::zip`].
+/// See their documentation for more.
#[derive(Clone)]
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
}
}
+/// Converts the arguments to iterators and zips them.
+///
+/// See the documentation of [`Iterator::zip`] for more.
+///
+/// # Examples
+///
+/// ```
+/// #![feature(iter_zip)]
+/// use std::iter::zip;
+///
+/// let xs = [1, 2, 3];
+/// let ys = [4, 5, 6];
+/// for (x, y) in zip(&xs, &ys) {
+///     println!("x:{}, y:{}", x, y);
+/// }
+///
+/// // Nested zips are also possible:
+/// let zs = [7, 8, 9];
+/// for ((x, y), z) in zip(zip(&xs, &ys), &zs) {
+///     println!("x:{}, y:{}, z:{}", x, y, z);
+/// }
+/// ```
+#[unstable(feature = "iter_zip", issue = "83574")]
+pub fn zip<A, B>(a: A, b: B) -> Zip<A::IntoIter, B::IntoIter>
+where
+    A: IntoIterator,
+    B: IntoIterator,
+{
+    // Convert both arguments to iterators up front, then build the `Zip`
+    // through the internal `ZipImpl` constructor.
+    ZipImpl::new(a.into_iter(), b.into_iter())
+}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<A, B> Iterator for Zip<A, B>
where
DoubleEndedIterator, ExactSizeIterator, Extend, FromIterator, IntoIterator, Product, Sum,
};
+#[unstable(feature = "iter_zip", issue = "83574")]
+pub use self::adapters::zip;
#[stable(feature = "iter_cloned", since = "1.1.0")]
pub use self::adapters::Cloned;
#[stable(feature = "iter_copied", since = "1.36.0")]
),
on(
_Self = "[]",
- label = "borrow the array with `&` or call `.iter()` on it to iterate over it",
- note = "arrays are not iterators, but slices like the following are: `&[1, 2, 3]`"
+ label = "arrays do not yet implement `IntoIterator`; try using `std::array::IntoIter::new(arr)`",
+ note = "see <https://github.com/rust-lang/rust/pull/65819> for more details"
),
on(
_Self = "{integral}",
/// Create a new array of `MaybeUninit<T>` items, in an uninitialized state.
///
/// Note: in a future Rust version this method may become unnecessary
- /// when array literal syntax allows
- /// [repeating const expressions](https://github.com/rust-lang/rust/issues/49147).
- /// The example below could then use `let mut buf = [MaybeUninit::<u8>::uninit(); 32];`.
+ /// when Rust allows
+ /// [inline const expressions](https://github.com/rust-lang/rust/issues/76001).
+ /// The example below could then use `let mut buf = [const { MaybeUninit::<u8>::uninit() }; 32];`.
///
/// # Examples
///
/// Adds `other` to itself and returns its own mutable reference.
pub fn add<'a>(&'a mut self, other: &$name) -> &'a mut $name {
use crate::cmp;
+ use crate::iter;
use crate::num::bignum::FullOps;
let mut sz = cmp::max(self.size, other.size);
let mut carry = false;
- for (a, b) in self.base[..sz].iter_mut().zip(&other.base[..sz]) {
+ for (a, b) in iter::zip(&mut self.base[..sz], &other.base[..sz]) {
let (c, v) = (*a).full_add(*b, carry);
*a = v;
carry = c;
/// Subtracts `other` from itself and returns its own mutable reference.
pub fn sub<'a>(&'a mut self, other: &$name) -> &'a mut $name {
use crate::cmp;
+ use crate::iter;
use crate::num::bignum::FullOps;
let sz = cmp::max(self.size, other.size);
let mut noborrow = true;
- for (a, b) in self.base[..sz].iter_mut().zip(&other.base[..sz]) {
+ for (a, b) in iter::zip(&mut self.base[..sz], &other.base[..sz]) {
let (c, v) = (*a).full_add(!*b, noborrow);
*a = v;
noborrow = c;
#![stable(feature = "rust1", since = "1.0.0")]
+use crate::ascii;
use crate::intrinsics;
use crate::mem;
use crate::str::FromStr;
pub const fn is_ascii_control(&self) -> bool {
matches!(*self, b'\0'..=b'\x1F' | b'\x7F')
}
+
+ /// Returns an iterator that produces an escaped version of a `u8`,
+ /// treating it as an ASCII character.
+ ///
+ /// The behavior is identical to [`ascii::escape_default`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(inherent_ascii_escape)]
+ ///
+ /// assert_eq!("0", b'0'.escape_ascii().to_string());
+ /// assert_eq!("\\t", b'\t'.escape_ascii().to_string());
+ /// assert_eq!("\\r", b'\r'.escape_ascii().to_string());
+ /// assert_eq!("\\n", b'\n'.escape_ascii().to_string());
+ /// assert_eq!("\\'", b'\''.escape_ascii().to_string());
+ /// assert_eq!("\\\"", b'"'.escape_ascii().to_string());
+ /// assert_eq!("\\\\", b'\\'.escape_ascii().to_string());
+ /// assert_eq!("\\x9d", b'\x9d'.escape_ascii().to_string());
+ /// ```
+ #[unstable(feature = "inherent_ascii_escape", issue = "77174")]
+ #[inline]
+ pub fn escape_ascii(&self) -> ascii::EscapeDefault {
+ ascii::escape_default(*self)
+ }
}
#[lang = "u16"]
/// unaligned: 0x01020304,
/// };
///
+/// #[allow(unaligned_references)]
/// let v = unsafe {
/// // Here we attempt to take the address of a 32-bit integer which is not aligned.
/// let unaligned =
/// let v = 0x01020304;
/// let mut packed: Packed = unsafe { std::mem::zeroed() };
///
+/// #[allow(unaligned_references)]
/// let v = unsafe {
/// // Here we attempt to take the address of a 32-bit integer which is not aligned.
/// let unaligned =
//! Operations on ASCII `[u8]`.
+use crate::ascii;
+use crate::fmt::{self, Write};
+use crate::iter;
use crate::mem;
+use crate::ops;
#[lang = "slice_u8"]
#[cfg(not(test))]
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
pub fn eq_ignore_ascii_case(&self, other: &[u8]) -> bool {
- self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a.eq_ignore_ascii_case(b))
+ self.len() == other.len() && iter::zip(self, other).all(|(a, b)| a.eq_ignore_ascii_case(b))
}
/// Converts this slice to its ASCII upper case equivalent in-place.
byte.make_ascii_lowercase();
}
}
+
+ /// Returns an iterator that produces an escaped version of this slice,
+ /// treating it as an ASCII string.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(inherent_ascii_escape)]
+ ///
+ /// let s = b"0\t\r\n'\"\\\x9d";
+ /// let escaped = s.escape_ascii().to_string();
+ /// assert_eq!(escaped, "0\\t\\r\\n\\'\\\"\\\\\\x9d");
+ /// ```
+ #[unstable(feature = "inherent_ascii_escape", issue = "77174")]
+ pub fn escape_ascii(&self) -> EscapeAscii<'_> {
+ EscapeAscii { inner: self.iter().flat_map(EscapeByte) }
+ }
+}
+
+impl_fn_for_zst! {
+ #[derive(Clone)]
+ struct EscapeByte impl Fn = |byte: &u8| -> ascii::EscapeDefault {
+ ascii::escape_default(*byte)
+ };
+}
+
+/// An iterator over the escaped version of a byte slice.
+///
+/// This `struct` is created by the [`slice::escape_ascii`] method. See its
+/// documentation for more information.
+#[unstable(feature = "inherent_ascii_escape", issue = "77174")]
+#[derive(Clone)]
+pub struct EscapeAscii<'a> {
+ inner: iter::FlatMap<super::Iter<'a, u8>, ascii::EscapeDefault, EscapeByte>,
+}
+
+#[unstable(feature = "inherent_ascii_escape", issue = "77174")]
+impl<'a> iter::Iterator for EscapeAscii<'a> {
+ type Item = u8;
+ #[inline]
+ fn next(&mut self) -> Option<u8> {
+ self.inner.next()
+ }
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+ #[inline]
+ fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R
+ where
+ Fold: FnMut(Acc, Self::Item) -> R,
+ R: ops::Try<Ok = Acc>,
+ {
+ self.inner.try_fold(init, fold)
+ }
+ #[inline]
+ fn fold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc
+ where
+ Fold: FnMut(Acc, Self::Item) -> Acc,
+ {
+ self.inner.fold(init, fold)
+ }
+ #[inline]
+ fn last(mut self) -> Option<u8> {
+ self.next_back()
+ }
+}
+
+#[unstable(feature = "inherent_ascii_escape", issue = "77174")]
+impl<'a> iter::DoubleEndedIterator for EscapeAscii<'a> {
+ fn next_back(&mut self) -> Option<u8> {
+ self.inner.next_back()
+ }
+}
+#[unstable(feature = "inherent_ascii_escape", issue = "77174")]
+impl<'a> iter::ExactSizeIterator for EscapeAscii<'a> {}
+#[unstable(feature = "inherent_ascii_escape", issue = "77174")]
+impl<'a> iter::FusedIterator for EscapeAscii<'a> {}
+#[unstable(feature = "inherent_ascii_escape", issue = "77174")]
+impl<'a> fmt::Display for EscapeAscii<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.clone().try_for_each(|b| f.write_char(b as char))
+ }
+}
+#[unstable(feature = "inherent_ascii_escape", issue = "77174")]
+impl<'a> fmt::Debug for EscapeAscii<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.pad("EscapeAscii { .. }")
+ }
}
/// Returns `true` if any byte in the word `v` is nonascii (>= 128). Snarfed
#[unstable(feature = "slice_range", issue = "76393")]
pub use index::range;
+#[unstable(feature = "inherent_ascii_escape", issue = "77174")]
+pub use ascii::EscapeAscii;
+
#[lang = "slice"]
#[cfg(not(test))]
impl<T> [T] {
} else if cmp == Greater {
right = mid;
} else {
+ // SAFETY: same as the `get_unchecked` above
+ unsafe { crate::intrinsics::assume(mid < self.len()) };
return Ok(mid);
}
+use crate::mem::{size_of, transmute_copy};
use crate::ptr::write_bytes;
pub(super) trait SpecFill<T> {
}
impl<T: Copy> SpecFill<T> for [T] {
- default fn spec_fill(&mut self, value: T) {
- for item in self.iter_mut() {
- *item = value;
- }
- }
-}
-
-impl SpecFill<u8> for [u8] {
- fn spec_fill(&mut self, value: u8) {
- // SAFETY: this is slice of u8
- unsafe {
- let ptr = self.as_mut_ptr();
- let len = self.len();
- write_bytes(ptr, value, len);
- }
- }
-}
-
-impl SpecFill<i8> for [i8] {
- fn spec_fill(&mut self, value: i8) {
- // SAFETY: this is slice of i8
- unsafe {
- let ptr = self.as_mut_ptr();
- let len = self.len();
- write_bytes(ptr, value as u8, len);
- }
- }
-}
-
-impl SpecFill<bool> for [bool] {
- fn spec_fill(&mut self, value: bool) {
- // SAFETY: this is slice of bool
- unsafe {
- let ptr = self.as_mut_ptr();
- let len = self.len();
- write_bytes(ptr, value as u8, len);
+ fn spec_fill(&mut self, value: T) {
+ if size_of::<T>() == 1 {
+ // SAFETY: The size_of check above ensures that values are 1 byte wide, as required
+ // for the transmute and write_bytes
+ unsafe {
+ let value: u8 = transmute_copy(&value);
+ write_bytes(self.as_mut_ptr(), value, self.len());
+ }
+ } else {
+ for item in self.iter_mut() {
+ *item = value;
+ }
}
}
}
#[unstable(feature = "hash_raw_entry", issue = "56167")]
impl<K, V, S> Debug for RawEntryBuilderMut<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("RawEntryBuilder").finish()
+ f.debug_struct("RawEntryBuilder").finish_non_exhaustive()
}
}
f.debug_struct("RawOccupiedEntryMut")
.field("key", self.key())
.field("value", self.get())
- .finish()
+ .finish_non_exhaustive()
}
}
#[unstable(feature = "hash_raw_entry", issue = "56167")]
impl<K, V, S> Debug for RawVacantEntryMut<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("RawVacantEntryMut").finish()
+ f.debug_struct("RawVacantEntryMut").finish_non_exhaustive()
}
}
#[unstable(feature = "hash_raw_entry", issue = "56167")]
impl<K, V, S> Debug for RawEntryBuilder<'_, K, V, S> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("RawEntryBuilder").finish()
+ f.debug_struct("RawEntryBuilder").finish_non_exhaustive()
}
}
#[stable(feature = "debug_hash_map", since = "1.12.0")]
impl<K: Debug, V: Debug> Debug for OccupiedEntry<'_, K, V> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
+ f.debug_struct("OccupiedEntry")
+ .field("key", self.key())
+ .field("value", self.get())
+ .finish_non_exhaustive()
}
}
.field("key", self.entry.key())
.field("old_value", self.entry.get())
.field("new_value", &self.value)
- .finish()
+ .finish_non_exhaustive()
}
}
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn as_bytes(&self) -> &[u8] {
- &self.inner[..self.inner.len() - 1]
+ // SAFETY: CString has a length at least 1
+ unsafe { self.inner.get_unchecked(..self.inner.len() - 1) }
}
/// Equivalent to [`CString::as_bytes()`] except that the
#[stable(feature = "rust1", since = "1.0.0")]
pub fn to_bytes(&self) -> &[u8] {
let bytes = self.to_bytes_with_nul();
- &bytes[..bytes.len() - 1]
+ // SAFETY: to_bytes_with_nul returns slice with length at least 1
+ unsafe { bytes.get_unchecked(..bytes.len() - 1) }
}
/// Converts this C string to a byte slice containing the trailing 0 byte.
let cstr = CStr::from_bytes_with_nul(original).unwrap();
let _ = &cstr[original.len()..];
}
+
+#[test]
+fn c_string_from_empty_string() {
+    // An empty &str round-trips through CString: the byte view is empty,
+    // and the NUL-terminated view is exactly the single terminator byte.
+    let original = "";
+    let cstring = CString::new(original).unwrap();
+    assert_eq!(original.as_bytes(), cstring.as_bytes());
+    assert_eq!([b'\0'], cstring.as_bytes_with_nul());
+}
+
+#[test]
+fn c_str_from_empty_string() {
+    // A slice holding only the terminator parses as the empty C string:
+    // `to_bytes` strips the NUL, `to_bytes_with_nul` keeps it.
+    let original = b"\0";
+    let cstr = CStr::from_bytes_with_nul(original).unwrap();
+    assert_eq!([] as [u8; 0], cstr.to_bytes());
+    assert_eq!([b'\0'], cstr.to_bytes_with_nul());
+}
.field("modified", &self.modified())
.field("accessed", &self.accessed())
.field("created", &self.created())
- .finish()
+ .finish_non_exhaustive()
}
}
/// This function will return an error in the following situations, but is not
/// limited to just these cases:
///
-/// * The `from` path is not a file.
-/// * The `from` file does not exist.
-/// * The current process does not have the permission rights to access
+/// * `from` is neither a regular file nor a symlink to a regular file.
+/// * `from` does not exist.
+/// * The current process does not have the permission rights to read
/// `from` or write `to`.
///
/// # Examples
"buffer",
&format_args!("{}/{}", self.inner.buffer().len(), self.inner.capacity()),
)
- .finish()
+ .finish_non_exhaustive()
}
}
/// This function should maybe change to
/// `new_const<const MSG: &'static str>(kind: ErrorKind)`
/// in the future, when const generics allow that.
+ #[inline]
pub(crate) const fn new_const(kind: ErrorKind, message: &'static &'static str) -> Error {
Self { repr: Repr::SimpleMessage(kind, message) }
}
/// println!("last OS error: {:?}", Error::last_os_error());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
+ #[inline]
pub fn last_os_error() -> Error {
Error::from_raw_os_error(sys::os::errno() as i32)
}
/// # }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
+ #[inline]
pub fn from_raw_os_error(code: i32) -> Error {
Error { repr: Repr::Os(code) }
}
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
+ #[inline]
pub fn raw_os_error(&self) -> Option<i32> {
match self.repr {
Repr::Os(i) => Some(i),
/// }
/// ```
#[stable(feature = "io_error_inner", since = "1.3.0")]
+ #[inline]
pub fn get_ref(&self) -> Option<&(dyn error::Error + Send + Sync + 'static)> {
match self.repr {
Repr::Os(..) => None,
/// }
/// ```
#[stable(feature = "io_error_inner", since = "1.3.0")]
+ #[inline]
pub fn get_mut(&mut self) -> Option<&mut (dyn error::Error + Send + Sync + 'static)> {
match self.repr {
Repr::Os(..) => None,
/// }
/// ```
#[stable(feature = "io_error_inner", since = "1.3.0")]
+ #[inline]
pub fn into_inner(self) -> Option<Box<dyn error::Error + Send + Sync>> {
match self.repr {
Repr::Os(..) => None,
/// }
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
+ #[inline]
pub fn kind(&self) -> ErrorKind {
match self.repr {
Repr::Os(code) => sys::decode_error_kind(code),
///
/// [`chain`]: Read::chain
#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug)]
pub struct Chain<T, U> {
first: T,
second: U,
}
}
-#[stable(feature = "std_debug", since = "1.16.0")]
-impl<T: fmt::Debug, U: fmt::Debug> fmt::Debug for Chain<T, U> {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("Chain").field("t", &self.first).field("u", &self.second).finish()
- }
-}
-
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Read, U: Read> Read for Chain<T, U> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
#[unstable(feature = "once_cell", issue = "74465")]
impl<T: fmt::Debug, F> fmt::Debug for SyncLazy<T, F> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("Lazy").field("cell", &self.cell).field("init", &"..").finish()
+ f.debug_struct("Lazy").field("cell", &self.cell).finish_non_exhaustive()
}
}
#![feature(integer_atomics)]
#![feature(into_future)]
#![feature(intra_doc_pointers)]
+#![feature(iter_zip)]
#![feature(lang_items)]
#![feature(link_args)]
#![feature(linkage)]
.field("stdin", &self.stdin)
.field("stdout", &self.stdout)
.field("stderr", &self.stderr)
- .finish()
+ .finish_non_exhaustive()
}
}
#[stable(feature = "mpsc_debug", since = "1.8.0")]
impl<T> fmt::Debug for Sender<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("Sender").finish()
+ f.debug_struct("Sender").finish_non_exhaustive()
}
}
#[stable(feature = "mpsc_debug", since = "1.8.0")]
impl<T> fmt::Debug for SyncSender<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("SyncSender").finish()
+ f.debug_struct("SyncSender").finish_non_exhaustive()
}
}
#[stable(feature = "mpsc_debug", since = "1.8.0")]
impl<T> fmt::Debug for Receiver<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("Receiver").finish()
+ f.debug_struct("Receiver").finish_non_exhaustive()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + fmt::Debug> fmt::Debug for Mutex<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut d = f.debug_struct("Mutex");
match self.try_lock() {
- Ok(guard) => f.debug_struct("Mutex").field("data", &&*guard).finish(),
+ Ok(guard) => {
+ d.field("data", &&*guard);
+ }
Err(TryLockError::Poisoned(err)) => {
- f.debug_struct("Mutex").field("data", &&**err.get_ref()).finish()
+ d.field("data", &&**err.get_ref());
}
Err(TryLockError::WouldBlock) => {
struct LockedPlaceholder;
f.write_str("<locked>")
}
}
-
- f.debug_struct("Mutex").field("data", &LockedPlaceholder).finish()
+ d.field("data", &LockedPlaceholder);
}
}
+ d.field("poisoned", &self.poison.get());
+ d.finish_non_exhaustive()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + fmt::Debug> fmt::Debug for RwLock<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut d = f.debug_struct("RwLock");
match self.try_read() {
- Ok(guard) => f.debug_struct("RwLock").field("data", &&*guard).finish(),
+ Ok(guard) => {
+ d.field("data", &&*guard);
+ }
Err(TryLockError::Poisoned(err)) => {
- f.debug_struct("RwLock").field("data", &&**err.get_ref()).finish()
+ d.field("data", &&**err.get_ref());
}
Err(TryLockError::WouldBlock) => {
struct LockedPlaceholder;
f.write_str("<locked>")
}
}
-
- f.debug_struct("RwLock").field("data", &LockedPlaceholder).finish()
+ d.field("data", &LockedPlaceholder);
}
}
+ d.field("poisoned", &self.poison.get());
+ d.finish_non_exhaustive()
}
}
#[stable(feature = "std_debug", since = "1.16.0")]
impl<T: fmt::Debug> fmt::Debug for RwLockReadGuard<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("RwLockReadGuard").field("lock", &self.lock).finish()
+ (**self).fmt(f)
}
}
#[stable(feature = "std_debug", since = "1.16.0")]
impl<T: fmt::Debug> fmt::Debug for RwLockWriteGuard<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("RwLockWriteGuard").field("lock", &self.lock).finish()
+ (**self).fmt(f)
}
}
use crate::os::unix::ffi::OsStrExt;
use crate::path::Path;
use crate::sys::cvt;
-use crate::{ascii, fmt, io, mem};
+use crate::{ascii, fmt, io, iter, mem};
// FIXME(#43348): Make libc adapt #[doc(cfg(...))] so we don't need these fake definitions here?
#[cfg(not(unix))]
&"path must be shorter than SUN_LEN",
));
}
- for (dst, src) in addr.sun_path.iter_mut().zip(bytes.iter()) {
+ for (dst, src) in iter::zip(&mut addr.sun_path, bytes) {
*dst = *src as libc::c_char;
}
// null byte for pathname addresses is already there because we zeroed the
use crate::mem::{size_of, zeroed};
use crate::os::unix::io::RawFd;
use crate::path::Path;
-#[cfg(target_os = "android")]
-use crate::ptr::eq;
-use crate::ptr::read_unaligned;
+use crate::ptr::{eq, read_unaligned};
use crate::slice::from_raw_parts;
use crate::sys::net::Socket;
) -> io::Result<(usize, bool, io::Result<SocketAddr>)> {
unsafe {
let mut msg_name: libc::sockaddr_un = zeroed();
-
let mut msg: libc::msghdr = zeroed();
msg.msg_name = &mut msg_name as *mut _ as *mut _;
msg.msg_namelen = size_of::<libc::sockaddr_un>() as libc::socklen_t;
msg.msg_iov = bufs.as_mut_ptr().cast();
- msg.msg_control = ancillary.buffer.as_mut_ptr().cast();
cfg_if::cfg_if! {
if #[cfg(any(target_os = "android", all(target_os = "linux", target_env = "gnu")))] {
msg.msg_iovlen = bufs.len() as libc::size_t;
target_os = "emscripten",
target_os = "freebsd",
all(target_os = "linux", target_env = "musl",),
+ target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
))] {
msg.msg_controllen = ancillary.buffer.len() as libc::socklen_t;
}
}
+ // macos requires that the control pointer is NULL when the len is 0.
+ if msg.msg_controllen > 0 {
+ msg.msg_control = ancillary.buffer.as_mut_ptr().cast();
+ }
let count = socket.recv_msg(&mut msg)?;
msg.msg_name = &mut msg_name as *mut _ as *mut _;
msg.msg_namelen = msg_namelen;
msg.msg_iov = bufs.as_ptr() as *mut _;
- msg.msg_control = ancillary.buffer.as_mut_ptr().cast();
cfg_if::cfg_if! {
if #[cfg(any(target_os = "android", all(target_os = "linux", target_env = "gnu")))] {
msg.msg_iovlen = bufs.len() as libc::size_t;
target_os = "emscripten",
target_os = "freebsd",
all(target_os = "linux", target_env = "musl",),
+ target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
))] {
msg.msg_controllen = ancillary.length as libc::socklen_t;
}
}
+ // macos requires that the control pointer is NULL when the len is 0.
+ if msg.msg_controllen > 0 {
+ msg.msg_control = ancillary.buffer.as_mut_ptr().cast();
+ }
ancillary.truncated = false;
target_os = "emscripten",
target_os = "freebsd",
all(target_os = "linux", target_env = "musl",),
+ target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
))] {
while !cmsg.is_null() {
previous_cmsg = cmsg;
cmsg = libc::CMSG_NXTHDR(&msg, cmsg);
- cfg_if::cfg_if! {
- // Android return the same pointer if it is the last cmsg.
- // Therefore, check it if the previous pointer is the same as the current one.
- if #[cfg(target_os = "android")] {
- if cmsg == previous_cmsg {
- break;
- }
- }
+
+ // Most operating systems, but not Linux or emscripten, return the previous pointer
+ // when its length is zero. Therefore, check if the previous pointer is the same as
+ // the current one.
+ if eq(cmsg, previous_cmsg) {
+ break;
}
}
target_os = "emscripten",
target_os = "freebsd",
all(target_os = "linux", target_env = "musl",),
+ target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
))] {
target_os = "emscripten",
target_os = "freebsd",
all(target_os = "linux", target_env = "musl",),
+ target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
))] {
target_os = "emscripten",
target_os = "freebsd",
all(target_os = "linux", target_env = "musl",),
+ target_os = "macos",
target_os = "netbsd",
target_os = "openbsd",
))] {
};
let cmsg = cmsg.as_ref()?;
- cfg_if::cfg_if! {
- // Android return the same pointer if it is the last cmsg.
- // Therefore, check it if the previous pointer is the same as the current one.
- if #[cfg(target_os = "android")] {
- if let Some(current) = self.current {
- if eq(current, cmsg) {
- return None;
- }
- }
+
+ // Most operating systems, but not Linux or emscripten, return the previous pointer
+ // when its length is zero. Therefore, check if the previous pointer is the same as
+ // the current one.
+ if let Some(current) = self.current {
+ if eq(current, cmsg) {
+ return None;
}
}
self.buffer.len()
}
+ /// Returns `true` if the ancillary data is empty.
+ #[unstable(feature = "unix_socket_ancillary_data", issue = "76915")]
+ pub fn is_empty(&self) -> bool {
+ self.length == 0
+ }
+
/// Returns the number of used bytes.
#[unstable(feature = "unix_socket_ancillary_data", issue = "76915")]
pub fn len(&self) -> usize {
use crate::ffi::{CStr, CString, OsStr, OsString};
use crate::fmt;
-use crate::io::{self, Error, ErrorKind, IoSlice, IoSliceMut, SeekFrom};
+use crate::io::{self, Error, IoSlice, IoSliceMut, SeekFrom};
use crate::mem;
use crate::path::{Path, PathBuf};
use crate::ptr;
fn open_from(from: &Path) -> io::Result<(crate::fs::File, crate::fs::Metadata)> {
use crate::fs::File;
+ use crate::sys_common::fs::NOT_FILE_ERROR;
let reader = File::open(from)?;
let metadata = reader.metadata()?;
if !metadata.is_file() {
- return Err(Error::new_const(
- ErrorKind::InvalidInput,
- &"the source path is not an existing regular file",
- ));
+ return Err(NOT_FILE_ERROR);
}
Ok((reader, metadata))
}
impl fmt::Display for ExitStatus {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if let Some(code) = self.code() {
- write!(f, "exit code: {}", code)
+ write!(f, "exit status: {}", code)
} else if let Some(signal) = self.signal() {
if self.core_dumped() {
write!(f, "signal: {} (core dumped)", signal)
t(0x0000f, "signal: 15");
t(0x0008b, "signal: 11 (core dumped)");
- t(0x00000, "exit code: 0");
- t(0x0ff00, "exit code: 255");
+ t(0x00000, "exit status: 0");
+ t(0x0ff00, "exit status: 255");
// On MacOS, 0x0137f is WIFCONTINUED, not WIFSTOPPED. Probably *BSD is similar.
// https://github.com/rust-lang/rust/pull/82749#issuecomment-790525956
impl fmt::Debug for ReadDir {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("ReadDir").finish()
+ f.debug_struct("ReadDir").finish_non_exhaustive()
}
}
use crate::io::{self, Error, ErrorKind};
use crate::path::Path;
+pub(crate) const NOT_FILE_ERROR: Error = Error::new_const(
+ ErrorKind::InvalidInput,
+ &"the source path is neither a regular file nor a symlink to a regular file",
+);
+
pub fn copy(from: &Path, to: &Path) -> io::Result<u64> {
let mut reader = fs::File::open(from)?;
let metadata = reader.metadata()?;
if !metadata.is_file() {
- return Err(Error::new_const(
- ErrorKind::InvalidInput,
- &"the source path is not an existing regular file",
- ));
+ return Err(NOT_FILE_ERROR);
}
let mut writer = fs::File::create(to)?;
#[stable(feature = "rust1", since = "1.0.0")]
impl fmt::Debug for Thread {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("Thread").field("id", &self.id()).field("name", &self.name()).finish()
+ f.debug_struct("Thread")
+ .field("id", &self.id())
+ .field("name", &self.name())
+ .finish_non_exhaustive()
}
}
#[cfg(not(unix))]
pub fn get_exit_code(status: ExitStatus) -> Result<i32, String> {
- status.code().ok_or("received no exit code from child process".into())
+ status.code().ok_or_else(|| "received no exit code from child process".into())
}
#[cfg(unix)]
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let builder = run.builder;
- run.path("src/tools/linkchecker").default_condition(builder.config.docs)
+ let run = run.path("src/tools/linkchecker");
+ let hosts = &builder.hosts;
+ let targets = &builder.targets;
+
+ // if we have different hosts and targets, some things may be built for
+ // the host (e.g. rustc) and others for the target (e.g. std). The
+ // documentation built for each will contain broken links to
+ // docs built for the other platform (e.g. rustc linking to cargo)
+ if (hosts != targets) && !hosts.is_empty() && !targets.is_empty() {
+ panic!(
+ "Linkcheck currently does not support builds with different hosts and targets.
+You can skip linkcheck with --exclude src/tools/linkchecker"
+ );
+ }
+ run.default_condition(builder.config.docs)
}
fn make_run(run: RunConfig<'_>) {
cargo.add_rustc_lib_path(builder, compiler);
- if !try_run(builder, &mut cargo.into()) {
+ let mut cargo = Command::from(cargo);
+ if !try_run(builder, &mut cargo) {
+ return;
+ }
+
+ // # Run `cargo test` with `-Zmir-opt-level=4`.
+ cargo.env("MIRIFLAGS", "-O -Zmir-opt-level=4");
+ if !try_run(builder, &mut cargo) {
return;
}
RUN sh /scripts/sccache.sh
ENV RUST_CONFIGURE_ARGS --qemu-riscv64-rootfs=/tmp/rootfs
-ENV SCRIPT python3 ../x.py --stage 2 test --target riscv64gc-unknown-linux-gnu
+ENV SCRIPT python3 ../x.py --stage 2 test --host='' --target riscv64gc-unknown-linux-gnu
ENV NO_CHANGE_USER=1
# This is intended to make sure that both `--pass=check` continues to
# work.
#
- python2.7 ../x.py --stage 2 test src/test/ui --pass=check --target=i686-unknown-linux-gnu && \
+ python2.7 ../x.py --stage 2 test src/test/ui --pass=check \
+ --host='' --target=i686-unknown-linux-gnu && \
# Run tidy at the very end, after all the other tests.
python2.7 ../x.py --stage 2 test src/tools/tidy
system linker, and possibly other things.
[rustup]: https://github.com/rust-lang/rustup
-[rustup-cross]: https://github.com/rust-lang/rustup#cross-compilation
+[rustup-cross]: https://rust-lang.github.io/rustup/cross-compilation.html
trait_def_id, impl_def_id
);
let trait_ref = self.cx.tcx.impl_trait_ref(impl_def_id).unwrap();
- let may_apply = self.cx.tcx.infer_ctxt().enter(|infcx| {
- match trait_ref.self_ty().kind() {
- ty::Param(_) => {}
- _ => return false,
- }
-
+ let is_param = matches!(trait_ref.self_ty().kind(), ty::Param(_));
+ let may_apply = is_param && self.cx.tcx.infer_ctxt().enter(|infcx| {
let substs = infcx.fresh_substs_for_item(DUMMY_SP, item_def_id);
let ty = ty.subst(infcx.tcx, substs);
let param_env = param_env.subst(infcx.tcx, substs);
s
}
+#[derive(Debug)]
crate struct MarkdownLink {
pub kind: LinkType,
pub link: String,
self.scx.include_sources = match self.emit_source(&filename) {
Ok(()) => true,
Err(e) => {
- println!(
- "warning: source code was requested to be rendered, \
- but processing `{}` had an error: {}",
- filename, e
+ self.scx.tcx.sess.span_err(
+ item.span.inner(),
+ &format!("failed to render source code for `{}`: {}", filename, e),
);
- println!(" skipping rendering of source code");
false
}
};
}
let link = ori_link.link.replace("`", "");
+ let no_backticks_range = range_between_backticks(&ori_link);
let parts = link.split('#').collect::<Vec<_>>();
let (link, extra_fragment) = if parts.len() > 2 {
// A valid link can't have multiple #'s
};
// Parse and strip the disambiguator from the link, if present.
- let (mut path_str, disambiguator) = if let Ok((d, path)) = Disambiguator::from_str(&link) {
- (path.trim(), Some(d))
- } else {
- (link.trim(), None)
+ let (mut path_str, disambiguator) = match Disambiguator::from_str(&link) {
+ Ok(Some((d, path))) => (path.trim(), Some(d)),
+ Ok(None) => (link.trim(), None),
+ Err((err_msg, relative_range)) => {
+ let disambiguator_range = (no_backticks_range.start + relative_range.start)
+ ..(no_backticks_range.start + relative_range.end);
+ disambiguator_error(self.cx, &item, dox, disambiguator_range, &err_msg);
+ return None;
+ }
};
if path_str.contains(|ch: char| !(ch.is_alphanumeric() || ":_<>, !*&;".contains(ch))) {
}
}
+/// Get the section of a link between the backticks,
+/// or the whole link if there aren't any backticks.
+///
+/// For example:
+///
+/// ```text
+/// [`Foo`]
+/// ^^^
+/// ```
+fn range_between_backticks(ori_link: &MarkdownLink) -> Range<usize> {
+ let after_first_backtick_group = ori_link.link.bytes().position(|b| b != b'`').unwrap_or(0);
+ let before_second_backtick_group = ori_link
+ .link
+ .bytes()
+ .skip(after_first_backtick_group)
+ .position(|b| b == b'`')
+ .unwrap_or(ori_link.link.len());
+ (ori_link.range.start + after_first_backtick_group)
+ ..(ori_link.range.start + before_second_backtick_group)
+}
+
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
/// Disambiguators for a link.
enum Disambiguator {
}
}
- /// Given a link, parse and return `(disambiguator, path_str)`
- fn from_str(link: &str) -> Result<(Self, &str), ()> {
+ /// Given a link, parse and return `(disambiguator, path_str)`.
+ ///
+ /// This returns `Ok(Some(...))` if a disambiguator was found,
+ /// `Ok(None)` if no disambiguator was found, or `Err(...)`
+ /// if there was a problem with the disambiguator.
+ fn from_str(link: &str) -> Result<Option<(Self, &str)>, (String, Range<usize>)> {
use Disambiguator::{Kind, Namespace as NS, Primitive};
- let find_suffix = || {
- let suffixes = [
- ("!()", DefKind::Macro(MacroKind::Bang)),
- ("()", DefKind::Fn),
- ("!", DefKind::Macro(MacroKind::Bang)),
- ];
- for &(suffix, kind) in &suffixes {
- if let Some(link) = link.strip_suffix(suffix) {
- // Avoid turning `!` or `()` into an empty string
- if !link.is_empty() {
- return Ok((Kind(kind), link));
- }
- }
- }
- Err(())
- };
-
if let Some(idx) = link.find('@') {
let (prefix, rest) = link.split_at(idx);
let d = match prefix {
"value" => NS(Namespace::ValueNS),
"macro" => NS(Namespace::MacroNS),
"prim" | "primitive" => Primitive,
- _ => return find_suffix(),
+ _ => return Err((format!("unknown disambiguator `{}`", prefix), 0..idx)),
};
- Ok((d, &rest[1..]))
+ Ok(Some((d, &rest[1..])))
} else {
- find_suffix()
+ let suffixes = [
+ ("!()", DefKind::Macro(MacroKind::Bang)),
+ ("()", DefKind::Fn),
+ ("!", DefKind::Macro(MacroKind::Bang)),
+ ];
+ for &(suffix, kind) in &suffixes {
+ if let Some(link) = link.strip_suffix(suffix) {
+ // Avoid turning `!` or `()` into an empty string
+ if !link.is_empty() {
+ return Ok(Some((Kind(kind), link)));
+ }
+ }
+ }
+ Ok(None)
}
}
});
}
+/// Report an error in the link disambiguator.
+fn disambiguator_error(
+ cx: &DocContext<'_>,
+ item: &Item,
+ dox: &str,
+ link_range: Range<usize>,
+ msg: &str,
+) {
+ report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, item, dox, &link_range, |_diag, _sp| {});
+}
+
/// Report an ambiguity error, where there were multiple possible resolutions.
fn ambiguity_error(
cx: &DocContext<'_>,
--- /dev/null
+// min-llvm-version: 11.0.0
+// compile-flags: -O
+// ignore-debug: the debug assertions get in the way
+#![crate_type = "lib"]
+
+// Make sure no bounds checks are emitted when slicing or indexing
+// with an index from `binary_search`.
+
+// CHECK-LABEL: @binary_search_index_no_bounds_check
+#[no_mangle]
+pub fn binary_search_index_no_bounds_check(s: &[u8]) -> u8 {
+ // CHECK-NOT: panic
+ // CHECK-NOT: slice_index_len_fail
+ if let Ok(idx) = s.binary_search(&b'\\') {
+ s[idx]
+ } else {
+ 42
+ }
+}
-include ../../run-make-fulldeps/tools.mk
+# ignore-none no-std is not supported
+# ignore-nvptx64-nvidia-cuda FIXME: can't find crate for `std`
-# FIXME https://github.com/rust-lang/rust/issues/78911
-# ignore-32bit wrong/no cross compiler and sometimes we pass wrong gcc args (-m64)
+include ../../run-make-fulldeps/tools.mk
# Tests that we don't ICE during incremental compilation after modifying a
# function span such that its previous end line exceeds the number of lines
mkdir $(SRC)
mkdir $(INCR)
cp a.rs $(SRC)/main.rs
- $(RUSTC) -C incremental=$(INCR) $(SRC)/main.rs
+ $(RUSTC) -C incremental=$(INCR) $(SRC)/main.rs --target $(TARGET)
cp b.rs $(SRC)/main.rs
- $(RUSTC) -C incremental=$(INCR) $(SRC)/main.rs
+ $(RUSTC) -C incremental=$(INCR) $(SRC)/main.rs --target $(TARGET)
-include ../../run-make-fulldeps/tools.mk
+# ignore-riscv64 $(call RUN,foo) expects to run the target executable natively
+# so it won't work with remote-test-server
+# ignore-arm Another build using remote-test-server
+# ignore-none no-std is not supported
+# ignore-wasm32 FIXME: don't attempt to compile C++ to WASM
+# ignore-wasm64 FIXME: don't attempt to compile C++ to WASM
+# ignore-nvptx64-nvidia-cuda FIXME: can't find crate for `std`
+# ignore-musl FIXME: this makefile needs teaching how to use a musl toolchain
+# (see dist-i586-gnu-i586-i686-musl Dockerfile)
-# FIXME https://github.com/rust-lang/rust/issues/78911
-# ignore-32bit wrong/no cross compiler and sometimes we pass wrong gcc args (-m64)
+include ../../run-make-fulldeps/tools.mk
all: foo
$(call RUN,foo)
foo: foo.rs $(call NATIVE_STATICLIB,foo)
- $(RUSTC) $< -lfoo $(EXTRARSCXXFLAGS)
+ $(RUSTC) $< -lfoo $(EXTRARSCXXFLAGS) --target $(TARGET)
$(TMPDIR)/libfoo.o: foo.cpp
$(call COMPILE_OBJ_CXX,$@,$<)
include ../../run-make-fulldeps/tools.mk
-# FIXME https://github.com/rust-lang/rust/issues/78911
-# ignore-32bit wrong/no cross compiler and sometimes we pass wrong gcc args (-m64)
+# ignore-none no-std is not supported
+# ignore-nvptx64-nvidia-cuda FIXME: can't find crate for 'std'
# Regression test for issue #83112
# The generated test harness code contains spans with a dummy location,
mkdir $(SRC)/mydir
mkdir $(INCR)
cp main.rs $(SRC)/main.rs
- $(RUSTC) --test -C incremental=$(INCR) $(SRC)/main.rs
+ $(RUSTC) --test -C incremental=$(INCR) $(SRC)/main.rs --target $(TARGET)
mv $(SRC)/main.rs $(SRC)/mydir/main.rs
- $(RUSTC) --test -C incremental=$(INCR) $(SRC)/mydir/main.rs
+ $(RUSTC) --test -C incremental=$(INCR) $(SRC)/mydir/main.rs --target $(TARGET)
--- /dev/null
+#![deny(warnings)]
+
+//! Linking to [foo@banana] and [`bar@banana!()`].
+//~^ ERROR unknown disambiguator `foo`
+//~| ERROR unknown disambiguator `bar`
+//! And to [no disambiguator](@nectarine) and [another](@apricot!()).
+//~^ ERROR unknown disambiguator ``
+//~| ERROR unknown disambiguator ``
+//! And with weird backticks: [``foo@hello``] [foo`@`hello].
+//~^ ERROR unknown disambiguator `foo`
+//~| ERROR unknown disambiguator `foo`
+
+fn main() {}
--- /dev/null
+error: unknown disambiguator `foo`
+ --> $DIR/unknown-disambiguator.rs:3:17
+ |
+LL | //! Linking to [foo@banana] and [`bar@banana!()`].
+ | ^^^
+ |
+note: the lint level is defined here
+ --> $DIR/unknown-disambiguator.rs:1:9
+ |
+LL | #![deny(warnings)]
+ | ^^^^^^^^
+ = note: `#[deny(rustdoc::broken_intra_doc_links)]` implied by `#[deny(warnings)]`
+
+error: unknown disambiguator `bar`
+ --> $DIR/unknown-disambiguator.rs:3:35
+ |
+LL | //! Linking to [foo@banana] and [`bar@banana!()`].
+ | ^^^
+
+error: unknown disambiguator `foo`
+ --> $DIR/unknown-disambiguator.rs:9:34
+ |
+LL | //! And with weird backticks: [``foo@hello``] [foo`@`hello].
+ | ^^^
+
+error: unknown disambiguator `foo`
+ --> $DIR/unknown-disambiguator.rs:9:48
+ |
+LL | //! And with weird backticks: [``foo@hello``] [foo`@`hello].
+ | ^^^
+
+error: unknown disambiguator ``
+ --> $DIR/unknown-disambiguator.rs:6:31
+ |
+LL | //! And to [no disambiguator](@nectarine) and [another](@apricot!()).
+ | ^
+
+error: unknown disambiguator ``
+ --> $DIR/unknown-disambiguator.rs:6:57
+ |
+LL | //! And to [no disambiguator](@nectarine) and [another](@apricot!()).
+ | ^
+
+error: aborting due to 6 previous errors
+
let u1 = U { a: I(0) };
let u2 = U { a: I(1) };
let p = P { a: &2, b: &3 };
- let _ = &p.b; //~ WARN E0133
+ let _ = &p.b; //~ WARN reference to packed field
//~^ WARN will become a hard error
let _ = u1.a; // #53114: should eventually signal error as well
let _ = &u2.a; //~ ERROR [E0133]
// variation on above with `_` in substructure
- let (_,) = (&p.b,); //~ WARN E0133
+ let (_,) = (&p.b,); //~ WARN reference to packed field
//~^ WARN will become a hard error
let (_,) = (u1.a,); //~ ERROR [E0133]
let (_,) = (&u2.a,); //~ ERROR [E0133]
let u1 = U { a: I(0) };
let u2 = U { a: I(1) };
let p = P { a: &2, b: &3 };
- match &p.b { _ => { } } //~ WARN E0133
+ match &p.b { _ => { } } //~ WARN reference to packed field
//~^ WARN will become a hard error
match u1.a { _ => { } } //~ ERROR [E0133]
match &u2.a { _ => { } } //~ ERROR [E0133]
// variation on above with `_` in substructure
- match (&p.b,) { (_,) => { } } //~ WARN E0133
+ match (&p.b,) { (_,) => { } } //~ WARN reference to packed field
//~^ WARN will become a hard error
match (u1.a,) { (_,) => { } } //~ ERROR [E0133]
match (&u2.a,) { (_,) => { } } //~ ERROR [E0133]
-warning: borrow of packed field is unsafe and requires unsafe function or block (error E0133)
+warning: reference to packed field is unaligned
--> $DIR/issue-53114-safety-checks.rs:23:13
|
LL | let _ = &p.b;
| ^^^^
|
- = note: `#[warn(safe_packed_borrows)]` on by default
+ = note: `#[warn(unaligned_references)]` on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
- = note: fields of packed structs might be misaligned: dereferencing a misaligned pointer or even just creating a misaligned reference is undefined behavior
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
+
+warning: reference to packed field is unaligned
+ --> $DIR/issue-53114-safety-checks.rs:29:17
+ |
+LL | let (_,) = (&p.b,);
+ | ^^^^
+ |
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
+
+warning: reference to packed field is unaligned
+ --> $DIR/issue-53114-safety-checks.rs:39:11
+ |
+LL | match &p.b { _ => { } }
+ | ^^^^
+ |
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
+
+warning: reference to packed field is unaligned
+ --> $DIR/issue-53114-safety-checks.rs:45:12
+ |
+LL | match (&p.b,) { (_,) => { } }
+ | ^^^^
+ |
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
error[E0133]: access to union field is unsafe and requires unsafe function or block
--> $DIR/issue-53114-safety-checks.rs:26:13
|
= note: the field may not be properly initialized: using uninitialized data will cause undefined behavior
-warning: borrow of packed field is unsafe and requires unsafe function or block (error E0133)
- --> $DIR/issue-53114-safety-checks.rs:29:17
- |
-LL | let (_,) = (&p.b,);
- | ^^^^
- |
- = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
- = note: fields of packed structs might be misaligned: dereferencing a misaligned pointer or even just creating a misaligned reference is undefined behavior
-
error[E0133]: access to union field is unsafe and requires unsafe function or block
--> $DIR/issue-53114-safety-checks.rs:31:17
|
|
= note: the field may not be properly initialized: using uninitialized data will cause undefined behavior
-warning: borrow of packed field is unsafe and requires unsafe function or block (error E0133)
- --> $DIR/issue-53114-safety-checks.rs:39:11
- |
-LL | match &p.b { _ => { } }
- | ^^^^
- |
- = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
- = note: fields of packed structs might be misaligned: dereferencing a misaligned pointer or even just creating a misaligned reference is undefined behavior
-
error[E0133]: access to union field is unsafe and requires unsafe function or block
--> $DIR/issue-53114-safety-checks.rs:41:11
|
|
= note: the field may not be properly initialized: using uninitialized data will cause undefined behavior
-warning: borrow of packed field is unsafe and requires unsafe function or block (error E0133)
- --> $DIR/issue-53114-safety-checks.rs:45:12
- |
-LL | match (&p.b,) { (_,) => { } }
- | ^^^^
- |
- = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
- = note: fields of packed structs might be misaligned: dereferencing a misaligned pointer or even just creating a misaligned reference is undefined behavior
-
error[E0133]: access to union field is unsafe and requires unsafe function or block
--> $DIR/issue-53114-safety-checks.rs:47:12
|
let c = || {
println!("{}", foo.x);
- //~^ WARNING: borrow of packed field is unsafe and requires unsafe function or block
+ //~^ WARNING: reference to packed field is unaligned
//~| WARNING: this was previously accepted by the compiler but is being phased out
let _z = foo.x;
};
= note: `#[warn(incomplete_features)]` on by default
= note: see issue #53488 <https://github.com/rust-lang/rust/issues/53488> for more information
-warning: borrow of packed field is unsafe and requires unsafe function or block (error E0133)
+warning: reference to packed field is unaligned
--> $DIR/repr_packed.rs:25:24
|
LL | println!("{}", foo.x);
| ^^^^^
|
- = note: `#[warn(safe_packed_borrows)]` on by default
+ = note: `#[warn(unaligned_references)]` on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
- = note: fields of packed structs might be misaligned: dereferencing a misaligned pointer or even just creating a misaligned reference is undefined behavior
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
warning: 2 warnings emitted
// run-pass
// ignore-windows - this is a unix-specific test
-// ignore-cloudabi
// ignore-emscripten
// ignore-sgx
// ignore-musl - returns dummy result for _SC_NGROUPS_MAX
#![allow(dead_code)]
struct NestedArrays<'a, const N: usize, A: 'a, const M: usize, T:'a =u32> {
- //[min]~^ ERROR type parameters must be declared prior to const parameters
- args: &'a [&'a [T; M]; N],
- specifier: A,
+ //[min]~^ ERROR type parameters must be declared prior to const parameters
+ args: &'a [&'a [T; M]; N],
+ specifier: A,
}
fn main() {
- let array = [1, 2, 3];
- let nest = [&array];
- let _ = NestedArrays {
- args: &nest,
- specifier: true,
- };
+ let array = [1, 2, 3];
+ let nest = [&array];
+ let _ = NestedArrays {
+ args: &nest,
+ specifier: true,
+ };
}
#[stable(feature = "const_default_unstable", since="none")]
pub struct ConstDefaultStable<const N: usize = {
- #[stable(feature = "const_default_unstable_val", since="none")]
- 3
+ #[stable(feature = "const_default_unstable_val", since="none")]
+ 3
}>;
fn main() {}
pub struct Example4<const N: usize=13, const M: usize=4>;
fn main() {
- let e: Example::<13> = ();
- //~^ Error: mismatched types
- let e: Example2::<u32, 13> = ();
- //~^ Error: mismatched types
- let e: Example3::<13, u32> = ();
- //~^ Error: mismatched types
- let e: Example3::<7> = ();
- //~^ Error: mismatched types
- // FIXME(const_generics_defaults): There should be a note for the error below, but it is
- // missing.
- let e: Example4::<7> = ();
- //~^ Error: mismatched types
+ let e: Example::<13> = ();
+ //~^ Error: mismatched types
+ let e: Example2::<u32, 13> = ();
+ //~^ Error: mismatched types
+ let e: Example3::<13, u32> = ();
+ //~^ Error: mismatched types
+ let e: Example3::<7> = ();
+ //~^ Error: mismatched types
+ // FIXME(const_generics_defaults): There should be a note for the error below, but it is
+ // missing.
+ let e: Example4::<7> = ();
+ //~^ Error: mismatched types
}
error[E0308]: mismatched types
- --> $DIR/mismatch.rs:11:26
+ --> $DIR/mismatch.rs:11:28
|
-LL | let e: Example::<13> = ();
- | ------------- ^^ expected struct `Example`, found `()`
- | |
- | expected due to this
+LL | let e: Example::<13> = ();
+ | ------------- ^^ expected struct `Example`, found `()`
+ | |
+ | expected due to this
error[E0308]: mismatched types
- --> $DIR/mismatch.rs:13:32
+ --> $DIR/mismatch.rs:13:34
|
-LL | let e: Example2::<u32, 13> = ();
- | ------------------- ^^ expected struct `Example2`, found `()`
- | |
- | expected due to this
+LL | let e: Example2::<u32, 13> = ();
+ | ------------------- ^^ expected struct `Example2`, found `()`
+ | |
+ | expected due to this
|
= note: expected struct `Example2`
found unit type `()`
error[E0308]: mismatched types
- --> $DIR/mismatch.rs:15:32
+ --> $DIR/mismatch.rs:15:34
|
-LL | let e: Example3::<13, u32> = ();
- | ------------------- ^^ expected struct `Example3`, found `()`
- | |
- | expected due to this
+LL | let e: Example3::<13, u32> = ();
+ | ------------------- ^^ expected struct `Example3`, found `()`
+ | |
+ | expected due to this
|
= note: expected struct `Example3`
found unit type `()`
error[E0308]: mismatched types
- --> $DIR/mismatch.rs:17:26
+ --> $DIR/mismatch.rs:17:28
|
-LL | let e: Example3::<7> = ();
- | ------------- ^^ expected struct `Example3`, found `()`
- | |
- | expected due to this
+LL | let e: Example3::<7> = ();
+ | ------------- ^^ expected struct `Example3`, found `()`
+ | |
+ | expected due to this
|
= note: expected struct `Example3<7_usize>`
found unit type `()`
error[E0308]: mismatched types
- --> $DIR/mismatch.rs:21:26
+ --> $DIR/mismatch.rs:21:28
|
-LL | let e: Example4::<7> = ();
- | ------------- ^^ expected struct `Example4`, found `()`
- | |
- | expected due to this
+LL | let e: Example4::<7> = ();
+ | ------------- ^^ expected struct `Example4`, found `()`
+ | |
+ | expected due to this
error: aborting due to 5 previous errors
//[min]~^ ERROR type parameters must be declared prior
fn main() {
- let _: A<3> = A(0);
+ let _: A<3> = A(0);
}
--- /dev/null
+// Regression test for #82792.
+
+// run-pass
+
+#![feature(const_generics_defaults)]
+#![allow(incomplete_features)]
+
+#[repr(C)]
+pub struct Loaf<T: Sized, const N: usize = 1usize> {
+ head: [T; N],
+ slice: [T],
+}
+
+fn main() {}
#![allow(dead_code)]
struct FixedOutput<'a, const N: usize, T=u32> {
- //[min]~^ ERROR type parameters must be declared prior to const parameters
- out: &'a [T; N],
+ //[min]~^ ERROR type parameters must be declared prior to const parameters
+ out: &'a [T; N],
}
trait FixedOutputter {
- fn out(&self) -> FixedOutput<'_, 10>;
+ fn out(&self) -> FixedOutput<'_, 10>;
}
fn main() {}
--- /dev/null
+warning: the feature `const_generics` is incomplete and may not be safe to use and/or cause compiler crashes
+ --> $DIR/issue-56445-1.rs:3:27
+ |
+LL | #![cfg_attr(full, feature(const_generics))]
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `#[warn(incomplete_features)]` on by default
+ = note: see issue #44580 <https://github.com/rust-lang/rust/issues/44580> for more information
+
+error[E0771]: use of non-static lifetime `'a` in const generic
+ --> $DIR/issue-56445-1.rs:8:26
+ |
+LL | struct Bug<'a, const S: &'a str>(PhantomData<&'a ()>);
+ | ^^
+ |
+ = note: for more information, see issue #74052 <https://github.com/rust-lang/rust/issues/74052>
+
+error: aborting due to previous error; 1 warning emitted
+
+For more information about this error, try `rustc --explain E0771`.
--- /dev/null
+error[E0771]: use of non-static lifetime `'a` in const generic
+ --> $DIR/issue-56445-1.rs:8:26
+ |
+LL | struct Bug<'a, const S: &'a str>(PhantomData<&'a ()>);
+ | ^^
+ |
+ = note: for more information, see issue #74052 <https://github.com/rust-lang/rust/issues/74052>
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0771`.
--- /dev/null
+// Regression test for https://github.com/rust-lang/rust/issues/56445#issuecomment-518402995.
+// revisions: full min
+#![cfg_attr(full, feature(const_generics))] //[full]~WARN the feature `const_generics` is incomplete
+#![crate_type = "lib"]
+
+use std::marker::PhantomData;
+
+struct Bug<'a, const S: &'a str>(PhantomData<&'a ()>);
+//~^ ERROR: use of non-static lifetime `'a` in const generic
+
+impl Bug<'_, ""> {}
--- /dev/null
+// Regression test for https://github.com/rust-lang/rust/issues/56445#issuecomment-502095133
+struct OnDiskDirEntry<'a> { _s: &'a usize }
+
+impl<'a> OnDiskDirEntry<'a> {
+ const LFN_FRAGMENT_LEN: usize = 2;
+
+ fn lfn_contents(&self) -> [char; Self::LFN_FRAGMENT_LEN] { loop { } }
+ //~^ ERROR: generic `Self` types are currently not permitted in anonymous constants
+}
+
+fn main() {}
--- /dev/null
+error: generic `Self` types are currently not permitted in anonymous constants
+ --> $DIR/issue-56445-2.rs:7:38
+ |
+LL | fn lfn_contents(&self) -> [char; Self::LFN_FRAGMENT_LEN] { loop { } }
+ | ^^^^
+ |
+note: not a concrete type
+ --> $DIR/issue-56445-2.rs:4:10
+ |
+LL | impl<'a> OnDiskDirEntry<'a> {
+ | ^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
--- /dev/null
+// Regression test for https://github.com/rust-lang/rust/issues/56445#issuecomment-524494170
+pub struct Memory<'rom> {
+ rom: &'rom [u8],
+ ram: [u8; Self::SIZE],
+ //~^ ERROR: generic `Self` types are currently not permitted in anonymous constants
+}
+
+impl<'rom> Memory<'rom> {
+ pub const SIZE: usize = 0x8000;
+}
+
+fn main() {}
--- /dev/null
+error: generic `Self` types are currently not permitted in anonymous constants
+ --> $DIR/issue-56445-3.rs:4:15
+ |
+LL | ram: [u8; Self::SIZE],
+ | ^^^^
+
+error: aborting due to previous error
+
+++ /dev/null
-warning: the feature `const_generics` is incomplete and may not be safe to use and/or cause compiler crashes
- --> $DIR/issue-56445.rs:3:27
- |
-LL | #![cfg_attr(full, feature(const_generics))]
- | ^^^^^^^^^^^^^^
- |
- = note: `#[warn(incomplete_features)]` on by default
- = note: see issue #44580 <https://github.com/rust-lang/rust/issues/44580> for more information
-
-error[E0771]: use of non-static lifetime `'a` in const generic
- --> $DIR/issue-56445.rs:8:26
- |
-LL | struct Bug<'a, const S: &'a str>(PhantomData<&'a ()>);
- | ^^
- |
- = note: for more information, see issue #74052 <https://github.com/rust-lang/rust/issues/74052>
-
-error: aborting due to previous error; 1 warning emitted
-
-For more information about this error, try `rustc --explain E0771`.
+++ /dev/null
-error[E0771]: use of non-static lifetime `'a` in const generic
- --> $DIR/issue-56445.rs:8:26
- |
-LL | struct Bug<'a, const S: &'a str>(PhantomData<&'a ()>);
- | ^^
- |
- = note: for more information, see issue #74052 <https://github.com/rust-lang/rust/issues/74052>
-
-error: aborting due to previous error
-
-For more information about this error, try `rustc --explain E0771`.
+++ /dev/null
-// Regression test for https://github.com/rust-lang/rust/issues/56445#issuecomment-518402995.
-// revisions: full min
-#![cfg_attr(full, feature(const_generics))] //[full]~WARN the feature `const_generics` is incomplete
-#![crate_type = "lib"]
-
-use std::marker::PhantomData;
-
-struct Bug<'a, const S: &'a str>(PhantomData<&'a ()>);
-//~^ ERROR: use of non-static lifetime `'a` in const generic
-
-impl Bug<'_, ""> {}
-// run-pass
#![feature(const_generics)]
#![allow(incomplete_features, unused_braces)]
fn main() {
Foo::foo();
+ //~^ ERROR the function or associated item
}
--- /dev/null
+error[E0599]: the function or associated item `foo` exists for struct `Foo<{_: usize}>`, but its trait bounds were not satisfied
+ --> $DIR/issue-69654-run-pass.rs:16:10
+ |
+LL | struct Foo<const N: usize> {}
+ | -------------------------- function or associated item `foo` not found for this
+...
+LL | Foo::foo();
+ | ^^^ function or associated item cannot be called on `Foo<{_: usize}>` due to unsatisfied trait bounds
+ |
+ = note: the following trait bounds were not satisfied:
+ `[u8; _]: Bar<[(); _]>`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0599`.
fn main() {
Foo::foo();
+ //~^ ERROR the function or associated item
}
LL | impl<T> Bar<T> for [u8; T] {}
| ^ not a value
-error: aborting due to previous error
+error[E0599]: the function or associated item `foo` exists for struct `Foo<{_: usize}>`, but its trait bounds were not satisfied
+ --> $DIR/issue-69654.rs:17:10
+ |
+LL | struct Foo<const N: usize> {}
+ | -------------------------- function or associated item `foo` not found for this
+...
+LL | Foo::foo();
+ | ^^^ function or associated item cannot be called on `Foo<{_: usize}>` due to unsatisfied trait bounds
+ |
+ = note: the following trait bounds were not satisfied:
+ `[u8; _]: Bar<[(); _]>`
+
+error: aborting due to 2 previous errors
-For more information about this error, try `rustc --explain E0423`.
+Some errors have detailed explanations: E0423, E0599.
+For more information about an error, try `rustc --explain E0423`.
-// build-pass
#![feature(const_generics)]
#![allow(incomplete_features)]
A<N>: Bar<N>;
fn main() {
- let _ = A;
+ let _ = A; //~ERROR the trait bound
}
--- /dev/null
+error[E0277]: the trait bound `A<{_: usize}>: Bar<{_: usize}>` is not satisfied
+ --> $DIR/unused-substs-1.rs:12:13
+ |
+LL | / struct A<const N: usize>
+LL | | where
+LL | | A<N>: Bar<N>;
+ | |_________________- required by `A`
+...
+LL | let _ = A;
+ | ^ the trait `Bar<{_: usize}>` is not implemented for `A<{_: usize}>`
+ |
+ = help: the following implementations were found:
+ <A<7_usize> as Bar<N>>
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0277`.
-// check-pass
#![feature(const_generics)]
#![allow(incomplete_features)]
// `t` is `ty::Infer(TyVar(_#1t))`
// `foo` contains `ty::Infer(TyVar(_#1t))` in its substs
t = foo;
+ //~^ ERROR mismatched types
+ //~| NOTE cyclic type
}
--- /dev/null
+error[E0308]: mismatched types
+ --> $DIR/unused-substs-2.rs:25:9
+ |
+LL | t = foo;
+ | ^^^ cyclic type of infinite size
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0308`.
-// check-pass
#![feature(const_generics)]
#![allow(incomplete_features)]
// `t` is `ty::Infer(TyVar(_#1t))`
// `foo` contains `ty::Infer(TyVar(_#1t))` in its substs
t = foo;
+ //~^ ERROR mismatched types
+ //~| NOTE cyclic type
}
--- /dev/null
+error[E0308]: mismatched types
+ --> $DIR/unused-substs-3.rs:16:9
+ |
+LL | t = foo;
+ | ^^^
+ | |
+ | cyclic type of infinite size
+ | help: try using a conversion method: `foo.to_vec()`
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0308`.
-// build-pass
#![feature(const_generics)]
#![allow(incomplete_features)]
fn main() {
let mut arr = Default::default();
- arr = bind(arr);
+ arr = bind(arr); //~ ERROR mismatched type
}
--- /dev/null
+error[E0308]: mismatched types
+ --> $DIR/unused-substs-4.rs:10:11
+ |
+LL | arr = bind(arr);
+ | ^^^^^^^^^ encountered a self-referencing constant
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0308`.
-#![deny(safe_packed_borrows)]
+#![deny(unaligned_references)]
// check that derive on a packed struct with non-Copy fields
// correctly. This can't be made to work perfectly because
note: the lint level is defined here
--> $DIR/deriving-with-repr-packed.rs:1:9
|
-LL | #![deny(safe_packed_borrows)]
- | ^^^^^^^^^^^^^^^^^^^
+LL | #![deny(unaligned_references)]
+ | ^^^^^^^^^^^^^^^^^^^^
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
= note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: `#[derive]` can't be used on a `#[repr(packed)]` struct with type or const parameters (error E0133)
| ^^^^^^^^^
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
= note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: `#[derive]` can't be used on a `#[repr(packed)]` struct that does not derive Copy (error E0133)
| ^^^^^^^^^
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
= note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: `#[derive]` can't be used on a `#[repr(packed)]` struct that does not derive Copy (error E0133)
| ^^^^^^^^^
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
= note: this error originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 4 previous errors
}
// kill this test when that turns to a hard error
-#[allow(safe_packed_borrows)]
+#[allow(unaligned_references)]
fn main() {
let good = Good { data: &0, data2: [&0, &0], aligned: [0; 32] };
- unsafe {
- let _ = &good.data; // ok
- let _ = &good.data2[0]; // ok
- }
+ let _ = &good.data; // ok
+ let _ = &good.data2[0]; // ok
let _ = &good.data;
let _ = &good.data2[0];
aligned: [u8; 32],
}
-#[deny(safe_packed_borrows)]
+#[deny(unaligned_references)]
fn main() {
let good = Good {
data: &0,
aligned: [0; 32]
};
- unsafe {
- let _ = &good.data; // ok
- let _ = &good.data2[0]; // ok
- }
+ let _ = &good.data; //~ ERROR reference to packed field
+ //~| hard error
+ let _ = &good.data2[0]; //~ ERROR reference to packed field
+ //~| hard error
- let _ = &good.data; //~ ERROR borrow of packed field is unsafe
+ let _ = &good.data; //~ ERROR reference to packed field
//~| hard error
- let _ = &good.data2[0]; //~ ERROR borrow of packed field is unsafe
+ let _ = &good.data2[0]; //~ ERROR reference to packed field
//~| hard error
let _ = &*good.data; // ok, behind a pointer
let _ = &good.aligned; // ok, has align 1
-error: borrow of packed field is unsafe and requires unsafe function or block (error E0133)
- --> $DIR/issue-27060.rs:21:13
+error: reference to packed field is unaligned
+ --> $DIR/issue-27060.rs:16:13
|
LL | let _ = &good.data;
| ^^^^^^^^^^
note: the lint level is defined here
--> $DIR/issue-27060.rs:8:8
|
-LL | #[deny(safe_packed_borrows)]
- | ^^^^^^^^^^^^^^^^^^^
+LL | #[deny(unaligned_references)]
+ | ^^^^^^^^^^^^^^^^^^^^
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
+
+error: reference to packed field is unaligned
+ --> $DIR/issue-27060.rs:18:13
+ |
+LL | let _ = &good.data2[0];
+ | ^^^^^^^^^^^^^^
+ |
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
+
+error: reference to packed field is unaligned
+ --> $DIR/issue-27060.rs:21:13
+ |
+LL | let _ = &good.data;
+ | ^^^^^^^^^^
+ |
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
- = note: fields of packed structs might be misaligned: dereferencing a misaligned pointer or even just creating a misaligned reference is undefined behavior
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
-error: borrow of packed field is unsafe and requires unsafe function or block (error E0133)
+error: reference to packed field is unaligned
--> $DIR/issue-27060.rs:23:13
|
LL | let _ = &good.data2[0];
| ^^^^^^^^^^^^^^
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
- = note: fields of packed structs might be misaligned: dereferencing a misaligned pointer or even just creating a misaligned reference is undefined behavior
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
-error: aborting due to 2 previous errors
+error: aborting due to 4 previous errors
--> $DIR/array-of-ranges.rs:4:14
|
LL | for _ in [0..=1] {}
- | ^^^^^^^ borrow the array with `&` or call `.iter()` on it to iterate over it
+ | ^^^^^^^ if you meant to iterate between two values, remove the square brackets
|
= help: the trait `Iterator` is not implemented for `[RangeInclusive<{integer}>; 1]`
- = note: arrays are not iterators, but slices like the following are: `&[1, 2, 3]`
+ = note: `[start..=end]` is an array of one `RangeInclusive`; you might have meant to have a `RangeInclusive` without the brackets: `start..=end`
= note: required because of the requirements on the impl of `IntoIterator` for `[RangeInclusive<{integer}>; 1]`
= note: required by `into_iter`
--> $DIR/array-of-ranges.rs:6:14
|
LL | for _ in [0..] {}
- | ^^^^^ borrow the array with `&` or call `.iter()` on it to iterate over it
+ | ^^^^^ if you meant to iterate from a value onwards, remove the square brackets
|
= help: the trait `Iterator` is not implemented for `[RangeFrom<{integer}>; 1]`
- = note: arrays are not iterators, but slices like the following are: `&[1, 2, 3]`
+ = note: `[start..]` is an array of one `RangeFrom`; you might have meant to have a `RangeFrom` without the brackets: `start..`, keeping in mind that iterating over an unbounded iterator will run forever unless you `break` or `return` from within the loop
= note: required because of the requirements on the impl of `IntoIterator` for `[RangeFrom<{integer}>; 1]`
= note: required by `into_iter`
--> $DIR/array-of-ranges.rs:8:14
|
LL | for _ in [..1] {}
- | ^^^^^ borrow the array with `&` or call `.iter()` on it to iterate over it
+ | ^^^^^ if you meant to iterate until a value, remove the square brackets and add a starting value
|
= help: the trait `Iterator` is not implemented for `[RangeTo<{integer}>; 1]`
- = note: arrays are not iterators, but slices like the following are: `&[1, 2, 3]`
+ = note: `[..end]` is an array of one `RangeTo`; you might have meant to have a bounded `Range` without the brackets: `0..end`
= note: required because of the requirements on the impl of `IntoIterator` for `[RangeTo<{integer}>; 1]`
= note: required by `into_iter`
--> $DIR/array-of-ranges.rs:10:14
|
LL | for _ in [..=1] {}
- | ^^^^^^ borrow the array with `&` or call `.iter()` on it to iterate over it
+ | ^^^^^^ if you meant to iterate until a value (including it), remove the square brackets and add a starting value
|
= help: the trait `Iterator` is not implemented for `[RangeToInclusive<{integer}>; 1]`
- = note: arrays are not iterators, but slices like the following are: `&[1, 2, 3]`
+ = note: `[..=end]` is an array of one `RangeToInclusive`; you might have meant to have a bounded `RangeInclusive` without the brackets: `0..=end`
= note: required because of the requirements on the impl of `IntoIterator` for `[RangeToInclusive<{integer}>; 1]`
= note: required by `into_iter`
--> $DIR/array-of-ranges.rs:19:14
|
LL | for _ in [0..1, 2..3] {}
- | ^^^^^^^^^^^^ borrow the array with `&` or call `.iter()` on it to iterate over it
+ | ^^^^^^^^^^^^ arrays do not yet implement `IntoIterator`; try using `std::array::IntoIter::new(arr)`
|
= help: the trait `Iterator` is not implemented for `[std::ops::Range<{integer}>; 2]`
- = note: arrays are not iterators, but slices like the following are: `&[1, 2, 3]`
+ = note: see <https://github.com/rust-lang/rust/pull/65819> for more details
= note: required because of the requirements on the impl of `IntoIterator` for `[std::ops::Range<{integer}>; 2]`
= note: required by `into_iter`
--> $DIR/array-of-ranges.rs:21:14
|
LL | for _ in [0..=1] {}
- | ^^^^^^^ borrow the array with `&` or call `.iter()` on it to iterate over it
+ | ^^^^^^^ if you meant to iterate between two values, remove the square brackets
|
= help: the trait `Iterator` is not implemented for `[RangeInclusive<{integer}>; 1]`
- = note: arrays are not iterators, but slices like the following are: `&[1, 2, 3]`
+ = note: `[start..=end]` is an array of one `RangeInclusive`; you might have meant to have a `RangeInclusive` without the brackets: `start..=end`
= note: required because of the requirements on the impl of `IntoIterator` for `[RangeInclusive<{integer}>; 1]`
= note: required by `into_iter`
--> $DIR/array.rs:2:14
|
LL | for _ in [1, 2] {}
- | ^^^^^^ borrow the array with `&` or call `.iter()` on it to iterate over it
+ | ^^^^^^ arrays do not yet implement `IntoIterator`; try using `std::array::IntoIter::new(arr)`
|
= help: the trait `Iterator` is not implemented for `[{integer}; 2]`
- = note: arrays are not iterators, but slices like the following are: `&[1, 2, 3]`
+ = note: see <https://github.com/rust-lang/rust/pull/65819> for more details
= note: required because of the requirements on the impl of `IntoIterator` for `[{integer}; 2]`
= note: required by `into_iter`
--> $DIR/array.rs:5:14
|
LL | for _ in x {}
- | ^ borrow the array with `&` or call `.iter()` on it to iterate over it
+ | ^ arrays do not yet implement `IntoIterator`; try using `std::array::IntoIter::new(arr)`
|
= help: the trait `Iterator` is not implemented for `[{integer}; 2]`
- = note: arrays are not iterators, but slices like the following are: `&[1, 2, 3]`
+ = note: see <https://github.com/rust-lang/rust/pull/65819> for more details
= note: required because of the requirements on the impl of `IntoIterator` for `[{integer}; 2]`
= note: required by `into_iter`
--> $DIR/array.rs:7:14
|
LL | for _ in [1.0, 2.0] {}
- | ^^^^^^^^^^ borrow the array with `&` or call `.iter()` on it to iterate over it
+ | ^^^^^^^^^^ arrays do not yet implement `IntoIterator`; try using `std::array::IntoIter::new(arr)`
|
= help: the trait `Iterator` is not implemented for `[{float}; 2]`
- = note: arrays are not iterators, but slices like the following are: `&[1, 2, 3]`
+ = note: see <https://github.com/rust-lang/rust/pull/65819> for more details
= note: required because of the requirements on the impl of `IntoIterator` for `[{float}; 2]`
= note: required by `into_iter`
--> $DIR/ranges.rs:2:14
|
LL | for _ in ..10 {}
- | ^^^^ `RangeTo<{integer}>` is not an iterator
+ | ^^^^ if you meant to iterate until a value, add a starting value
|
= help: the trait `Iterator` is not implemented for `RangeTo<{integer}>`
+ = note: `..end` is a `RangeTo`, which cannot be iterated on; you might have meant to have a bounded `Range`: `0..end`
= note: required because of the requirements on the impl of `IntoIterator` for `RangeTo<{integer}>`
= note: required by `into_iter`
--> $DIR/ranges.rs:4:14
|
LL | for _ in ..=10 {}
- | ^^^^^ `RangeToInclusive<{integer}>` is not an iterator
+ | ^^^^^ if you meant to iterate until a value (including it), add a starting value
|
= help: the trait `Iterator` is not implemented for `RangeToInclusive<{integer}>`
+ = note: `..=end` is a `RangeToInclusive`, which cannot be iterated on; you might have meant to have a bounded `RangeInclusive`: `0..=end`
= note: required because of the requirements on the impl of `IntoIterator` for `RangeToInclusive<{integer}>`
= note: required by `into_iter`
--> $DIR/string.rs:2:14
|
LL | for _ in "".to_owned() {}
- | ^^^^^^^^^^^^^ `String` is not an iterator
+ | ^^^^^^^^^^^^^ `String` is not an iterator; try calling `.chars()` or `.bytes()`
|
= help: the trait `Iterator` is not implemented for `String`
= note: required because of the requirements on the impl of `IntoIterator` for `String`
aligned: [u8; 32],
}
+#[repr(packed(2))]
+pub struct Packed2 {
+ x: u32,
+ y: u16,
+ z: u8,
+}
+
fn main() {
unsafe {
let good = Good { data: 0, ptr: &0, data2: [0, 0], aligned: [0; 32] };
let _ = &good.ptr; //~ ERROR reference to packed field
+ //~^ previously accepted
let _ = &good.data; //~ ERROR reference to packed field
+ //~^ previously accepted
// Error even when turned into raw pointer immediately.
let _ = &good.data as *const _; //~ ERROR reference to packed field
+ //~^ previously accepted
let _: *const _ = &good.data; //~ ERROR reference to packed field
+ //~^ previously accepted
// Error on method call.
let _ = good.data.clone(); //~ ERROR reference to packed field
+ //~^ previously accepted
// Error for nested fields.
let _ = &good.data2[0]; //~ ERROR reference to packed field
+ //~^ previously accepted
let _ = &*good.ptr; // ok, behind a pointer
let _ = &good.aligned; // ok, has align 1
let _ = &good.aligned[2]; // ok, has align 1
}
+
+ unsafe {
+ let packed2 = Packed2 { x: 0, y: 0, z: 0 };
+ let _ = &packed2.x; //~ ERROR reference to packed field
+ //~^ previously accepted
+ let _ = &packed2.y; // ok, has align 2 in packed(2) struct
+ let _ = &packed2.z; // ok, has align 1
+ }
}
error: reference to packed field is unaligned
- --> $DIR/unaligned_references.rs:15:17
+ --> $DIR/unaligned_references.rs:22:17
|
LL | let _ = &good.ptr;
| ^^^^^^^^^
|
LL | #![deny(unaligned_references)]
| ^^^^^^^^^^^^^^^^^^^^
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
= note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
error: reference to packed field is unaligned
- --> $DIR/unaligned_references.rs:16:17
+ --> $DIR/unaligned_references.rs:24:17
|
LL | let _ = &good.data;
| ^^^^^^^^^^
|
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
= note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
error: reference to packed field is unaligned
- --> $DIR/unaligned_references.rs:18:17
+ --> $DIR/unaligned_references.rs:27:17
|
LL | let _ = &good.data as *const _;
| ^^^^^^^^^^
|
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
= note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
error: reference to packed field is unaligned
- --> $DIR/unaligned_references.rs:19:27
+ --> $DIR/unaligned_references.rs:29:27
|
LL | let _: *const _ = &good.data;
| ^^^^^^^^^^
|
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
= note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
error: reference to packed field is unaligned
- --> $DIR/unaligned_references.rs:21:17
+ --> $DIR/unaligned_references.rs:32:17
|
LL | let _ = good.data.clone();
| ^^^^^^^^^
|
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
= note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
error: reference to packed field is unaligned
- --> $DIR/unaligned_references.rs:23:17
+ --> $DIR/unaligned_references.rs:35:17
|
LL | let _ = &good.data2[0];
| ^^^^^^^^^^^^^^
|
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
= note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
-error: aborting due to 6 previous errors
+error: reference to packed field is unaligned
+ --> $DIR/unaligned_references.rs:45:17
+ |
+LL | let _ = &packed2.x;
+ | ^^^^^^^^^^
+ |
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
+
+error: aborting due to 7 previous errors
// aux-build:unaligned_references_external_crate.rs
-#![allow(safe_packed_borrows)]
-
extern crate unaligned_references_external_crate;
unaligned_references_external_crate::mac! { //~ERROR reference to packed field is unaligned
+ //~^ previously accepted
#[repr(packed)]
pub struct X {
pub field: u16
error: reference to packed field is unaligned
- --> $DIR/unaligned_references_external_macro.rs:7:1
+ --> $DIR/unaligned_references_external_macro.rs:5:1
|
LL | / unaligned_references_external_crate::mac! {
+LL | |
LL | | #[repr(packed)]
LL | | pub struct X {
LL | | pub field: u16
| |_^
|
note: the lint level is defined here
- --> $DIR/unaligned_references_external_macro.rs:7:1
+ --> $DIR/unaligned_references_external_macro.rs:5:1
|
LL | / unaligned_references_external_crate::mac! {
+LL | |
LL | | #[repr(packed)]
LL | | pub struct X {
LL | | pub field: u16
LL | | }
LL | | }
| |_^
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
= note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
--- /dev/null
+// check-fail
+
+fn main() {
+ println!("{}\
+"); //~^ ERROR: 1 positional argument in format string, but no arguments were given
+}
--- /dev/null
+error: 1 positional argument in format string, but no arguments were given
+ --> $DIR/issue-83344.rs:4:15
+ |
+LL | println!("{}\
+ | ^^
+
+error: aborting due to previous error
+
// run-pass
#![allow(dead_code)]
-#![deny(safe_packed_borrows)]
+#![deny(unaligned_references)]
#![feature(raw_ref_op)]
// ignore-emscripten weird assertion?
--- /dev/null
+// run-pass (note: this is spec-UB, but it works for now)
+// ignore-32bit (needs `usize` to be 8-aligned to reproduce all the errors below)
+#![allow(dead_code)]
+// ignore-emscripten weird assertion?
+
+#[repr(C, packed(4))]
+struct Foo4C {
+ bar: u8,
+ baz: usize
+}
+
+pub fn main() {
+ let foo = Foo4C { bar: 1, baz: 2 };
+ let brw = &foo.baz; //~WARN reference to packed field is unaligned
+ //~^ previously accepted
+ assert_eq!(*brw, 2);
+}
--- /dev/null
+warning: reference to packed field is unaligned
+ --> $DIR/packed-struct-borrow-element-64bit.rs:14:15
+ |
+LL | let brw = &foo.baz;
+ | ^^^^^^^^
+ |
+ = note: `#[warn(unaligned_references)]` on by default
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
+
+warning: 1 warning emitted
+
pub fn main() {
let foo = Foo1 { bar: 1, baz: 2 };
- let brw = unsafe { &foo.baz };
+ let brw = &foo.baz; //~WARN reference to packed field is unaligned
+ //~^ previously accepted
assert_eq!(*brw, 2);
let foo = Foo2 { bar: 1, baz: 2 };
- let brw = unsafe { &foo.baz };
- assert_eq!(*brw, 2);
-
- let foo = Foo4C { bar: 1, baz: 2 };
- let brw = unsafe { &foo.baz };
+ let brw = &foo.baz; //~WARN reference to packed field is unaligned
+ //~^ previously accepted
assert_eq!(*brw, 2);
}
--- /dev/null
+warning: reference to packed field is unaligned
+ --> $DIR/packed-struct-borrow-element.rs:25:15
+ |
+LL | let brw = &foo.baz;
+ | ^^^^^^^^
+ |
+ = note: `#[warn(unaligned_references)]` on by default
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
+
+warning: reference to packed field is unaligned
+ --> $DIR/packed-struct-borrow-element.rs:30:15
+ |
+LL | let brw = &foo.baz;
+ | ^^^^^^^^
+ |
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #82523 <https://github.com/rust-lang/rust/issues/82523>
+ = note: fields of packed structs are not properly aligned, and creating a misaligned reference is undefined behavior (even if that reference is never dereferenced)
+
+warning: 2 warnings emitted
+
#[macro_export]
macro_rules! outer_macro {
- ($name:ident) => {
+ ($name:ident, $attr_struct_name:ident) => {
#[macro_export]
macro_rules! inner_macro {
- ($wrapper:ident) => {
- $wrapper!($name)
+ ($bang_macro:ident, $attr_macro:ident) => {
+ $bang_macro!($name);
+ #[$attr_macro] struct $attr_struct_name {}
}
}
}
}
-outer_macro!(FirstStruct);
+outer_macro!(FirstStruct, FirstAttrStruct);
|
= note: `#[warn(proc_macro_derive_resolution_fallback)]` on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #50504 <https://github.com/rust-lang/rust/issues/50504>
+ = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
+ = note: this warning originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
warning: cannot find type `OuterDerive` in this scope
--> $DIR/generate-mod.rs:16:10
| ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #50504 <https://github.com/rust-lang/rust/issues/50504>
+ = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
+ = note: this warning originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
warning: cannot find type `FromOutside` in this scope
--> $DIR/generate-mod.rs:23:14
| ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #50504 <https://github.com/rust-lang/rust/issues/50504>
+ = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
+ = note: this warning originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
warning: cannot find type `OuterDerive` in this scope
--> $DIR/generate-mod.rs:23:14
| ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #50504 <https://github.com/rust-lang/rust/issues/50504>
+ = note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
+ = note: this warning originates in a derive macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 4 previous errors; 4 warnings emitted
// run-pass
// aux-build:nested-macro-rules.rs
// aux-build:test-macros.rs
-// compile-flags: -Z span-debug
+// compile-flags: -Z span-debug -Z macro-backtrace
// edition:2018
#![no_std] // Don't load unnecessary hygiene information from std
extern crate nested_macro_rules;
extern crate test_macros;
-use test_macros::print_bang;
+use test_macros::{print_bang, print_attr};
use nested_macro_rules::FirstStruct;
struct SecondStruct;
fn main() {
- nested_macro_rules::inner_macro!(print_bang);
+ nested_macro_rules::inner_macro!(print_bang, print_attr);
- nested_macro_rules::outer_macro!(SecondStruct);
- inner_macro!(print_bang);
+ nested_macro_rules::outer_macro!(SecondStruct, SecondAttrStruct);
+ inner_macro!(print_bang, print_attr);
}
stream: TokenStream [
Ident {
ident: "FirstStruct",
- span: $DIR/auxiliary/nested-macro-rules.rs:15:14: 15:25 (#7),
+ span: $DIR/auxiliary/nested-macro-rules.rs:16:14: 16:25 (#7),
},
],
- span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#6),
+ span: $DIR/auxiliary/nested-macro-rules.rs:9:30: 9:35 (#6),
+ },
+]
+PRINT-ATTR INPUT (DISPLAY): struct FirstAttrStruct { }
+PRINT-ATTR INPUT (DEBUG): TokenStream [
+ Ident {
+ ident: "struct",
+ span: $DIR/auxiliary/nested-macro-rules.rs:10:32: 10:38 (#6),
+ },
+ Group {
+ delimiter: None,
+ stream: TokenStream [
+ Ident {
+ ident: "FirstAttrStruct",
+ span: $DIR/auxiliary/nested-macro-rules.rs:16:27: 16:42 (#7),
+ },
+ ],
+ span: $DIR/auxiliary/nested-macro-rules.rs:10:39: 10:56 (#6),
+ },
+ Group {
+ delimiter: Brace,
+ stream: TokenStream [],
+ span: $DIR/auxiliary/nested-macro-rules.rs:10:57: 10:59 (#6),
},
]
PRINT-BANG INPUT (DISPLAY): SecondStruct
stream: TokenStream [
Ident {
ident: "SecondStruct",
- span: $DIR/nested-macro-rules.rs:21:38: 21:50 (#13),
+ span: $DIR/nested-macro-rules.rs:21:38: 21:50 (#16),
},
],
- span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#12),
+ span: $DIR/auxiliary/nested-macro-rules.rs:9:30: 9:35 (#15),
+ },
+]
+PRINT-ATTR INPUT (DISPLAY): struct SecondAttrStruct { }
+PRINT-ATTR INPUT (DEBUG): TokenStream [
+ Ident {
+ ident: "struct",
+ span: $DIR/auxiliary/nested-macro-rules.rs:10:32: 10:38 (#15),
+ },
+ Group {
+ delimiter: None,
+ stream: TokenStream [
+ Ident {
+ ident: "SecondAttrStruct",
+ span: $DIR/nested-macro-rules.rs:21:52: 21:68 (#16),
+ },
+ ],
+ span: $DIR/auxiliary/nested-macro-rules.rs:10:39: 10:56 (#15),
+ },
+ Group {
+ delimiter: Brace,
+ stream: TokenStream [],
+ span: $DIR/auxiliary/nested-macro-rules.rs:10:57: 10:59 (#15),
},
]
LL | Pin::new(x)
| ^^^^^^^^ the trait `Unpin` is not implemented for `dyn Future<Output = i32> + Send`
|
+ = note: consider using `Box::pin`
= note: required by `Pin::<P>::new`
error[E0277]: `dyn Future<Output = i32> + Send` cannot be unpinned
LL | Pin::new(Box::new(x))
| ^^^^^^^^ the trait `Unpin` is not implemented for `dyn Future<Output = i32> + Send`
|
+ = note: consider using `Box::pin`
= note: required by `Pin::<P>::new`
error[E0308]: mismatched types
LL | foo(String::new());
| ^^^ the trait `From<String>` is not implemented for `&str`
|
+ = note: to coerce a `String` into a `&str`, use `&*` as a prefix
= note: required because of the requirements on the impl of `Into<&str>` for `String`
error: aborting due to previous error
--> $DIR/path-display.rs:5:20
|
LL | println!("{}", path);
- | ^^^^ `Path` cannot be formatted with the default formatter
+ | ^^^^ `Path` cannot be formatted with the default formatter; call `.display()` on it
|
= help: the trait `std::fmt::Display` is not implemented for `Path`
- = note: in format strings you may be able to use `{:?}` (or {:#?} for pretty-print) instead
+ = note: call `.display()` or `.to_string_lossy()` to safely print paths, as they may contain non-Unicode data
= note: required because of the requirements on the impl of `std::fmt::Display` for `&Path`
= note: required by `std::fmt::Display::fmt`
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
+++ /dev/null
-#![feature(unsafe_block_in_unsafe_fn)]
-
-#[repr(packed)]
-pub struct Packed {
- data: &'static u32,
-}
-
-const PACKED: Packed = Packed { data: &0 };
-
-#[allow(safe_packed_borrows)]
-#[allow(unsafe_op_in_unsafe_fn)]
-unsafe fn allow_allow() {
- &PACKED.data; // allowed
-}
-
-#[allow(safe_packed_borrows)]
-#[warn(unsafe_op_in_unsafe_fn)]
-unsafe fn allow_warn() {
- &PACKED.data; // allowed
-}
-
-#[allow(safe_packed_borrows)]
-#[deny(unsafe_op_in_unsafe_fn)]
-unsafe fn allow_deny() {
- &PACKED.data; // allowed
-}
-
-#[warn(safe_packed_borrows)]
-#[allow(unsafe_op_in_unsafe_fn)]
-unsafe fn warn_allow() {
- &PACKED.data; // allowed
-}
-
-#[warn(safe_packed_borrows)]
-#[warn(unsafe_op_in_unsafe_fn)]
-unsafe fn warn_warn() {
- &PACKED.data; //~ WARN
- //~| WARNING this was previously accepted by the compiler but is being phased out
-}
-
-#[warn(safe_packed_borrows)]
-#[deny(unsafe_op_in_unsafe_fn)]
-unsafe fn warn_deny() {
- &PACKED.data; //~ WARN
- //~| WARNING this was previously accepted by the compiler but is being phased out
-}
-
-#[deny(safe_packed_borrows)]
-#[allow(unsafe_op_in_unsafe_fn)]
-unsafe fn deny_allow() {
- &PACKED.data; // allowed
-}
-
-#[deny(safe_packed_borrows)]
-#[warn(unsafe_op_in_unsafe_fn)]
-unsafe fn deny_warn() {
- &PACKED.data; //~ WARN
-}
-
-#[deny(safe_packed_borrows)]
-#[deny(unsafe_op_in_unsafe_fn)]
-unsafe fn deny_deny() {
- &PACKED.data; //~ ERROR
- //~| WARNING this was previously accepted by the compiler but is being phased out
-}
-
-fn main() {}
+++ /dev/null
-warning: borrow of packed field is unsafe and requires unsafe block (error E0133)
- --> $DIR/rfc-2585-safe_packed_borrows-in-unsafe-fn.rs:37:5
- |
-LL | &PACKED.data;
- | ^^^^^^^^^^^^ borrow of packed field
- |
-note: the lint level is defined here
- --> $DIR/rfc-2585-safe_packed_borrows-in-unsafe-fn.rs:34:8
- |
-LL | #[warn(safe_packed_borrows)]
- | ^^^^^^^^^^^^^^^^^^^
- = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
- = note: fields of packed structs might be misaligned: dereferencing a misaligned pointer or even just creating a misaligned reference is undefined behavior
-
-warning: borrow of packed field is unsafe and requires unsafe block (error E0133)
- --> $DIR/rfc-2585-safe_packed_borrows-in-unsafe-fn.rs:44:5
- |
-LL | &PACKED.data;
- | ^^^^^^^^^^^^ borrow of packed field
- |
-note: the lint level is defined here
- --> $DIR/rfc-2585-safe_packed_borrows-in-unsafe-fn.rs:41:8
- |
-LL | #[warn(safe_packed_borrows)]
- | ^^^^^^^^^^^^^^^^^^^
- = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
- = note: fields of packed structs might be misaligned: dereferencing a misaligned pointer or even just creating a misaligned reference is undefined behavior
-
-warning: borrow of packed field is unsafe and requires unsafe block (error E0133)
- --> $DIR/rfc-2585-safe_packed_borrows-in-unsafe-fn.rs:57:5
- |
-LL | &PACKED.data;
- | ^^^^^^^^^^^^ borrow of packed field
- |
-note: the lint level is defined here
- --> $DIR/rfc-2585-safe_packed_borrows-in-unsafe-fn.rs:55:8
- |
-LL | #[warn(unsafe_op_in_unsafe_fn)]
- | ^^^^^^^^^^^^^^^^^^^^^^
- = note: fields of packed structs might be misaligned: dereferencing a misaligned pointer or even just creating a misaligned reference is undefined behavior
-
-error: borrow of packed field is unsafe and requires unsafe block (error E0133)
- --> $DIR/rfc-2585-safe_packed_borrows-in-unsafe-fn.rs:63:5
- |
-LL | &PACKED.data;
- | ^^^^^^^^^^^^ borrow of packed field
- |
-note: the lint level is defined here
- --> $DIR/rfc-2585-safe_packed_borrows-in-unsafe-fn.rs:60:8
- |
-LL | #[deny(safe_packed_borrows)]
- | ^^^^^^^^^^^^^^^^^^^
- = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
- = note: for more information, see issue #46043 <https://github.com/rust-lang/rust/issues/46043>
- = note: fields of packed structs might be misaligned: dereferencing a misaligned pointer or even just creating a misaligned reference is undefined behavior
-
-error: aborting due to previous error; 3 warnings emitted
-
ty::{self, FloatTy, IntTy, PolyFnSig, Ty},
};
use rustc_session::{declare_lint_pass, declare_tool_lint};
+use std::iter;
declare_clippy_lint! {
/// **What it does:** Checks for usage of unconstrained numeric literals which may cause default numeric fallback in type
match &expr.kind {
ExprKind::Call(func, args) => {
if let Some(fn_sig) = fn_sig_opt(self.cx, func.hir_id) {
- for (expr, bound) in args.iter().zip(fn_sig.skip_binder().inputs().iter()) {
+ for (expr, bound) in iter::zip(*args, fn_sig.skip_binder().inputs()) {
// Push found arg type, then visit arg.
self.ty_bounds.push(TyBound::Ty(bound));
self.visit_expr(expr);
ExprKind::MethodCall(_, _, args, _) => {
if let Some(def_id) = self.cx.typeck_results().type_dependent_def_id(expr.hir_id) {
let fn_sig = self.cx.tcx.fn_sig(def_id).skip_binder();
- for (expr, bound) in args.iter().zip(fn_sig.inputs().iter()) {
+ for (expr, bound) in iter::zip(*args, fn_sig.inputs()) {
self.ty_bounds.push(TyBound::Ty(bound));
self.visit_expr(expr);
self.ty_bounds.pop();
);
},
);
- } else if !attr.is_value_str() && is_must_use_ty(cx, return_ty(cx, item_id)) {
+ } else if !attr.value_str().is_some() && is_must_use_ty(cx, return_ty(cx, item_id)) {
span_lint_and_help(
cx,
DOUBLE_MUST_USE,
#![feature(box_syntax)]
#![feature(drain_filter)]
#![feature(in_band_lifetimes)]
+#![feature(iter_zip)]
#![feature(once_cell)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![feature(rustc_private)]
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_tool_lint, impl_lint_pass};
+use std::iter;
declare_clippy_lint! {
/// **What it does:** Warns if a long integral or floating-point constant does
let group_sizes: Vec<usize> = num_lit.integer.split('_').map(str::len).collect();
if UUID_GROUP_LENS.len() == group_sizes.len() {
- UUID_GROUP_LENS.iter().zip(&group_sizes).all(|(&a, &b)| a == b)
+ iter::zip(&UUID_GROUP_LENS, &group_sizes).all(|(&a, &b)| a == b)
} else {
false
}
use rustc_middle::middle::region;
use rustc_middle::ty::{self, Ty};
use rustc_span::symbol::{sym, Symbol};
-use std::iter::Iterator;
+use std::iter::{self, Iterator};
use std::mem;
/// Checks for looping over a range and then indexing a sequence with it.
},
ExprKind::MethodCall(_, _, args, _) => {
let def_id = self.cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap();
- for (ty, expr) in self.cx.tcx.fn_sig(def_id).inputs().skip_binder().iter().zip(args) {
+ for (ty, expr) in iter::zip(self.cx.tcx.fn_sig(def_id).inputs().skip_binder(), args) {
self.prefer_mutable = false;
if let ty::Ref(_, _, mutbl) = *ty.kind() {
if mutbl == Mutability::Mut {
use rustc_span::sym;
use std::cmp::Ordering;
use std::collections::hash_map::Entry;
+use std::iter;
use std::ops::Bound;
declare_clippy_lint! {
values.sort();
- for (a, b) in values.iter().zip(values.iter().skip(1)) {
+ for (a, b) in iter::zip(&values, &values[1..]) {
match (a, b) {
(&Kind::Start(_, ra), &Kind::End(_, rb)) => {
if ra.node != rb.node {
let has_doc = attrs
.iter()
- .any(|a| a.is_doc_comment() || a.doc_str().is_some() || a.is_value_str() || Self::has_include(a.meta()));
+ .any(|a| a.is_doc_comment() || a.doc_str().is_some() || a.value_str().is_some() || Self::has_include(a.meta()));
if !has_doc {
span_lint(
cx,
use rustc_middle::ty::{Adt, Array, RawPtr, Ref, Slice, Tuple, Ty, TypeAndMut};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
+use std::iter;
declare_clippy_lint! {
/// **What it does:** Checks for sets/maps with mutable key types.
fn check_sig<'tcx>(cx: &LateContext<'tcx>, item_hir_id: hir::HirId, decl: &hir::FnDecl<'_>) {
let fn_def_id = cx.tcx.hir().local_def_id(item_hir_id);
let fn_sig = cx.tcx.fn_sig(fn_def_id);
- for (hir_ty, ty) in decl.inputs.iter().zip(fn_sig.inputs().skip_binder().iter()) {
+ for (hir_ty, ty) in iter::zip(decl.inputs, fn_sig.inputs().skip_binder()) {
check_ty(cx, hir_ty.span, ty);
}
check_ty(cx, decl.output.span(), cx.tcx.erase_late_bound_regions(fn_sig.output()));
use rustc_middle::ty::subst::Subst;
use rustc_middle::ty::{self, Ty};
use rustc_session::{declare_lint_pass, declare_tool_lint};
+use std::iter;
declare_clippy_lint! {
/// **What it does:** Detects passing a mutable reference to a function that only
match type_definition.kind() {
ty::FnDef(..) | ty::FnPtr(_) => {
let parameters = type_definition.fn_sig(cx.tcx).skip_binder().inputs();
- for (argument, parameter) in arguments.iter().zip(parameters.iter()) {
+ for (argument, parameter) in iter::zip(arguments, parameters) {
match parameter.kind() {
ty::Ref(_, _, Mutability::Not)
| ty::RawPtr(ty::TypeAndMut {
use std::cmp;
+use std::iter;
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::is_self_ty;
let fn_body = cx.enclosing_body.map(|id| cx.tcx.hir().body(id));
- for (index, (input, &ty)) in decl.inputs.iter().zip(fn_sig.inputs()).enumerate() {
+ for (index, (input, &ty)) in iter::zip(decl.inputs, fn_sig.inputs()).enumerate() {
// All spans generated from a proc-macro invocation are the same...
match span {
Some(s) if s == input.span => return,
use rustc_middle::ty::{AdtDef, FieldDef, Ty, TyKind, VariantDef};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
+use std::iter;
declare_clippy_lint! {
/// **What it does:** Checks for patterns that aren't exact representations of the types
hir_id: HirId,
) {
if let Some(fn_sig) = cx.typeck_results().liberated_fn_sigs().get(hir_id) {
- for (param, ty) in body.params.iter().zip(fn_sig.inputs().iter()) {
+ for (param, ty) in iter::zip(body.params, fn_sig.inputs()) {
apply_lint(cx, ¶m.pat, ty, DerefPossible::Impossible);
}
}
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::sym;
use rustc_span::symbol::Ident;
+use std::iter;
declare_clippy_lint! {
/// **What it does:**
mirrored_exprs(cx, left_expr, a_ident, right_expr, b_ident)
},
// Two arrays with mirrored contents
- (ExprKind::Array(left_exprs), ExprKind::Array(right_exprs)) => left_exprs
- .iter()
- .zip(right_exprs.iter())
- .all(|(left, right)| mirrored_exprs(cx, left, a_ident, right, b_ident)),
+ (ExprKind::Array(left_exprs), ExprKind::Array(right_exprs)) => {
+ iter::zip(*left_exprs, *right_exprs)
+ .all(|(left, right)| mirrored_exprs(cx, left, a_ident, right, b_ident))
+ }
// The two exprs are function calls.
// Check to see that the function itself and its arguments are mirrored
(ExprKind::Call(left_expr, left_args), ExprKind::Call(right_expr, right_args)) => {
mirrored_exprs(cx, left_expr, a_ident, right_expr, b_ident)
- && left_args
- .iter()
- .zip(right_args.iter())
+ && iter::zip(*left_args, *right_args)
.all(|(left, right)| mirrored_exprs(cx, left, a_ident, right, b_ident))
},
// The two exprs are method calls.
ExprKind::MethodCall(right_segment, _, right_args, _),
) => {
left_segment.ident == right_segment.ident
- && left_args
- .iter()
- .zip(right_args.iter())
+ && iter::zip(*left_args, *right_args)
.all(|(left, right)| mirrored_exprs(cx, left, a_ident, right, b_ident))
- },
+ }
// Two tuples with mirrored contents
- (ExprKind::Tup(left_exprs), ExprKind::Tup(right_exprs)) => left_exprs
- .iter()
- .zip(right_exprs.iter())
- .all(|(left, right)| mirrored_exprs(cx, left, a_ident, right, b_ident)),
+ (ExprKind::Tup(left_exprs), ExprKind::Tup(right_exprs)) => {
+ iter::zip(*left_exprs, *right_exprs)
+ .all(|(left, right)| mirrored_exprs(cx, left, a_ident, right, b_ident))
+ }
// Two binary ops, which are the same operation and which have mirrored arguments
(ExprKind::Binary(left_op, left_left, left_right), ExprKind::Binary(right_op, right_left, right_right)) => {
left_op.node == right_op.node
},
)),
) => {
- (left_segments
- .iter()
- .zip(right_segments.iter())
+ (iter::zip(*left_segments, *right_segments)
.all(|(left, right)| left.ident == right.ident)
&& left_segments
.iter()
use std::cmp::Ordering::{self, Equal};
use std::convert::TryInto;
use std::hash::{Hash, Hasher};
+use std::iter;
/// A `LitKind`-like enum to fold constant `Expr`s into.
#[derive(Debug, Clone)]
(&Self::F64(l), &Self::F64(r)) => l.partial_cmp(&r),
(&Self::F32(l), &Self::F32(r)) => l.partial_cmp(&r),
(&Self::Bool(ref l), &Self::Bool(ref r)) => Some(l.cmp(r)),
- (&Self::Tuple(ref l), &Self::Tuple(ref r)) | (&Self::Vec(ref l), &Self::Vec(ref r)) => l
- .iter()
- .zip(r.iter())
- .map(|(li, ri)| Self::partial_cmp(tcx, cmp_type, li, ri))
- .find(|r| r.map_or(true, |o| o != Ordering::Equal))
- .unwrap_or_else(|| Some(l.len().cmp(&r.len()))),
+ (&Self::Tuple(ref l), &Self::Tuple(ref r)) | (&Self::Vec(ref l), &Self::Vec(ref r)) => {
+ iter::zip(l, r)
+ .map(|(li, ri)| Self::partial_cmp(tcx, cmp_type, li, ri))
+ .find(|r| r.map_or(true, |o| o != Ordering::Equal))
+ .unwrap_or_else(|| Some(l.len().cmp(&r.len())))
+ }
(&Self::Repeat(ref lv, ref ls), &Self::Repeat(ref rv, ref rs)) => {
match Self::partial_cmp(tcx, cmp_type, lv, rv) {
Some(Equal) => Some(ls.cmp(rs)),
#![feature(box_patterns)]
#![feature(in_band_lifetimes)]
+#![feature(iter_zip)]
#![cfg_attr(bootstrap, feature(or_patterns))]
#![feature(rustc_private)]
#![recursion_limit = "512"]
use rustc_ast::ast::{Lit, LitFloatType, LitIntType, LitKind};
+use std::iter;
#[derive(Debug, PartialEq, Copy, Clone)]
pub enum Radix {
}
}
- for (c, i) in digits.zip((0..group_size).cycle()) {
+ for (c, i) in iter::zip(digits, (0..group_size).cycle()) {
if i == 0 {
output.push('_');
}
|
help: try this
|
-LL | "some 1{} / {}", "2", "3",
- | ^ --
+LL | "some 1/
+LL | {} / {}", "2", "3",
+ |
error: literal with an empty format string
--> $DIR/write_literal_2.rs:25:14
-Subproject commit 12dac5c0f7acd106401aa14fec758f0ff552f678
+Subproject commit 2cdd1744b896e8129322229f253f95fd7ad491f1
-Subproject commit 858ad554374a8b1ad67692558a0878391abfdd86
+Subproject commit bb1d925dab36372c6bd1fb5671bb68ce938ff009