RUST_CONFIGURE_ARGS=--build=x86_64-apple-darwin
SRC=.
RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache
os: osx
osx_image: xcode8.2
install: &osx_install_sccache >
RUST_CONFIGURE_ARGS=--build=i686-apple-darwin
SRC=.
RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache
os: osx
osx_image: xcode8.2
install: *osx_install_sccache
SRC=.
DEPLOY=1
RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache
os: osx
osx_image: xcode8.2
install: >
SRC=.
DEPLOY=1
RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache
os: osx
osx_image: xcode8.2
install: *osx_install_sccache
SRC=.
DEPLOY_ALT=1
RUSTC_RETRY_LINKER_ON_SEGFAULT=1
+ SCCACHE_ERROR_LOG=/tmp/sccache.log
+ RUST_LOG=sccache
os: osx
osx_image: xcode8.2
install: *osx_install_sccache
df -h;
du . | sort -nr | head -n100
- cat obj/tmp/sccache.log
+ - cat /tmp/sccache.log
# Save tagged docker images we created and load them if they're available
before_cache:
- set PATH=%PATH%;%CD%\handle
- handle.exe -accepteula -help
+ # Attempt to debug sccache failures
+ - set RUST_LOG=sccache
+ - set SCCACHE_ERROR_LOG=%CD%/sccache.log
+
test_script:
- appveyor-retry sh -c 'git submodule deinit -f . && git submodule update --init'
- set SRC=.
- set NO_CCACHE=1
- sh src/ci/run.sh
+on_failure:
+ - cat %CD%/sccache.log
+
cache:
- "build/i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
- "build/x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger"
need_cmd file
need_cmd make
-msg "inspecting environment"
-
-CFG_OSTYPE=$(uname -s)
-CFG_CPUTYPE=$(uname -m)
-
-if [ $CFG_OSTYPE = Darwin -a $CFG_CPUTYPE = i386 ]
-then
- # Darwin's `uname -s` lies and always returns i386. We have to use sysctl
- # instead.
- if sysctl hw.optional.x86_64 | grep -q ': 1'
- then
- CFG_CPUTYPE=x86_64
- fi
-fi
-
-# The goal here is to come up with the same triple as LLVM would,
-# at least for the subset of platforms we're willing to target.
-
-case $CFG_OSTYPE in
-
- Linux)
- CFG_OSTYPE=unknown-linux-gnu
- ;;
-
- FreeBSD)
- CFG_OSTYPE=unknown-freebsd
- ;;
-
- DragonFly)
- CFG_OSTYPE=unknown-dragonfly
- ;;
-
- Bitrig)
- CFG_OSTYPE=unknown-bitrig
- ;;
-
- OpenBSD)
- CFG_OSTYPE=unknown-openbsd
- ;;
-
- NetBSD)
- CFG_OSTYPE=unknown-netbsd
- ;;
-
- Darwin)
- CFG_OSTYPE=apple-darwin
- ;;
-
- SunOS)
- CFG_OSTYPE=sun-solaris
- CFG_CPUTYPE=$(isainfo -n)
- ;;
-
- Haiku)
- CFG_OSTYPE=unknown-haiku
- ;;
-
- MINGW*)
- # msys' `uname` does not print gcc configuration, but prints msys
- # configuration. so we cannot believe `uname -m`:
- # msys1 is always i686 and msys2 is always x86_64.
- # instead, msys defines $MSYSTEM which is MINGW32 on i686 and
- # MINGW64 on x86_64.
- CFG_CPUTYPE=i686
- CFG_OSTYPE=pc-windows-gnu
- if [ "$MSYSTEM" = MINGW64 ]
- then
- CFG_CPUTYPE=x86_64
- fi
- ;;
-
- MSYS*)
- CFG_OSTYPE=pc-windows-gnu
- ;;
-
-# Thad's Cygwin identifiers below
-
-# Vista 32 bit
- CYGWIN_NT-6.0)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=i686
- ;;
-
-# Vista 64 bit
- CYGWIN_NT-6.0-WOW64)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=x86_64
- ;;
-
-# Win 7 32 bit
- CYGWIN_NT-6.1)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=i686
- ;;
-
-# Win 7 64 bit
- CYGWIN_NT-6.1-WOW64)
- CFG_OSTYPE=pc-windows-gnu
- CFG_CPUTYPE=x86_64
- ;;
-
-# Win 8 # uname -s on 64-bit cygwin does not contain WOW64, so simply use uname -m to detect arch (works in my install)
- CYGWIN_NT-6.3)
- CFG_OSTYPE=pc-windows-gnu
- ;;
-# We do not detect other OS such as XP/2003 using 64 bit using uname.
-# If we want to in the future, we will need to use Cygwin - Chuck's csih helper in /usr/lib/csih/winProductName.exe or alternative.
- *)
- err "unknown OS type: $CFG_OSTYPE"
- ;;
-esac
-
-
-case $CFG_CPUTYPE in
-
- i386 | i486 | i686 | i786 | x86)
- CFG_CPUTYPE=i686
- ;;
-
- xscale | arm)
- CFG_CPUTYPE=arm
- ;;
-
- armv6l)
- CFG_CPUTYPE=arm
- CFG_OSTYPE="${CFG_OSTYPE}eabihf"
- ;;
-
- armv7l | armv8l)
- CFG_CPUTYPE=armv7
- CFG_OSTYPE="${CFG_OSTYPE}eabihf"
- ;;
-
- aarch64 | arm64)
- CFG_CPUTYPE=aarch64
- ;;
-
- powerpc | ppc)
- CFG_CPUTYPE=powerpc
- ;;
-
- powerpc64 | ppc64)
- CFG_CPUTYPE=powerpc64
- ;;
-
- powerpc64le | ppc64le)
- CFG_CPUTYPE=powerpc64le
- ;;
-
- s390x)
- CFG_CPUTYPE=s390x
- ;;
-
- x86_64 | x86-64 | x64 | amd64)
- CFG_CPUTYPE=x86_64
- ;;
-
- mips | mips64)
- if [ "$CFG_CPUTYPE" = "mips64" ]; then
- CFG_OSTYPE="${CFG_OSTYPE}abi64"
- fi
- ENDIAN=$(printf '\1' | od -dAn)
- if [ "$ENDIAN" -eq 1 ]; then
- CFG_CPUTYPE="${CFG_CPUTYPE}el"
- elif [ "$ENDIAN" -ne 256 ]; then
- err "unknown endianness: $ENDIAN (expecting 1 for little or 256 for big)"
- fi
- ;;
-
- BePC)
- CFG_CPUTYPE=i686
- ;;
-
- *)
- err "unknown CPU type: $CFG_CPUTYPE"
-esac
-
-# Detect 64 bit linux systems with 32 bit userland and force 32 bit compilation
-if [ $CFG_OSTYPE = unknown-linux-gnu -a $CFG_CPUTYPE = x86_64 ]
-then
- # $SHELL does not exist in standard 'sh', so probably only exists
- # if configure is running in an interactive bash shell. /usr/bin/env
- # exists *everywhere*.
- BIN_TO_PROBE="$SHELL"
- if [ ! -r "$BIN_TO_PROBE" ]; then
- if [ -r "/usr/bin/env" ]; then
- BIN_TO_PROBE="/usr/bin/env"
- else
- warn "Cannot check if the userland is i686 or x86_64"
- fi
- fi
- file -L "$BIN_TO_PROBE" | grep -q "x86[_-]64"
- if [ $? != 0 ]; then
- msg "i686 userland on x86_64 Linux kernel"
- CFG_CPUTYPE=i686
- fi
-fi
-
-
-DEFAULT_BUILD="${CFG_CPUTYPE}-${CFG_OSTYPE}"
-
CFG_SRC_DIR="$(abs_path $(dirname $0))/"
CFG_SRC_DIR_RELATIVE="$(dirname $0)/"
CFG_BUILD_DIR="$(pwd)/"
valopt llvm-root "" "set LLVM root"
valopt python "" "set path to python"
valopt jemalloc-root "" "set directory where libjemalloc_pic.a is located"
-valopt build "${DEFAULT_BUILD}" "GNUs ./configure syntax LLVM build triple"
+valopt build "" "GNUs ./configure syntax LLVM build triple"
valopt android-cross-path "" "Android NDK standalone path (deprecated)"
valopt i686-linux-android-ndk "" "i686-linux-android NDK standalone path"
valopt arm-linux-androideabi-ndk "" "arm-linux-androideabi NDK standalone path"
err "Found $python_version, but Python 2.7 is required"
fi
-# If we have no git directory then we are probably a tarball distribution
-# and shouldn't attempt to load submodules
-if [ ! -e ${CFG_SRC_DIR}.git ]
-then
- probe CFG_GIT git
- msg "git: no git directory. disabling submodules"
- CFG_DISABLE_MANAGE_SUBMODULES=1
-else
- probe_need CFG_GIT git
-fi
-
-# Use `md5sum` on GNU platforms, or `md5 -q` on BSD
-probe CFG_MD5 md5
-probe CFG_MD5SUM md5sum
-if [ -n "$CFG_MD5" ]
-then
- CFG_HASH_COMMAND="$CFG_MD5 -q | cut -c 1-8"
-elif [ -n "$CFG_MD5SUM" ]
-then
- CFG_HASH_COMMAND="$CFG_MD5SUM | cut -c 1-8"
-else
- err 'could not find one of: md5 md5sum'
-fi
-putvar CFG_HASH_COMMAND
-
-probe CFG_CLANG clang++
-probe CFG_CCACHE ccache
-probe CFG_GCC gcc
-probe CFG_LD ld
-probe CFG_VALGRIND valgrind
-probe CFG_PERF perf
-probe CFG_ISCC iscc
-probe CFG_ANTLR4 antlr4
-probe CFG_GRUN grun
-probe CFG_FLEX flex
-probe CFG_BISON bison
-probe CFG_GDB gdb
-probe CFG_LLDB lldb
-
-if [ -n "$CFG_ENABLE_NINJA" ]
-then
- probe CFG_NINJA ninja
- if [ -z "$CFG_NINJA" ]
- then
- # On Debian and Fedora, the `ninja` binary is an IRC bot, so the build tool was
- # renamed. Handle this case.
- probe CFG_NINJA ninja-build
- fi
-fi
-
-# For building LLVM
-if [ -z "$CFG_LLVM_ROOT" ]
-then
- probe_need CFG_CMAKE cmake
-fi
-
-# On MacOS X, invoking `javac` pops up a dialog if the JDK is not
-# installed. Since `javac` is only used if `antlr4` is available,
-# probe for it only in this case.
-if [ -n "$CFG_ANTLR4" ]
-then
- CFG_ANTLR4_JAR="\"$(find /usr/ -name antlr-complete.jar 2>/dev/null | head -n 1)\""
- if [ "x" = "x$CFG_ANTLR4_JAR" ]
- then
- CFG_ANTLR4_JAR="\"$(find ~ -name antlr-complete.jar 2>/dev/null | head -n 1)\""
- fi
- putvar CFG_ANTLR4_JAR $CFG_ANTLR4_JAR
- probe CFG_JAVAC javac
-fi
-
# the valgrind rpass tests will fail if you don't have a valgrind, but they're
# only disabled if you opt out.
if [ -z "$CFG_VALGRIND" ]
fi
fi
-if [ -n "$CFG_LLDB" ]
-then
- # Store LLDB's version
- CFG_LLDB_VERSION=$($CFG_LLDB --version 2>/dev/null | head -1)
- putvar CFG_LLDB_VERSION
-
- # If CFG_LLDB_PYTHON_DIR is not already set from the outside and valid, try to read it from
- # LLDB via the -P commandline options.
- if [ -z "$CFG_LLDB_PYTHON_DIR" ] || [ ! -d "$CFG_LLDB_PYTHON_DIR" ]
- then
- CFG_LLDB_PYTHON_DIR=$($CFG_LLDB -P)
-
- # If CFG_LLDB_PYTHON_DIR is not a valid directory, set it to something more readable
- if [ ! -d "$CFG_LLDB_PYTHON_DIR" ]
- then
- CFG_LLDB_PYTHON_DIR="LLDB_PYTHON_DIRECTORY_NOT_FOUND"
- fi
-
- putvar CFG_LLDB_PYTHON_DIR
- fi
-fi
-
-# LLDB tests on OSX require /usr/bin/python, not something like Homebrew's
-# /usr/local/bin/python. We're loading a compiled module for LLDB tests which is
-# only compatible with the system.
-case $CFG_BUILD in
- *-apple-darwin)
- CFG_LLDB_PYTHON=/usr/bin/python
- ;;
- *)
- CFG_LLDB_PYTHON=$CFG_PYTHON
- ;;
-esac
-putvar CFG_LLDB_PYTHON
-
# Do some sanity checks if running on buildbot
# (these env vars are set by rust-buildbot)
if [ -n "$RUST_DIST_SERVER" -a -n "$ALLOW_NONZERO_RLIMIT_CORE" ]; then
fi
fi
-step_msg "looking for target specific programs"
-
-probe CFG_ADB adb
-
BIN_SUF=
if [ "$CFG_OSTYPE" = "pc-windows-gnu" ] || [ "$CFG_OSTYPE" = "pc-windows-msvc" ]
then
CFG_DOCDIR=${CFG_DOCDIR%/}
CFG_HOST="$(echo $CFG_HOST | tr ',' ' ')"
CFG_TARGET="$(echo $CFG_TARGET | tr ',' ' ')"
-CFG_SUPPORTED_TARGET=""
-for target_file in ${CFG_SRC_DIR}mk/cfg/*.mk; do
- CFG_SUPPORTED_TARGET="${CFG_SUPPORTED_TARGET} $(basename "$target_file" .mk)"
-done
# copy build-triples to host-triples so that builds are a subset of hosts
V_TEMP=""
putvar CFG_DOCDIR
putvar CFG_USING_LIBCPP
-# Avoid spurious warnings from clang by feeding it original source on
-# ccache-miss rather than preprocessed input.
-if [ -n "$CFG_ENABLE_CCACHE" ] && [ -n "$CFG_USING_CLANG" ]
-then
- CFG_CCACHE_CPP2=1
- putvar CFG_CCACHE_CPP2
-fi
-
-if [ -n "$CFG_ENABLE_CCACHE" ]
-then
- CFG_CCACHE_BASEDIR=${CFG_SRC_DIR}
- putvar CFG_CCACHE_BASEDIR
-fi
-
-
-putvar CFG_LLVM_SRC_DIR
-
-for t in $CFG_HOST
-do
- CFG_LLVM_BUILD_DIR=$(echo CFG_LLVM_BUILD_DIR_${t} | tr - _)
- CFG_LLVM_INST_DIR=$(echo CFG_LLVM_INST_DIR_${t} | tr - _)
- putvar $CFG_LLVM_BUILD_DIR
- putvar $CFG_LLVM_INST_DIR
-done
-
msg
copy_if_changed ${CFG_SRC_DIR}src/bootstrap/mk/Makefile.in ./Makefile
move_if_changed config.tmp config.mk
def get_mk(self, key):
for line in iter(self.config_mk.splitlines()):
- if line.startswith(key):
- return line[line.find(':=') + 2:].strip()
+ if line.startswith(key + ' '):
+ var = line[line.find(':=') + 2:].strip()
+ if var != '':
+ return var
return None
def cargo(self):
sys.exit(err)
elif ostype == 'Darwin':
ostype = 'apple-darwin'
+ elif ostype == 'Haiku':
+ ostype = 'unknown-haiku'
elif ostype.startswith('MINGW'):
# msys' `uname` does not print gcc configuration, but prints msys
# configuration. so we cannot believe `uname -m`:
cputype = 'i686'
elif cputype in {'xscale', 'arm'}:
cputype = 'arm'
- elif cputype in {'armv7l', 'armv8l'}:
+ elif cputype in {'armv6l', 'armv7l', 'armv8l'}:
cputype = 'arm'
ostype += 'eabihf'
+ elif cputype == 'armv7l':
+ cputype = 'armv7'
+ ostype += 'eabihf'
elif cputype == 'aarch64':
cputype = 'aarch64'
elif cputype == 'arm64':
raise ValueError('unknown byteorder: ' + sys.byteorder)
# only the n64 ABI is supported, indicate it
ostype += 'abi64'
- elif cputype in {'powerpc', 'ppc', 'ppc64'}:
+ elif cputype in {'powerpc', 'ppc'}:
cputype = 'powerpc'
+ elif cputype in {'powerpc64', 'ppc64'}:
+ cputype = 'powerpc64'
+ elif cputype in {'powerpc64le', 'ppc64le'}:
+ cputype = 'powerpc64le'
elif cputype == 'sparcv9':
pass
elif cputype in {'amd64', 'x86_64', 'x86-64', 'x64'}:
cputype = 'x86_64'
+ elif cputype == 's390x':
+ cputype = 's390x'
+ elif cputype == 'BePC':
+ cputype = 'i686'
else:
err = "unknown cpu type: " + cputype
if self.verbose:
}
match key {
- "CFG_BUILD" => self.build = value.to_string(),
- "CFG_HOST" => {
- self.host = value.split(" ").map(|s| s.to_string())
- .collect();
- }
- "CFG_TARGET" => {
- self.target = value.split(" ").map(|s| s.to_string())
- .collect();
+ "CFG_BUILD" if value.len() > 0 => self.build = value.to_string(),
+ "CFG_HOST" if value.len() > 0 => {
+ self.host.extend(value.split(" ").map(|s| s.to_string()));
+
+ }
+ "CFG_TARGET" if value.len() > 0 => {
+ self.target.extend(value.split(" ").map(|s| s.to_string()));
}
"CFG_MUSL_ROOT" if value.len() > 0 => {
self.musl_root = Some(parse_configure_path(value));
cmd.arg(distdir(build));
cmd.arg(today.trim());
cmd.arg(build.rust_package_vers());
- cmd.arg(build.cargo_info.version(build, &build.cargo_release_num()));
+ cmd.arg(build.package_vers(&build.cargo_release_num()));
cmd.arg(addr);
t!(fs::create_dir_all(distdir(build)));
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Display {
/// Formats the value using the given formatter.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::fmt;
+ ///
+ /// struct Position {
+ /// longitude: f32,
+ /// latitude: f32,
+ /// }
+ ///
+ /// impl fmt::Display for Position {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "({}, {})", self.longitude, self.latitude)
+ /// }
+ /// }
+ ///
+ /// assert_eq!("(1.987, 2.983)".to_owned(),
+ /// format!("{}", Position { longitude: 1.987, latitude: 2.983, }));
+ /// ```
#[stable(feature = "rust1", since = "1.0.0")]
fn fmt(&self, f: &mut Formatter) -> Result;
}
}
impl<'a> Formatter<'a> {
-
// First up is the collection of functions used to execute a format string
// at runtime. This consumes all of the compile-time statics generated by
// the format! syntax extension.
use super::dep_node::{DepNode, WorkProductId};
use super::query::DepGraphQuery;
use super::raii;
+use super::safe::DepGraphSafe;
use super::thread::{DepGraphThreadData, DepMessage};
#[derive(Clone)]
op()
}
- pub fn with_task<OP,R>(&self, key: DepNode<DefId>, op: OP) -> R
- where OP: FnOnce() -> R
+ /// Starts a new dep-graph task. Dep-graph tasks are specified
+ /// using a free function (`task`) and **not** a closure -- this
+ /// is intentional because we want to exercise tight control over
+ /// what state they have access to. In particular, we want to
+ /// prevent implicit 'leaks' of tracked state into the task (which
+ /// could then be read without generating correct edges in the
+ /// dep-graph -- see the [README] for more details on the
+ /// dep-graph). To this end, the task function gets exactly two
+ /// pieces of state: the context `cx` and an argument `arg`. Both
+ /// of these bits of state must be of some type that implements
+ /// `DepGraphSafe` and hence does not leak.
+ ///
+ /// The choice of two arguments is not fundamental. One argument
+ /// would work just as well, since multiple values can be
+ /// collected using tuples. However, using two arguments works out
+ /// to be quite convenient, since it is common to need a context
+ /// (`cx`) and some argument (e.g., a `DefId` identifying what
+ /// item to process).
+ ///
+ /// For cases where you need some other number of arguments:
+ ///
+ /// - If you only need one argument, just use `()` for the `arg`
+ /// parameter.
+ /// - If you need 3+ arguments, use a tuple for the
+ /// `arg` parameter.
+ ///
+ /// [README]: README.md
+ pub fn with_task<C, A, R>(&self, key: DepNode<DefId>, cx: C, arg: A, task: fn(C, A) -> R) -> R
+ where C: DepGraphSafe, A: DepGraphSafe
{
let _task = self.in_task(key);
- op()
+ task(cx, arg)
}
pub fn read(&self, v: DepNode<DefId>) {
mod graph;
mod query;
mod raii;
+mod safe;
mod shadow;
mod thread;
mod visit;
pub use self::graph::DepGraph;
pub use self::graph::WorkProduct;
pub use self::query::DepGraphQuery;
+pub use self::safe::AssertDepGraphSafe;
+pub use self::safe::DepGraphSafe;
pub use self::visit::visit_all_bodies_in_krate;
pub use self::visit::visit_all_item_likes_in_krate;
pub use self::raii::DepTask;
--- /dev/null
+// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use hir::BodyId;
+use hir::def_id::DefId;
+use syntax::ast::NodeId;
+use ty::TyCtxt;
+
+/// The `DepGraphSafe` trait is used to specify what kinds of values
+/// are safe to "leak" into a task. The idea is that this should be
+/// only be implemented for things like the tcx as well as various id
+/// types, which will create reads in the dep-graph whenever the trait
+/// loads anything that might depend on the input program.
+pub trait DepGraphSafe {
+}
+
+/// A `BodyId` on its own doesn't give access to any particular state.
+/// You must fetch the state from the various maps or generate
+/// on-demand queries, all of which create reads.
+impl DepGraphSafe for BodyId {
+}
+
+/// A `NodeId` on its own doesn't give access to any particular state.
+/// You must fetch the state from the various maps or generate
+/// on-demand queries, all of which create reads.
+impl DepGraphSafe for NodeId {
+}
+
+/// A `DefId` on its own doesn't give access to any particular state.
+/// You must fetch the state from the various maps or generate
+/// on-demand queries, all of which create reads.
+impl DepGraphSafe for DefId {
+}
+
+/// The type context itself can be used to access all kinds of tracked
+/// state, but those accesses should always generate read events.
+impl<'a, 'gcx, 'tcx> DepGraphSafe for TyCtxt<'a, 'gcx, 'tcx> {
+}
+
+/// Tuples make it easy to build up state.
+impl<A, B> DepGraphSafe for (A, B)
+ where A: DepGraphSafe, B: DepGraphSafe
+{
+}
+
+/// No data here! :)
+impl DepGraphSafe for () {
+}
+
+/// A convenient override that lets you pass arbitrary state into a
+/// task. Every use should be accompanied by a comment explaining why
+/// it makes sense (or how it could be refactored away in the future).
+pub struct AssertDepGraphSafe<T>(pub T);
+
+impl<T> DepGraphSafe for AssertDepGraphSafe<T> {
+}
trait_items: BTreeMap<hir::TraitItemId, hir::TraitItem>,
impl_items: BTreeMap<hir::ImplItemId, hir::ImplItem>,
bodies: BTreeMap<hir::BodyId, hir::Body>,
+ exported_macros: Vec<hir::MacroDef>,
trait_impls: BTreeMap<DefId, Vec<NodeId>>,
trait_default_impl: BTreeMap<DefId, NodeId>,
bodies: BTreeMap::new(),
trait_impls: BTreeMap::new(),
trait_default_impl: BTreeMap::new(),
+ exported_macros: Vec::new(),
loop_scopes: Vec::new(),
is_in_loop_condition: false,
type_def_lifetime_params: DefIdMap(),
impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> {
fn visit_item(&mut self, item: &'lcx Item) {
- let hir_item = self.lctx.lower_item(item);
- self.lctx.items.insert(item.id, hir_item);
- visit::walk_item(self, item);
+ if let Some(hir_item) = self.lctx.lower_item(item) {
+ self.lctx.items.insert(item.id, hir_item);
+ visit::walk_item(self, item);
+ }
}
fn visit_trait_item(&mut self, item: &'lcx TraitItem) {
let module = self.lower_mod(&c.module);
let attrs = self.lower_attrs(&c.attrs);
- let exported_macros = c.exported_macros.iter().map(|m| self.lower_macro_def(m)).collect();
let body_ids = body_ids(&self.bodies);
hir::Crate {
module: module,
attrs: attrs,
span: c.span,
- exported_macros: exported_macros,
+ exported_macros: hir::HirVec::from(self.exported_macros),
items: self.items,
trait_items: self.trait_items,
impl_items: self.impl_items,
bounds,
items)
}
- ItemKind::Mac(_) => panic!("Shouldn't still be around"),
+ ItemKind::MacroDef(..) | ItemKind::Mac(..) => panic!("Shouldn't still be around"),
}
}
}
}
- fn lower_macro_def(&mut self, m: &MacroDef) -> hir::MacroDef {
- hir::MacroDef {
- name: m.ident.name,
- attrs: self.lower_attrs(&m.attrs),
- id: m.id,
- span: m.span,
- body: m.body.clone().into(),
- }
- }
-
fn lower_item_id(&mut self, i: &Item) -> SmallVector<hir::ItemId> {
- if let ItemKind::Use(ref view_path) = i.node {
- if let ViewPathList(_, ref imports) = view_path.node {
- return iter::once(i.id).chain(imports.iter().map(|import| import.node.id))
- .map(|id| hir::ItemId { id: id }).collect();
+ match i.node {
+ ItemKind::Use(ref view_path) => {
+ if let ViewPathList(_, ref imports) = view_path.node {
+ return iter::once(i.id).chain(imports.iter().map(|import| import.node.id))
+ .map(|id| hir::ItemId { id: id }).collect();
+ }
}
+ ItemKind::MacroDef(..) => return SmallVector::new(),
+ _ => {}
}
SmallVector::one(hir::ItemId { id: i.id })
}
- pub fn lower_item(&mut self, i: &Item) -> hir::Item {
+ pub fn lower_item(&mut self, i: &Item) -> Option<hir::Item> {
let mut name = i.ident.name;
let attrs = self.lower_attrs(&i.attrs);
let mut vis = self.lower_visibility(&i.vis);
+ if let ItemKind::MacroDef(ref tts) = i.node {
+ if i.attrs.iter().any(|attr| attr.name() == "macro_export") {
+ self.exported_macros.push(hir::MacroDef {
+ name: name, attrs: attrs, id: i.id, span: i.span, body: tts.clone().into(),
+ });
+ }
+ return None;
+ }
+
let node = self.with_parent_def(i.id, |this| {
this.lower_item_kind(i.id, &mut name, &attrs, &mut vis, &i.node)
});
- hir::Item {
+ Some(hir::Item {
id: i.id,
name: name,
attrs: attrs,
node: node,
vis: vis,
span: i.span,
- }
+ })
}
fn lower_foreign_item(&mut self, i: &ForeignItem) -> hir::ForeignItem {
ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_str()),
ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) =>
DefPathData::ValueNs(i.ident.name.as_str()),
- ItemKind::Mac(..) if i.id == DUMMY_NODE_ID => return, // Scope placeholder
+ ItemKind::MacroDef(..) => DefPathData::MacroDef(i.ident.name.as_str()),
ItemKind::Mac(..) => return self.visit_macro_invoc(i.id, false),
ItemKind::Use(ref view_path) => {
match view_path.node {
self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str()));
}
- fn visit_macro_def(&mut self, macro_def: &'a MacroDef) {
- self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name.as_str()));
- }
-
fn visit_stmt(&mut self, stmt: &'a Stmt) {
match stmt.node {
StmtKind::Mac(..) => self.visit_macro_invoc(stmt.id, false),
The main case which fails today that I would like to support is:
-```text
+```rust
fn foo<T>(x: T, y: T) { ... }
fn bar() {
`X`, and thus inherits its UB/LB of `@mut int`. This leaves no
flexibility for `T` to later adjust to accommodate `@int`.
+Note: `@` and `@mut` are replaced with `Rc<T>` and `Rc<RefCell<T>>` in current Rust.
+
### What to do when not all bounds are present
In the prior discussion we assumed that A.ub was not top and B.lb was
"execute" by testing the value they are applied to and creating any
relevant bindings). So, for example:
- fn foo(x: isize, y: isize) { // -+
- // +------------+ // |
- // | +-----+ // |
- // | +-+ +-+ +-+ // |
- // | | | | | | | // |
- // v v v v v v v // |
- let z = x + y; // |
- ... // |
- } // -+
-
- fn bar() { ... }
+```rust
+fn foo(x: isize, y: isize) { // -+
+// +------------+ // |
+// | +-----+ // |
+// | +-+ +-+ +-+ // |
+// | | | | | | | // |
+// v v v v v v v // |
+ let z = x + y; // |
+ ... // |
+} // -+
+
+fn bar() { ... }
+```
In this example, there is a region for the fn body block as a whole,
and then a subregion for the declaration of the local variable.
particular when combined with `&mut` functions. For example, a call
like this one
- self.foo(self.bar())
+```rust
+self.foo(self.bar())
+```
where both `foo` and `bar` are `&mut self` functions will always yield
an error.
Here is a more involved example (which is safe) so we can see what's
going on:
- struct Foo { f: usize, g: usize }
- ...
- fn add(p: &mut usize, v: usize) {
- *p += v;
- }
- ...
- fn inc(p: &mut usize) -> usize {
- *p += 1; *p
- }
- fn weird() {
- let mut x: Box<Foo> = box Foo { ... };
- 'a: add(&mut (*x).f,
- 'b: inc(&mut (*x).f)) // (..)
- }
+```rust
+struct Foo { f: usize, g: usize }
+// ...
+fn add(p: &mut usize, v: usize) {
+ *p += v;
+}
+// ...
+fn inc(p: &mut usize) -> usize {
+ *p += 1; *p
+}
+fn weird() {
+ let mut x: Box<Foo> = box Foo { /* ... */ };
+ 'a: add(&mut (*x).f,
+ 'b: inc(&mut (*x).f)) // (..)
+}
+```
The important part is the line marked `(..)` which contains a call to
`add()`. The first argument is a mutable borrow of the field `f`. The
involved with `'a` in detail. We'll break apart all the steps involved
in a call expression:
- 'a: {
- 'a_arg1: let a_temp1: ... = add;
- 'a_arg2: let a_temp2: &'a mut usize = &'a mut (*x).f;
- 'a_arg3: let a_temp3: usize = {
- let b_temp1: ... = inc;
- let b_temp2: &'b = &'b mut (*x).f;
- 'b_call: b_temp1(b_temp2)
- };
- 'a_call: a_temp1(a_temp2, a_temp3) // (**)
- }
+```rust
+'a: {
+ 'a_arg1: let a_temp1: ... = add;
+ 'a_arg2: let a_temp2: &'a mut usize = &'a mut (*x).f;
+ 'a_arg3: let a_temp3: usize = {
+ let b_temp1: ... = inc;
+ let b_temp2: &'b = &'b mut (*x).f;
+ 'b_call: b_temp1(b_temp2)
+ };
+ 'a_call: a_temp1(a_temp2, a_temp3) // (**)
+}
+```
Here we see that the lifetime `'a` includes a number of substatements.
In particular, there is this lifetime I've called `'a_call` that
argument, it can still be *invalidated* by that evaluation. Consider
this similar but unsound example:
- struct Foo { f: usize, g: usize }
- ...
- fn add(p: &mut usize, v: usize) {
- *p += v;
- }
- ...
- fn consume(x: Box<Foo>) -> usize {
- x.f + x.g
- }
- fn weird() {
- let mut x: Box<Foo> = box Foo { ... };
- 'a: add(&mut (*x).f, consume(x)) // (..)
- }
+```rust
+struct Foo { f: usize, g: usize }
+// ...
+fn add(p: &mut usize, v: usize) {
+ *p += v;
+}
+// ...
+fn consume(x: Box<Foo>) -> usize {
+ x.f + x.g
+}
+fn weird() {
+ let mut x: Box<Foo> = box Foo { ... };
+ 'a: add(&mut (*x).f, consume(x)) // (..)
+}
+```
In this case, the second argument to `add` actually consumes `x`, thus
invalidating the first argument.
};
if output_template.is_empty() {
- bug!("empty string provided as RUST_REGION_GRAPH");
+ panic!("empty string provided as RUST_REGION_GRAPH");
}
if output_template.contains('%') {
self.tables = old_tables;
}
+ fn visit_body(&mut self, body: &'tcx hir::Body) {
+ run_lints!(self, check_body, late_passes, body);
+ hir_visit::walk_body(self, body);
+ run_lints!(self, check_body_post, late_passes, body);
+ }
+
fn visit_item(&mut self, it: &'tcx hir::Item) {
self.with_lint_attrs(&it.attrs, |cx| {
run_lints!(cx, check_item, late_passes, it);
// FIXME: eliminate the duplication with `Visitor`. But this also
// contains a few lint-specific methods with no equivalent in `Visitor`.
pub trait LateLintPass<'a, 'tcx>: LintPass {
+ fn check_body(&mut self, _: &LateContext, _: &'tcx hir::Body) { }
+ fn check_body_post(&mut self, _: &LateContext, _: &'tcx hir::Body) { }
fn check_name(&mut self, _: &LateContext, _: Span, _: ast::Name) { }
fn check_crate(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Crate) { }
fn check_crate_post(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Crate) { }
}
pub enum LoadedMacro {
- MacroRules(ast::MacroDef),
+ MacroDef(ast::Item),
ProcMacro(Rc<SyntaxExtension>),
}
ObjectSafetyViolation,
};
+use errors::DiagnosticBuilder;
use fmt_macros::{Parser, Piece, Position};
+use hir::{intravisit, Local, Pat};
+use hir::intravisit::{Visitor, NestedVisitorMap};
+use hir::map::NodeExpr;
use hir::def_id::DefId;
use infer::{self, InferCtxt};
use infer::type_variable::TypeVariableOrigin;
use rustc::lint::builtin::EXTRA_REQUIREMENT_IN_IMPL;
+use std::fmt;
+use syntax::ast;
use ty::{self, AdtKind, ToPredicate, ToPolyTraitRef, Ty, TyCtxt, TypeFoldable};
use ty::error::ExpectedFound;
use ty::fast_reject;
use ty::subst::Subst;
use util::nodemap::{FxHashMap, FxHashSet};
-use std::fmt;
-use syntax::ast;
-use hir::{intravisit, Local, Pat};
-use hir::intravisit::{Visitor, NestedVisitorMap};
use syntax_pos::{DUMMY_SP, Span};
-use errors::DiagnosticBuilder;
+
#[derive(Debug, PartialEq, Eq, Hash)]
pub struct TraitErrorKey<'tcx> {
err.span_label(cause.span, &format!("cannot infer type for `{}`", name));
- let expr = self.tcx.hir.expect_expr(cause.body_id);
-
let mut local_visitor = FindLocalByTypeVisitor {
infcx: &self,
target_ty: &ty,
found_pattern: None,
};
- local_visitor.visit_expr(expr);
+ // #40294: cause.body_id can also be a fn declaration.
+ // Currently, if it's anything other than NodeExpr, we just ignore it
+ match self.tcx.hir.find(cause.body_id) {
+ Some(NodeExpr(expr)) => local_visitor.visit_expr(expr),
+ _ => ()
+ }
if let Some(pattern) = local_visitor.found_pattern {
let pattern_span = pattern.span;
let new_trait = tcx.mk_dynamic(
ty::Binder(tcx.mk_existential_predicates(iter)), r_b);
let InferOk { obligations, .. } =
- self.infcx.sub_types(false, &obligation.cause, new_trait, target)
+ self.infcx.eq_types(false, &obligation.cause, new_trait, target)
.map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
// [T; n] -> [T].
(&ty::TyArray(a, _), &ty::TySlice(b)) => {
let InferOk { obligations, .. } =
- self.infcx.sub_types(false, &obligation.cause, a, b)
+ self.infcx.eq_types(false, &obligation.cause, a, b)
.map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
}
});
let new_struct = tcx.mk_adt(def, tcx.mk_substs(params));
let InferOk { obligations, .. } =
- self.infcx.sub_types(false, &obligation.cause, new_struct, target)
+ self.infcx.eq_types(false, &obligation.cause, new_struct, target)
.map_err(|_| Unimplemented)?;
self.inferred_obligations.extend(obligations);
pub type LoanDataFlow<'a, 'tcx> = DataFlowContext<'a, 'tcx, LoanDataFlowOperator>;
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- tcx.dep_graph.with_task(DepNode::BorrowCheckKrate, || {
+ tcx.dep_graph.with_task(DepNode::BorrowCheckKrate, tcx, (), check_crate_task);
+
+ fn check_crate_task<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (): ()) {
tcx.visit_all_bodies_in_krate(|body_owner_def_id, body_id| {
- tcx.dep_graph.with_task(DepNode::BorrowCheck(body_owner_def_id), || {
- borrowck_fn(tcx, body_id);
- });
+ tcx.dep_graph.with_task(DepNode::BorrowCheck(body_owner_def_id),
+ tcx,
+ body_id,
+ borrowck_fn);
});
- });
+ }
}
/// Collection of conclusions determined via borrow checker analyses.
use serialize::json;
use std::env;
-use std::mem;
use std::ffi::{OsString, OsStr};
use std::fs;
use std::io::{self, Write};
krate
});
- krate.exported_macros = mem::replace(&mut resolver.exported_macros, Vec::new());
-
krate = time(time_passes, "maybe building test harness", || {
syntax::test::modify_for_testing(&sess.parse_sess,
&mut resolver,
const X86_WHITELIST: &'static [&'static str] = &["avx\0", "avx2\0", "bmi\0", "bmi2\0", "sse\0",
"sse2\0", "sse3\0", "sse4.1\0", "sse4.2\0",
"ssse3\0", "tbm\0", "lzcnt\0", "popcnt\0",
- "sse4a\0", "rdrnd\0", "rdseed\0"];
+ "sse4a\0", "rdrnd\0", "rdseed\0", "fma\0"];
/// Add `target_feature = "..."` cfgs for a variety of platform
/// specific features (SSE, NEON etc.).
clean_work_products.insert(wp.clone());
}
- tcx.dep_graph.with_task(n, || ()); // create the node with no inputs
+ tcx.dep_graph.with_task(n, (), (), create_node);
+
+ fn create_node((): (), (): ()) {
+ // just create the node with no inputs
+ }
}
}
sess.imported_macro_spans.borrow_mut()
.insert(local_span, (name.to_string(), data.get_span(id.index, sess)));
- LoadedMacro::MacroRules(ast::MacroDef {
+ LoadedMacro::MacroDef(ast::Item {
ident: ast::Ident::with_empty_ctxt(name),
id: ast::DUMMY_NODE_ID,
span: local_span,
attrs: attrs,
- body: body.into(),
+ node: ast::ItemKind::MacroDef(body.into()),
+ vis: ast::Visibility::Inherited,
})
}
(https://github.com/rust-lang/rust/issues/39283)");
}
- if temp_lifetime.is_some() {
+ if !expr_ty.is_never() && temp_lifetime.is_some() {
this.cfg.push(block, Statement {
source_info: source_info,
kind: StatementKind::StorageLive(temp.clone())
use std::mem;
pub fn build_mir_for_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- tcx.dep_graph.with_task(DepNode::MirKrate, || {
+ tcx.dep_graph.with_task(DepNode::MirKrate, tcx, (), build_mir_for_crate_task);
+
+ fn build_mir_for_crate_task<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (): ()) {
tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
tcx.item_mir(body_owner_def_id);
});
- });
+ }
}
pub fn provide(providers: &mut Providers) {
fn visit_attribute(&mut self, attr: &'v ast::Attribute) {
self.record("Attribute", Id::None, attr);
}
-
- fn visit_macro_def(&mut self, macro_def: &'v ast::MacroDef) {
- self.record("MacroDef", Id::None, macro_def);
- ast_visit::walk_macro_def(self, macro_def)
- }
}
use syntax::ast::{Variant, ViewPathGlob, ViewPathList, ViewPathSimple};
use syntax::ext::base::SyntaxExtension;
use syntax::ext::base::Determinacy::Undetermined;
-use syntax::ext::expand::mark_tts;
use syntax::ext::hygiene::Mark;
use syntax::ext::tt::macro_rules;
use syntax::parse::token;
self.define(parent, ident, TypeNS, (module, vis, sp, expansion));
self.current_module = module;
}
- ItemKind::Mac(_) => panic!("unexpanded macro in resolve!"),
+ ItemKind::MacroDef(..) | ItemKind::Mac(_) => unreachable!(),
}
}
})
}
+ pub fn macro_def_scope(&mut self, expansion: Mark) -> Module<'a> {
+ let def_id = self.macro_defs[&expansion];
+ if let Some(id) = self.definitions.as_local_node_id(def_id) {
+ self.local_macro_def_scopes[&id]
+ } else {
+ let module_def_id = ty::DefIdTree::parent(&*self, def_id).unwrap();
+ self.get_extern_crate_root(module_def_id.krate)
+ }
+ }
+
pub fn get_macro(&mut self, def: Def) -> Rc<SyntaxExtension> {
let def_id = match def {
Def::Macro(def_id, ..) => def_id,
return ext.clone();
}
- let mut macro_rules = match self.session.cstore.load_macro(def_id, &self.session) {
- LoadedMacro::MacroRules(macro_rules) => macro_rules,
+ let macro_def = match self.session.cstore.load_macro(def_id, &self.session) {
+ LoadedMacro::MacroDef(macro_def) => macro_def,
LoadedMacro::ProcMacro(ext) => return ext,
};
- let mark = Mark::fresh();
- let invocation = self.arenas.alloc_invocation_data(InvocationData {
- module: Cell::new(self.get_extern_crate_root(def_id.krate)),
- def_index: CRATE_DEF_INDEX,
- const_expr: false,
- legacy_scope: Cell::new(LegacyScope::Empty),
- expansion: Cell::new(LegacyScope::Empty),
- });
- self.invocations.insert(mark, invocation);
- macro_rules.body = mark_tts(macro_rules.stream(), mark).into();
- let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, ¯o_rules));
+ let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, ¯o_def));
self.macro_map.insert(def_id, ext.clone());
ext
}
fn visit_item(&mut self, item: &'a Item) {
let macro_use = match item.node {
- ItemKind::Mac(ref mac) => {
- if mac.node.path.segments.is_empty() {
- self.legacy_scope = LegacyScope::Expansion(self.visit_invoc(item.id));
- } else {
- self.resolver.define_macro(item, &mut self.legacy_scope);
- }
+ ItemKind::MacroDef(..) => {
+ self.resolver.define_macro(item, &mut self.legacy_scope);
+ return
+ }
+ ItemKind::Mac(..) => {
+ self.legacy_scope = LegacyScope::Expansion(self.visit_invoc(item.id));
return
}
ItemKind::Mod(..) => self.resolver.contains_macro_use(&item.attrs),
// Trait paths in bounds or impls.
Trait,
// Expression paths `path`, with optional parent context.
- Expr(Option<&'a ExprKind>),
+ Expr(Option<&'a Expr>),
// Paths in path patterns `Path`.
Pat,
// Paths in struct expressions and patterns `Path { .. }`.
ValueNS => "method or associated constant",
MacroNS => bug!("associated macro"),
},
- PathSource::Expr(parent) => match parent {
+ PathSource::Expr(parent) => match parent.map(|p| &p.node) {
// "function" here means "anything callable" rather than `Def::Fn`,
// this is not precise but usually more helpful than just "value".
Some(&ExprKind::Call(..)) => "function",
// We passed through a module.
ModuleRibKind(Module<'a>),
- // We passed through a `macro_rules!` statement with the given expansion
- MacroDefinition(Mark),
+ // We passed through a `macro_rules!` statement
+ MacroDefinition(DefId),
// All bindings in this rib are type parameters that can't be used
// from the default of a type parameter because they're not declared
}
}
- fn get_macro(&self, resolver: &mut Resolver<'a>) -> Rc<SyntaxExtension> {
+ fn def_ignoring_ambiguity(&self) -> Def {
match self.kind {
- NameBindingKind::Import { binding, .. } => binding.get_macro(resolver),
- NameBindingKind::Ambiguity { b1, .. } => b1.get_macro(resolver),
- _ => resolver.get_macro(self.def()),
+ NameBindingKind::Import { binding, .. } => binding.def_ignoring_ambiguity(),
+ NameBindingKind::Ambiguity { b1, .. } => b1.def_ignoring_ambiguity(),
+ _ => self.def(),
}
}
+ fn get_macro(&self, resolver: &mut Resolver<'a>) -> Rc<SyntaxExtension> {
+ resolver.get_macro(self.def_ignoring_ambiguity())
+ }
+
// We sometimes need to treat variants as `pub` for backwards compatibility
fn pseudo_vis(&self) -> ty::Visibility {
if self.is_variant() { ty::Visibility::Public } else { self.vis }
pub definitions: Definitions,
- // Maps the node id of a statement to the expansions of the `macro_rules!`s
- // immediately above the statement (if appropriate).
- macros_at_scope: FxHashMap<NodeId, Vec<Mark>>,
-
graph_root: Module<'a>,
prelude: Option<Module<'a>>,
dummy_binding: &'a NameBinding<'a>,
use_extern_macros: bool, // true if `#![feature(use_extern_macros)]`
- pub exported_macros: Vec<ast::MacroDef>,
crate_loader: &'a mut CrateLoader,
macro_names: FxHashSet<Name>,
builtin_macros: FxHashMap<Name, &'a NameBinding<'a>>,
lexical_macro_resolutions: Vec<(Name, &'a Cell<LegacyScope<'a>>)>,
macro_map: FxHashMap<DefId, Rc<SyntaxExtension>>,
+ macro_defs: FxHashMap<Mark, DefId>,
+ local_macro_def_scopes: FxHashMap<NodeId, Module<'a>>,
macro_exports: Vec<Export>,
pub whitelisted_legacy_custom_derives: Vec<Name>,
pub found_unresolved_macro: bool,
let features = session.features.borrow();
+ let mut macro_defs = FxHashMap();
+ macro_defs.insert(Mark::root(), root_def_id);
+
Resolver {
session: session,
definitions: definitions,
- macros_at_scope: FxHashMap(),
// The outermost module has def ID 0; this is not reflected in the
// AST.
// `#![feature(proc_macro)]` implies `#[feature(extern_macros)]`
use_extern_macros: features.use_extern_macros || features.proc_macro,
- exported_macros: Vec::new(),
crate_loader: crate_loader,
macro_names: FxHashSet(),
builtin_macros: FxHashMap(),
macro_map: FxHashMap(),
macro_exports: Vec::new(),
invocations: invocations,
+ macro_defs: macro_defs,
+ local_macro_def_scopes: FxHashMap(),
name_already_seen: FxHashMap(),
whitelisted_legacy_custom_derives: Vec::new(),
proc_macro_enabled: features.proc_macro,
}
}
- if let MacroDefinition(mac) = self.ribs[ns][i].kind {
+ if let MacroDefinition(def) = self.ribs[ns][i].kind {
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
- let (source_ctxt, source_macro) = ident.ctxt.source();
- if source_macro == mac {
- ident.ctxt = source_ctxt;
+ let ctxt_data = ident.ctxt.data();
+ if def == self.macro_defs[&ctxt_data.outer_mark] {
+ ident.ctxt = ctxt_data.prev_ctxt;
}
}
}
None
}
- fn resolve_crate_var(&mut self, mut crate_var_ctxt: SyntaxContext) -> Module<'a> {
- while crate_var_ctxt.source().0 != SyntaxContext::empty() {
- crate_var_ctxt = crate_var_ctxt.source().0;
+ fn resolve_crate_var(&mut self, crate_var_ctxt: SyntaxContext) -> Module<'a> {
+ let mut ctxt_data = crate_var_ctxt.data();
+ while ctxt_data.prev_ctxt != SyntaxContext::empty() {
+ ctxt_data = ctxt_data.prev_ctxt.data();
}
- let module = self.invocations[&crate_var_ctxt.source().1].module.get();
+ let module = self.macro_def_scope(ctxt_data.outer_mark);
if module.is_local() { self.graph_root } else { module }
}
NormalRibKind => {
// Continue
}
- MacroDefinition(mac) => {
+ MacroDefinition(def) => {
// If an invocation of this macro created `ident`, give up on `ident`
// and switch to `ident`'s source from the macro definition.
- let (source_ctxt, source_macro) = ident.ctxt.source();
- if source_macro == mac {
- ident.ctxt = source_ctxt;
+ let ctxt_data = ident.ctxt.data();
+ if def == self.macro_defs[&ctxt_data.outer_mark] {
+ ident.ctxt = ctxt_data.prev_ctxt;
}
}
_ => {
}
}
- ItemKind::ExternCrate(_) => {
+ ItemKind::ExternCrate(_) | ItemKind::MacroDef(..) => {
// do nothing, these are just around to be encoded
}
// Descend into the block.
for stmt in &block.stmts {
- if let Some(marks) = self.macros_at_scope.remove(&stmt.id) {
- num_macro_definition_ribs += marks.len() as u32;
- for mark in marks {
- self.ribs[ValueNS].push(Rib::new(MacroDefinition(mark)));
- self.label_ribs.push(Rib::new(MacroDefinition(mark)));
+ if let ast::StmtKind::Item(ref item) = stmt.node {
+ if let ast::ItemKind::MacroDef(..) = item.node {
+ num_macro_definition_ribs += 1;
+ let def = self.definitions.local_def_id(item.id);
+ self.ribs[ValueNS].push(Rib::new(MacroDefinition(def)));
+ self.label_ribs.push(Rib::new(MacroDefinition(def)));
}
}
source: PathSource)
-> PathResolution {
let segments = &path.segments.iter().map(|seg| seg.identifier).collect::<Vec<_>>();
- self.smart_resolve_path_fragment(id, qself, segments, path.span, source)
+ let ident_span = path.segments.last().map_or(path.span, |seg| seg.span);
+ self.smart_resolve_path_fragment(id, qself, segments, path.span, ident_span, source)
}
fn smart_resolve_path_fragment(&mut self,
qself: Option<&QSelf>,
path: &[Ident],
span: Span,
+ ident_span: Span,
source: PathSource)
-> PathResolution {
let ns = source.namespace();
let expected = source.descr_expected();
let path_str = names_to_string(path);
let code = source.error_code(def.is_some());
- let (base_msg, fallback_label) = if let Some(def) = def {
+ let (base_msg, fallback_label, base_span) = if let Some(def) = def {
(format!("expected {}, found {} `{}`", expected, def.kind_name(), path_str),
- format!("not a {}", expected))
+ format!("not a {}", expected), span)
} else {
let item_str = path[path.len() - 1];
let (mod_prefix, mod_str) = if path.len() == 1 {
(mod_prefix, format!("`{}`", names_to_string(mod_path)))
};
(format!("cannot find {} `{}` in {}{}", expected, item_str, mod_prefix, mod_str),
- format!("not found in {}", mod_str))
+ format!("not found in {}", mod_str), ident_span)
};
- let mut err = this.session.struct_span_err_with_code(span, &base_msg, code);
+ let mut err = this.session.struct_span_err_with_code(base_span, &base_msg, code);
// Emit special messages for unresolved `Self` and `self`.
if is_self_type(path, ns) {
err.span_label(span, &format!("type aliases cannot be used for traits"));
return err;
}
- (Def::Mod(..), PathSource::Expr(Some(parent))) => match *parent {
+ (Def::Mod(..), PathSource::Expr(Some(parent))) => match parent.node {
ExprKind::Field(_, ident) => {
- err.span_label(span, &format!("did you mean `{}::{}`?",
- path_str, ident.node));
+ err.span_label(parent.span, &format!("did you mean `{}::{}`?",
+ path_str, ident.node));
return err;
}
ExprKind::MethodCall(ident, ..) => {
- err.span_label(span, &format!("did you mean `{}::{}(...)`?",
- path_str, ident.node));
+ err.span_label(parent.span, &format!("did you mean `{}::{}(...)`?",
+ path_str, ident.node));
return err;
}
_ => {}
// Try Levenshtein if nothing else worked.
if let Some(candidate) = this.lookup_typo_candidate(path, ns, is_expected) {
- err.span_label(span, &format!("did you mean `{}`?", candidate));
+ err.span_label(ident_span, &format!("did you mean `{}`?", candidate));
return err;
}
// Fallback label.
- err.span_label(span, &fallback_label);
+ err.span_label(base_span, &fallback_label);
err
};
let report_errors = |this: &mut Self, def: Option<Def>| {
// Make sure `A::B` in `<T as A>::B::C` is a trait item.
let ns = if qself.position + 1 == path.len() { ns } else { TypeNS };
let res = self.smart_resolve_path_fragment(id, None, &path[..qself.position + 1],
- span, PathSource::TraitItem(ns));
+ span, span, PathSource::TraitItem(ns));
return Some(PathResolution::with_unresolved_segments(
res.base_def(), res.unresolved_segments() + path.len() - qself.position - 1
));
path: &[Ident],
ns: Namespace,
filter_fn: FilterFn)
- -> Option<String>
+ -> Option<Symbol>
where FilterFn: Fn(Def) -> bool
{
let add_module_candidates = |module: Module, names: &mut Vec<Name>| {
};
let mut names = Vec::new();
- let prefix_str = if path.len() == 1 {
+ if path.len() == 1 {
// Search in lexical scope.
// Walk backwards up the ribs in scope and collect candidates.
for rib in self.ribs[ns].iter().rev() {
names.push(*name);
}
}
- String::new()
} else {
// Search in module.
let mod_path = &path[..path.len() - 1];
if let PathResult::Module(module) = self.resolve_path(mod_path, Some(TypeNS), None) {
add_module_candidates(module, &mut names);
}
- names_to_string(mod_path) + "::"
- };
+ }
let name = path[path.len() - 1].name;
// Make sure error reporting is deterministic.
names.sort_by_key(|name| name.as_str());
match find_best_match_for_name(names.iter(), &name.as_str(), None) {
- Some(found) if found != name => Some(format!("{}{}", prefix_str, found)),
+ Some(found) if found != name => Some(found),
_ => None,
}
}
self.with_resolved_label(label, id, |this| this.visit_block(block));
}
- fn resolve_expr(&mut self, expr: &Expr, parent: Option<&ExprKind>) {
+ fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) {
// First, record candidate traits for this expression if it could
// result in the invocation of a method call.
// Equivalent to `visit::walk_expr` + passing some context to children.
ExprKind::Field(ref subexpression, _) => {
- self.resolve_expr(subexpression, Some(&expr.node));
+ self.resolve_expr(subexpression, Some(expr));
}
ExprKind::MethodCall(_, ref types, ref arguments) => {
let mut arguments = arguments.iter();
- self.resolve_expr(arguments.next().unwrap(), Some(&expr.node));
+ self.resolve_expr(arguments.next().unwrap(), Some(expr));
for argument in arguments {
self.resolve_expr(argument, None);
}
});
}
ExprKind::Call(ref callee, ref arguments) => {
- self.resolve_expr(callee, Some(&expr.node));
+ self.resolve_expr(callee, Some(expr));
for argument in arguments {
self.resolve_expr(argument, None);
}
if ident.name == lookup_name && ns == namespace {
if filter_fn(name_binding.def()) {
// create the path
- let span = name_binding.span;
let mut segms = path_segments.clone();
- segms.push(ident.into());
+ segms.push(ast::PathSegment::from_ident(ident, name_binding.span));
let path = Path {
- span: span,
+ span: name_binding.span,
segments: segms,
};
// the entity is accessible in the following cases:
if let Some(module) = name_binding.module() {
// form the path
let mut path_segments = path_segments.clone();
- path_segments.push(ident.into());
+ path_segments.push(ast::PathSegment::from_ident(ident, name_binding.span));
if !in_module_is_extern || name_binding.vis == ty::Visibility::Public {
// add the module to the lookup
use rustc::hir::def::{Def, Export};
use rustc::hir::map::{self, DefCollector};
use rustc::ty;
-use std::cell::Cell;
-use std::rc::Rc;
use syntax::ast::{self, Name, Ident};
-use syntax::attr;
+use syntax::attr::{self, HasAttrs};
use syntax::errors::DiagnosticBuilder;
-use syntax::ext::base::{self, Determinacy, MultiModifier, MultiDecorator};
-use syntax::ext::base::{Resolver as SyntaxResolver, SyntaxExtension};
-use syntax::ext::base::MacroKind;
-use syntax::ext::expand::{Expansion, mark_tts};
+use syntax::ext::base::{self, Annotatable, Determinacy, MultiModifier, MultiDecorator};
+use syntax::ext::base::{MacroKind, SyntaxExtension, Resolver as SyntaxResolver};
+use syntax::ext::expand::{Expansion, ExpansionKind, Invocation, InvocationKind, find_attr_invoc};
use syntax::ext::hygiene::Mark;
+use syntax::ext::placeholders::placeholder;
use syntax::ext::tt::macro_rules;
use syntax::feature_gate::{self, emit_feature_err, GateIssue};
use syntax::fold::{self, Folder};
use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};
use syntax::util::lev_distance::find_best_match_for_name;
-use syntax::visit::Visitor;
use syntax_pos::{Span, DUMMY_SP};
+use std::cell::Cell;
+use std::mem;
+use std::rc::Rc;
+
#[derive(Clone)]
pub struct InvocationData<'a> {
pub module: Cell<Module<'a>>,
pub struct LegacyBinding<'a> {
pub parent: Cell<LegacyScope<'a>>,
pub name: ast::Name,
- ext: Rc<SyntaxExtension>,
+ def_id: DefId,
pub span: Span,
}
path.segments[0].identifier.name = keywords::CrateRoot.name();
let module = self.0.resolve_crate_var(ident.ctxt);
if !module.is_local() {
+ let span = path.segments[0].span;
path.segments.insert(1, match module.kind {
- ModuleKind::Def(_, name) => ast::Ident::with_empty_ctxt(name).into(),
+ ModuleKind::Def(_, name) => ast::PathSegment::from_ident(
+ ast::Ident::with_empty_ctxt(name), span
+ ),
_ => unreachable!(),
})
}
invocation.expansion.set(visitor.legacy_scope);
}
- fn add_ext(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
+ fn add_builtin(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>) {
let def_id = DefId {
krate: BUILTIN_MACROS_CRATE,
index: DefIndex::new(self.macro_map.len()),
self.builtin_macros.insert(ident.name, binding);
}
- fn add_expansions_at_stmt(&mut self, id: ast::NodeId, macros: Vec<Mark>) {
- self.macros_at_scope.insert(id, macros);
- }
-
fn resolve_imports(&mut self) {
ImportResolver { resolver: self }.resolve_imports()
}
None
}
- fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind,
- force: bool) -> Result<Rc<SyntaxExtension>, Determinacy> {
+ fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
+ -> Result<Option<Rc<SyntaxExtension>>, Determinacy> {
+ let def = match invoc.kind {
+ InvocationKind::Attr { attr: None, .. } => return Ok(None),
+ _ => match self.resolve_invoc_to_def(invoc, scope, force) {
+ Ok(def) => def,
+ Err(determinacy) => return Err(determinacy),
+ },
+ };
+ self.macro_defs.insert(invoc.expansion_data.mark, def.def_id());
+ Ok(Some(self.get_macro(def)))
+ }
+
+ fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
+ -> Result<Rc<SyntaxExtension>, Determinacy> {
+ self.resolve_macro_to_def(scope, path, kind, force).map(|def| self.get_macro(def))
+ }
+}
+
+impl<'a> Resolver<'a> {
+ fn resolve_invoc_to_def(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
+ -> Result<Def, Determinacy> {
+ let (attr, traits, item) = match invoc.kind {
+ InvocationKind::Attr { ref mut attr, ref traits, ref mut item } => (attr, traits, item),
+ InvocationKind::Bang { ref mac, .. } => {
+ return self.resolve_macro_to_def(scope, &mac.node.path, MacroKind::Bang, force);
+ }
+ InvocationKind::Derive { name, span, .. } => {
+ let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
+ return self.resolve_macro_to_def(scope, &path, MacroKind::Derive, force);
+ }
+ };
+
+ let (attr_name, path) = {
+ let attr = attr.as_ref().unwrap();
+ (attr.name(), ast::Path::from_ident(attr.span, Ident::with_empty_ctxt(attr.name())))
+ };
+
+ let mut determined = true;
+ match self.resolve_macro_to_def(scope, &path, MacroKind::Attr, force) {
+ Ok(def) => return Ok(def),
+ Err(Determinacy::Undetermined) => determined = false,
+ Err(Determinacy::Determined) if force => return Err(Determinacy::Determined),
+ Err(Determinacy::Determined) => {}
+ }
+
+ for &(name, span) in traits {
+ let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
+ match self.resolve_macro(scope, &path, MacroKind::Derive, force) {
+ Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext {
+ if inert_attrs.contains(&attr_name) {
+ // FIXME(jseyfried) Avoid `mem::replace` here.
+ let dummy_item = placeholder(ExpansionKind::Items, ast::DUMMY_NODE_ID)
+ .make_items().pop().unwrap();
+ let dummy_item = Annotatable::Item(dummy_item);
+ *item = mem::replace(item, dummy_item).map_attrs(|mut attrs| {
+ let inert_attr = attr.take().unwrap();
+ attr::mark_known(&inert_attr);
+ if self.proc_macro_enabled {
+ *attr = find_attr_invoc(&mut attrs);
+ }
+ attrs.push(inert_attr);
+ attrs
+ });
+ }
+ return Err(Determinacy::Undetermined);
+ },
+ Err(Determinacy::Undetermined) => determined = false,
+ Err(Determinacy::Determined) => {}
+ }
+ }
+
+ Err(if determined { Determinacy::Determined } else { Determinacy::Undetermined })
+ }
+
+ fn resolve_macro_to_def(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
+ -> Result<Def, Determinacy> {
let ast::Path { ref segments, span } = *path;
if segments.iter().any(|segment| segment.parameters.is_some()) {
let kind =
return Err(Determinacy::Determined);
}
- let ext = match self.resolve_path(&path, Some(MacroNS), None) {
+ let def = match self.resolve_path(&path, Some(MacroNS), None) {
PathResult::NonModule(path_res) => match path_res.base_def() {
Def::Err => Err(Determinacy::Determined),
- def @ _ => Ok(self.get_macro(def)),
+ def @ _ => Ok(def),
},
PathResult::Module(..) => unreachable!(),
PathResult::Indeterminate if !force => return Err(Determinacy::Undetermined),
};
self.current_module.macro_resolutions.borrow_mut()
.push((path.into_boxed_slice(), span));
- return ext;
+ return def;
}
let name = path[0].name;
let result = match self.resolve_legacy_scope(&invocation.legacy_scope, name, false) {
- Some(MacroBinding::Legacy(binding)) => Ok(binding.ext.clone()),
- Some(MacroBinding::Modern(binding)) => Ok(binding.get_macro(self)),
+ Some(MacroBinding::Legacy(binding)) => Ok(Def::Macro(binding.def_id, MacroKind::Bang)),
+ Some(MacroBinding::Modern(binding)) => Ok(binding.def_ignoring_ambiguity()),
None => match self.resolve_lexical_macro_path_segment(path[0], MacroNS, None) {
- Ok(binding) => Ok(binding.get_macro(self)),
+ Ok(binding) => Ok(binding.def_ignoring_ambiguity()),
Err(Determinacy::Undetermined) if !force =>
return Err(Determinacy::Undetermined),
Err(_) => {
result
}
-}
-impl<'a> Resolver<'a> {
// Resolve the initial segment of a non-global macro path (e.g. `foo` in `foo::bar!();`)
pub fn resolve_lexical_macro_path_segment(&mut self,
ident: Ident,
};
let ident = Ident::from_str(name);
self.lookup_typo_candidate(&vec![ident], MacroNS, is_macro)
- .as_ref().map(|s| Symbol::intern(s))
});
if let Some(suggestion) = suggestion {
}
pub fn define_macro(&mut self, item: &ast::Item, legacy_scope: &mut LegacyScope<'a>) {
- let tts = match item.node {
- ast::ItemKind::Mac(ref mac) => mac.node.stream(),
- _ => unreachable!(),
- };
-
- if item.ident.name == "macro_rules" {
+ self.local_macro_def_scopes.insert(item.id, self.current_module);
+ let ident = item.ident;
+ if ident.name == "macro_rules" {
self.session.span_err(item.span, "user-defined macros may not be named `macro_rules`");
}
- let mark = Mark::from_placeholder_id(item.id);
- let invocation = self.invocations[&mark];
- invocation.module.set(self.current_module);
-
- let mut def = ast::MacroDef {
- ident: item.ident,
- attrs: item.attrs.clone(),
- id: ast::DUMMY_NODE_ID,
- span: item.span,
- body: mark_tts(tts, mark).into(),
- };
-
+ let def_id = self.definitions.local_def_id(item.id);
+ let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, item));
+ self.macro_map.insert(def_id, ext);
*legacy_scope = LegacyScope::Binding(self.arenas.alloc_legacy_binding(LegacyBinding {
- parent: Cell::new(*legacy_scope),
- name: def.ident.name,
- ext: Rc::new(macro_rules::compile(&self.session.parse_sess, &def)),
- span: def.span,
+ parent: Cell::new(*legacy_scope), name: ident.name, def_id: def_id, span: item.span,
}));
- self.macro_names.insert(def.ident.name);
+ self.macro_names.insert(ident.name);
- if attr::contains_name(&def.attrs, "macro_export") {
- def.id = self.next_node_id();
- DefCollector::new(&mut self.definitions).with_parent(CRATE_DEF_INDEX, |collector| {
- collector.visit_macro_def(&def)
- });
- self.macro_exports.push(Export {
- name: def.ident.name,
- def: Def::Macro(self.definitions.local_def_id(def.id), MacroKind::Bang),
- });
- self.exported_macros.push(def);
+ if attr::contains_name(&item.attrs, "macro_export") {
+ let def = Def::Macro(def_id, MacroKind::Bang);
+ self.macro_exports.push(Export { name: ident.name, def: def });
}
}
}
}
None => {
- span_bug!(span, "Could not find container for method {}", id);
+ debug!("Could not find container for method {} at {:?}", id, span);
+ // This is not necessarily a bug, if there was a compilation error, the tables
+ // we need might not exist.
+ return None;
}
},
};
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::adjustment::CustomCoerceUnsized;
-use rustc::dep_graph::{DepNode, WorkProduct};
+use rustc::dep_graph::{AssertDepGraphSafe, DepNode, WorkProduct};
use rustc::hir::map as hir_map;
use rustc::util::common::time;
use session::config::{self, NoDebugInfo};
// Instantiate translation items without filling out definitions yet...
for ccx in crate_context_list.iter_need_trans() {
- let cgu = ccx.codegen_unit();
- let trans_items = cgu.items_in_deterministic_order(tcx, &symbol_map);
-
- tcx.dep_graph.with_task(cgu.work_product_dep_node(), || {
+ let dep_node = ccx.codegen_unit().work_product_dep_node();
+ tcx.dep_graph.with_task(dep_node,
+ ccx,
+ AssertDepGraphSafe(symbol_map.clone()),
+ trans_decl_task);
+
+ fn trans_decl_task<'a, 'tcx>(ccx: CrateContext<'a, 'tcx>,
+ symbol_map: AssertDepGraphSafe<Rc<SymbolMap<'tcx>>>) {
+ // FIXME(#40304): Instead of this, the symbol-map should be an
+ // on-demand thing that we compute.
+ let AssertDepGraphSafe(symbol_map) = symbol_map;
+ let cgu = ccx.codegen_unit();
+ let trans_items = cgu.items_in_deterministic_order(ccx.tcx(), &symbol_map);
for (trans_item, linkage) in trans_items {
trans_item.predefine(&ccx, linkage);
}
- });
+ }
}
// ... and now that we have everything pre-defined, fill out those definitions.
for ccx in crate_context_list.iter_need_trans() {
- let cgu = ccx.codegen_unit();
- let trans_items = cgu.items_in_deterministic_order(tcx, &symbol_map);
- tcx.dep_graph.with_task(cgu.work_product_dep_node(), || {
+ let dep_node = ccx.codegen_unit().work_product_dep_node();
+ tcx.dep_graph.with_task(dep_node,
+ ccx,
+ AssertDepGraphSafe(symbol_map.clone()),
+ trans_def_task);
+
+ fn trans_def_task<'a, 'tcx>(ccx: CrateContext<'a, 'tcx>,
+ symbol_map: AssertDepGraphSafe<Rc<SymbolMap<'tcx>>>) {
+ // FIXME(#40304): Instead of this, the symbol-map should be an
+ // on-demand thing that we compute.
+ let AssertDepGraphSafe(symbol_map) = symbol_map;
+ let cgu = ccx.codegen_unit();
+ let trans_items = cgu.items_in_deterministic_order(ccx.tcx(), &symbol_map);
for (trans_item, _) in trans_items {
trans_item.define(&ccx);
}
if ccx.sess().opts.debuginfo != NoDebugInfo {
debuginfo::finalize(&ccx);
}
- });
+ }
}
symbol_names_test::report_symbol_names(&shared_ccx);
use llvm;
use llvm::{ContextRef, ModuleRef, ValueRef};
-use rustc::dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig, WorkProduct};
+use rustc::dep_graph::{DepGraph, DepGraphSafe, DepNode, DepTrackingMap,
+ DepTrackingMapConfig, WorkProduct};
use middle::cstore::LinkMeta;
use rustc::hir;
use rustc::hir::def::ExportMap;
index: usize,
}
+impl<'a, 'tcx> DepGraphSafe for CrateContext<'a, 'tcx> {
+}
+
pub struct CrateContextIterator<'a, 'tcx: 'a> {
shared: &'a SharedCrateContext<'a, 'tcx>,
local_ccxs: &'a [LocalCrateContext<'tcx>],
use super::FnCtxt;
+use rustc::infer::InferOk;
use rustc::traits;
use rustc::ty::{self, Ty, TraitRef};
use rustc::ty::{ToPredicate, TypeFoldable};
pub fn finalize<'b, I>(self, pref: LvaluePreference, exprs: I)
where I: IntoIterator<Item = &'b hir::Expr>
+ {
+ let fcx = self.fcx;
+ fcx.register_infer_ok_obligations(self.finalize_as_infer_ok(pref, exprs));
+ }
+
+ pub fn finalize_as_infer_ok<'b, I>(self, pref: LvaluePreference, exprs: I)
+ -> InferOk<'tcx, ()>
+ where I: IntoIterator<Item = &'b hir::Expr>
{
let methods: Vec<_> = self.steps
.iter()
}
}
- for obligation in self.obligations {
- self.fcx.register_predicate(obligation);
+ InferOk {
+ value: (),
+ obligations: self.obligations
}
}
}
use rustc::hir;
use rustc::hir::def_id::DefId;
-use rustc::infer::{Coercion, InferOk, TypeTrace};
+use rustc::infer::{Coercion, InferResult, InferOk, TypeTrace};
+use rustc::infer::type_variable::TypeVariableOrigin;
use rustc::traits::{self, ObligationCause, ObligationCauseCode};
use rustc::ty::adjustment::{Adjustment, Adjust, AutoBorrow};
use rustc::ty::{self, LvaluePreference, TypeAndMut,
use rustc::ty::subst::Subst;
use syntax::abi;
use syntax::feature_gate;
-use util::common::indent;
-use std::cell::RefCell;
use std::collections::VecDeque;
use std::ops::Deref;
fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
cause: ObligationCause<'tcx>,
use_lub: bool,
- unsizing_obligations: RefCell<Vec<traits::PredicateObligation<'tcx>>>,
}
impl<'a, 'gcx, 'tcx> Deref for Coerce<'a, 'gcx, 'tcx> {
}
}
-type CoerceResult<'tcx> = RelateResult<'tcx, (Ty<'tcx>, Adjust<'tcx>)>;
+type CoerceResult<'tcx> = InferResult<'tcx, Adjustment<'tcx>>;
fn coerce_mutbls<'tcx>(from_mutbl: hir::Mutability,
to_mutbl: hir::Mutability)
}
}
+fn identity<'tcx>() -> Adjust<'tcx> {
+ Adjust::DerefRef {
+ autoderefs: 0,
+ autoref: None,
+ unsize: false,
+ }
+}
+
+fn success<'tcx>(kind: Adjust<'tcx>,
+ target: Ty<'tcx>,
+ obligations: traits::PredicateObligations<'tcx>)
+ -> CoerceResult<'tcx> {
+ Ok(InferOk {
+ value: Adjustment {
+ kind,
+ target
+ },
+ obligations
+ })
+}
+
impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> {
fn new(fcx: &'f FnCtxt<'f, 'gcx, 'tcx>, cause: ObligationCause<'tcx>) -> Self {
Coerce {
fcx: fcx,
cause: cause,
use_lub: false,
- unsizing_obligations: RefCell::new(vec![]),
}
}
- fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
+ fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
self.commit_if_ok(|_| {
let trace = TypeTrace::types(&self.cause, false, a, b);
if self.use_lub {
self.lub(false, trace, &a, &b)
- .map(|ok| self.register_infer_ok_obligations(ok))
} else {
self.sub(false, trace, &a, &b)
- .map(|InferOk { value, obligations }| {
- self.fcx.register_predicates(obligations);
- value
- })
}
})
}
- /// Unify two types (using sub or lub) and produce a noop coercion.
- fn unify_and_identity(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> {
- self.unify(&a, &b).and_then(|ty| self.identity(ty))
- }
-
- /// Synthesize an identity adjustment.
- fn identity(&self, ty: Ty<'tcx>) -> CoerceResult<'tcx> {
- Ok((ty, Adjust::DerefRef {
- autoderefs: 0,
- autoref: None,
- unsize: false,
- }))
+ /// Unify two types (using sub or lub) and produce a specific coercion.
+ fn unify_and(&self, a: Ty<'tcx>, b: Ty<'tcx>, kind: Adjust<'tcx>)
+ -> CoerceResult<'tcx> {
+ self.unify(&a, &b).and_then(|InferOk { value: ty, obligations }| {
+ success(kind, ty, obligations)
+ })
}
fn coerce<'a, E, I>(&self, exprs: &E, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx>
// Just ignore error types.
if a.references_error() || b.references_error() {
- return self.identity(b);
+ return success(identity(), b, vec![]);
}
if a.is_never() {
- return Ok((b, Adjust::NeverToAny));
+ return success(Adjust::NeverToAny, b, vec![]);
}
// Consider coercing the subtype to a DST
}
_ => {
// Otherwise, just use unification rules.
- self.unify_and_identity(a, b)
+ self.unify_and(a, b, identity())
}
}
}
coerce_mutbls(mt_a.mutbl, mt_b.mutbl)?;
(r_a, mt_a)
}
- _ => return self.unify_and_identity(a, b),
+ _ => return self.unify_and(a, b, identity()),
};
let span = self.cause.span;
let mut first_error = None;
let mut r_borrow_var = None;
let mut autoderef = self.autoderef(span, a);
- let mut success = None;
+ let mut found = None;
for (referent_ty, autoderefs) in autoderef.by_ref() {
if autoderefs == 0 {
mutbl: mt_b.mutbl, // [1] above
});
match self.unify(derefd_ty_a, b) {
- Ok(ty) => {
- success = Some((ty, autoderefs));
+ Ok(ok) => {
+ found = Some((ok, autoderefs));
break;
}
Err(err) => {
// (e.g., in example above, the failure from relating `Vec<T>`
// to the target type), since that should be the least
// confusing.
- let (ty, autoderefs) = match success {
+ let (InferOk { value: ty, mut obligations }, autoderefs) = match found {
Some(d) => d,
None => {
let err = first_error.expect("coerce_borrowed_pointer had no error");
}
};
- // This commits the obligations to the fulfillcx. After this succeeds,
- // this snapshot can't be rolled back.
- autoderef.finalize(LvaluePreference::from_mutbl(mt_b.mutbl), exprs());
-
- // Now apply the autoref. We have to extract the region out of
- // the final ref type we got.
if ty == a && mt_a.mutbl == hir::MutImmutable && autoderefs == 1 {
// As a special case, if we would produce `&'a *x`, that's
// a total no-op. We end up with the type `&'a T` just as
// `self.x`, but we auto-coerce it to `foo(&mut *self.x)`,
// which is a borrow.
assert_eq!(mt_b.mutbl, hir::MutImmutable); // can only coerce &T -> &U
- return self.identity(ty);
+ return success(identity(), ty, obligations);
}
+
+ // Now apply the autoref. We have to extract the region out of
+ // the final ref type we got.
let r_borrow = match ty.sty {
ty::TyRef(r_borrow, _) => r_borrow,
_ => span_bug!(span, "expected a ref type, got {:?}", ty),
ty,
autoderefs,
autoref);
- Ok((ty, Adjust::DerefRef {
+
+ let pref = LvaluePreference::from_mutbl(mt_b.mutbl);
+ obligations.extend(autoderef.finalize_as_infer_ok(pref, exprs()).obligations);
+
+ success(Adjust::DerefRef {
autoderefs: autoderefs,
autoref: autoref,
unsize: false,
- }))
+ }, ty, obligations)
}
}
_ => (source, None),
};
- let source = source.adjust_for_autoref(self.tcx, reborrow);
+ let coerce_source = source.adjust_for_autoref(self.tcx, reborrow);
+
+ let adjust = Adjust::DerefRef {
+ autoderefs: if reborrow.is_some() { 1 } else { 0 },
+ autoref: reborrow,
+ unsize: true,
+ };
+
+ // Setup either a subtyping or a LUB relationship between
+ // the `CoerceUnsized` target type and the expected type.
+ // We only have the latter, so we use an inference variable
+ // for the former and let type inference do the rest.
+ let origin = TypeVariableOrigin::MiscVariable(self.cause.span);
+ let coerce_target = self.next_ty_var(origin);
+ let mut coercion = self.unify_and(coerce_target, target, adjust)?;
let mut selcx = traits::SelectionContext::new(self);
// Use a FIFO queue for this custom fulfillment procedure.
let mut queue = VecDeque::new();
- let mut leftover_predicates = vec![];
// Create an obligation for `Source: CoerceUnsized<Target>`.
let cause = ObligationCause::misc(self.cause.span, self.body_id);
queue.push_back(self.tcx
- .predicate_for_trait_def(cause, coerce_unsized_did, 0, source, &[target]));
+ .predicate_for_trait_def(cause, coerce_unsized_did, 0,
+ coerce_source, &[coerce_target]));
// Keep resolving `CoerceUnsized` and `Unsize` predicates to avoid
// emitting a coercion in cases like `Foo<$1>` -> `Foo<$2>`, where
let trait_ref = match obligation.predicate {
ty::Predicate::Trait(ref tr) if traits.contains(&tr.def_id()) => tr.clone(),
_ => {
- leftover_predicates.push(obligation);
+ coercion.obligations.push(obligation);
continue;
}
};
}
}
- *self.unsizing_obligations.borrow_mut() = leftover_predicates;
-
- let adjustment = Adjust::DerefRef {
- autoderefs: if reborrow.is_some() { 1 } else { 0 },
- autoref: reborrow,
- unsize: true,
- };
- debug!("Success, coerced with {:?}", adjustment);
- Ok((target, adjustment))
+ Ok(coercion)
}
fn coerce_from_safe_fn(&self,
a: Ty<'tcx>,
fn_ty_a: ty::PolyFnSig<'tcx>,
- b: Ty<'tcx>)
+ b: Ty<'tcx>,
+ to_unsafe: Adjust<'tcx>,
+ normal: Adjust<'tcx>)
-> CoerceResult<'tcx> {
if let ty::TyFnPtr(fn_ty_b) = b.sty {
match (fn_ty_a.unsafety(), fn_ty_b.unsafety()) {
(hir::Unsafety::Normal, hir::Unsafety::Unsafe) => {
let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a);
- return self.unify_and_identity(unsafe_a, b)
- .map(|(ty, _)| (ty, Adjust::UnsafeFnPointer));
+ return self.unify_and(unsafe_a, b, to_unsafe);
}
_ => {}
}
}
- self.unify_and_identity(a, b)
+ self.unify_and(a, b, normal)
}
fn coerce_from_fn_pointer(&self,
let b = self.shallow_resolve(b);
debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b);
- self.coerce_from_safe_fn(a, fn_ty_a, b)
+ self.coerce_from_safe_fn(a, fn_ty_a, b,
+ Adjust::UnsafeFnPointer, identity())
}
fn coerce_from_fn_item(&self,
match b.sty {
ty::TyFnPtr(_) => {
let a_fn_pointer = self.tcx.mk_fn_ptr(fn_ty_a);
- self.coerce_from_safe_fn(a_fn_pointer, fn_ty_a, b)
- .map(|(ty, _)| (ty, Adjust::ReifyFnPointer))
+ self.coerce_from_safe_fn(a_fn_pointer, fn_ty_a, b,
+ Adjust::ReifyFnPointer, Adjust::ReifyFnPointer)
}
- _ => self.unify_and_identity(a, b),
+ _ => self.unify_and(a, b, identity()),
}
}
self.cause.span,
feature_gate::GateIssue::Language,
feature_gate::CLOSURE_TO_FN_COERCION);
- return self.unify_and_identity(a, b);
+ return self.unify_and(a, b, identity());
}
// We coerce the closure, which has fn type
// `extern "rust-call" fn((arg0,arg1,...)) -> _`
let pointer_ty = self.tcx.mk_fn_ptr(converted_sig);
debug!("coerce_closure_to_fn(a={:?}, b={:?}, pty={:?})",
a, b, pointer_ty);
- self.unify_and_identity(pointer_ty, b)
- .map(|(ty, _)| (ty, Adjust::ClosureFnPointer))
+ self.unify_and(pointer_ty, b, Adjust::ClosureFnPointer)
}
- _ => self.unify_and_identity(a, b),
+ _ => self.unify_and(a, b, identity()),
}
}
ty::TyRef(_, mt) => (true, mt),
ty::TyRawPtr(mt) => (false, mt),
_ => {
- return self.unify_and_identity(a, b);
+ return self.unify_and(a, b, identity());
}
};
mutbl: mutbl_b,
ty: mt_a.ty,
});
- let (ty, noop) = self.unify_and_identity(a_unsafe, b)?;
coerce_mutbls(mt_a.mutbl, mutbl_b)?;
-
// Although references and unsafe ptrs have the same
// representation, we still register an Adjust::DerefRef so that
// regionck knows that the region for `a` must be valid here.
- Ok((ty,
- if is_ref {
- Adjust::DerefRef {
- autoderefs: 1,
- autoref: Some(AutoBorrow::RawPtr(mutbl_b)),
- unsize: false,
- }
- } else if mt_a.mutbl != mutbl_b {
- Adjust::MutToConstPointer
- } else {
- noop
- }))
- }
-}
-
-fn apply<'a, 'b, 'gcx, 'tcx, E, I>(coerce: &mut Coerce<'a, 'gcx, 'tcx>,
- exprs: &E,
- a: Ty<'tcx>,
- b: Ty<'tcx>)
- -> RelateResult<'tcx, Adjustment<'tcx>>
- where E: Fn() -> I,
- I: IntoIterator<Item = &'b hir::Expr>
-{
-
- let (ty, adjust) = indent(|| coerce.coerce(exprs, a, b))?;
-
- let fcx = coerce.fcx;
- if let Adjust::DerefRef { unsize: true, .. } = adjust {
- let mut obligations = coerce.unsizing_obligations.borrow_mut();
- for obligation in obligations.drain(..) {
- fcx.register_predicate(obligation);
- }
+ self.unify_and(a_unsafe, b, if is_ref {
+ Adjust::DerefRef {
+ autoderefs: 1,
+ autoref: Some(AutoBorrow::RawPtr(mutbl_b)),
+ unsize: false,
+ }
+ } else if mt_a.mutbl != mutbl_b {
+ Adjust::MutToConstPointer
+ } else {
+ identity()
+ })
}
-
- Ok(Adjustment {
- kind: adjust,
- target: ty
- })
}
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target);
let cause = self.cause(expr.span, ObligationCauseCode::ExprAssignable);
- let mut coerce = Coerce::new(self, cause);
+ let coerce = Coerce::new(self, cause);
self.commit_if_ok(|_| {
- let adjustment = apply(&mut coerce, &|| Some(expr), source, target)?;
+ let ok = coerce.coerce(&|| Some(expr), source, target)?;
+ let adjustment = self.register_infer_ok_obligations(ok);
if !adjustment.is_identity() {
debug!("Success, coerced with {:?}", adjustment);
match self.tables.borrow().adjustments.get(&expr.id) {
// but only if the new expression has no coercion already applied to it.
let mut first_error = None;
if !self.tables.borrow().adjustments.contains_key(&new.id) {
- let result = self.commit_if_ok(|_| apply(&mut coerce, &|| Some(new), new_ty, prev_ty));
+ let result = self.commit_if_ok(|_| coerce.coerce(&|| Some(new), new_ty, prev_ty));
match result {
- Ok(adjustment) => {
+ Ok(ok) => {
+ let adjustment = self.register_infer_ok_obligations(ok);
if !adjustment.is_identity() {
self.write_adjustment(new.id, adjustment);
}
}
}
- match self.commit_if_ok(|_| apply(&mut coerce, &exprs, prev_ty, new_ty)) {
+ match self.commit_if_ok(|_| coerce.coerce(&exprs, prev_ty, new_ty)) {
Err(_) => {
// Avoid giving strange errors on failed attempts.
if let Some(e) = first_error {
})
}
}
- Ok(adjustment) => {
+ Ok(ok) => {
+ let adjustment = self.register_infer_ok_obligations(ok);
if !adjustment.is_identity() {
let mut tables = self.tables.borrow_mut();
for expr in exprs() {
}
pub fn check_item_bodies<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> CompileResult {
- tcx.sess.track_errors(|| {
- tcx.dep_graph.with_task(DepNode::TypeckBodiesKrate, || {
- tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
- tcx.item_tables(body_owner_def_id);
- });
+ return tcx.sess.track_errors(|| {
+ tcx.dep_graph.with_task(DepNode::TypeckBodiesKrate, tcx, (), check_item_bodies_task);
+ });
+
+ fn check_item_bodies_task<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, (): ()) {
+ tcx.visit_all_bodies_in_krate(|body_owner_def_id, _body_id| {
+ tcx.item_tables(body_owner_def_id);
});
- })
+ }
}
pub fn provide(providers: &mut Providers) {
/// 4. This is added by the code in `visit_expr` when we write to `item_types`.
/// 5. This is added by the code in `convert_item` when we write to `item_types`;
/// note that this write occurs inside the `CollectItemSig` task.
- /// 6. Added by explicit `read` below
- fn with_collect_item_sig<OP>(&self, id: ast::NodeId, op: OP)
- where OP: FnOnce()
- {
+ /// 6. Added by reads from within `op`.
+ fn with_collect_item_sig(&self, id: ast::NodeId, op: fn(TyCtxt<'a, 'tcx, 'tcx>, ast::NodeId)) {
let def_id = self.tcx.hir.local_def_id(id);
- self.tcx.dep_graph.with_task(DepNode::CollectItemSig(def_id), || {
- self.tcx.hir.read(id);
- op();
- });
+ self.tcx.dep_graph.with_task(DepNode::CollectItemSig(def_id), self.tcx, id, op);
}
}
}
fn visit_item(&mut self, item: &'tcx hir::Item) {
- self.with_collect_item_sig(item.id, || convert_item(self.tcx, item));
+ self.with_collect_item_sig(item.id, convert_item);
intravisit::walk_item(self, item);
}
}
fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {
- self.with_collect_item_sig(trait_item.id, || {
- convert_trait_item(self.tcx, trait_item)
- });
+ self.with_collect_item_sig(trait_item.id, convert_trait_item);
intravisit::walk_trait_item(self, trait_item);
}
fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) {
- self.with_collect_item_sig(impl_item.id, || {
- convert_impl_item(self.tcx, impl_item)
- });
+ self.with_collect_item_sig(impl_item.id, convert_impl_item);
intravisit::walk_impl_item(self, impl_item);
}
}
}
}
-fn convert_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, it: &hir::Item) {
+fn convert_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_id: ast::NodeId) {
+ let it = tcx.hir.expect_item(item_id);
debug!("convert: item {} with id {}", it.name, it.id);
- let def_id = tcx.hir.local_def_id(it.id);
+ let def_id = tcx.hir.local_def_id(item_id);
match it.node {
// These don't define types.
hir::ItemExternCrate(_) | hir::ItemUse(..) | hir::ItemMod(_) => {
}
}
-fn convert_trait_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_item: &hir::TraitItem) {
+fn convert_trait_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, trait_item_id: ast::NodeId) {
+ let trait_item = tcx.hir.expect_trait_item(trait_item_id);
let def_id = tcx.hir.local_def_id(trait_item.id);
tcx.item_generics(def_id);
tcx.item_predicates(def_id);
}
-fn convert_impl_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_item: &hir::ImplItem) {
- let def_id = tcx.hir.local_def_id(impl_item.id);
+fn convert_impl_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_item_id: ast::NodeId) {
+ let def_id = tcx.hir.local_def_id(impl_item_id);
tcx.item_generics(def_id);
tcx.item_type(def_id);
tcx.item_predicates(def_id);
/// Used when rendering a `ResolvedPath` structure. This invokes the `path`
/// rendering function with the necessary arguments for linking to a local path.
fn resolved_path(w: &mut fmt::Formatter, did: DefId, path: &clean::Path,
- print_all: bool, use_absolute: bool) -> fmt::Result {
+ print_all: bool, use_absolute: bool, is_not_debug: bool) -> fmt::Result {
let last = path.segments.last().unwrap();
let rel_root = match &*path.segments[0].name {
"self" => Some("./".to_string()),
} else {
root.push_str(&seg.name);
root.push_str("/");
- write!(w, "<a class=\"mod\"
- href=\"{}index.html\">{}</a>::",
- root,
- seg.name)?;
+ if is_not_debug {
+ write!(w, "<a class=\"mod\"
+ href=\"{}index.html\">{}</a>::",
+ root,
+ seg.name)?;
+ } else {
+ write!(w, "{}::", seg.name)?;
+ }
}
}
}
}
}
if w.alternate() {
- write!(w, "{:#}{:#}", HRef::new(did, &last.name), last.params)?;
+ if is_not_debug {
+ write!(w, "{:#}{:#}", HRef::new(did, &last.name), last.params)?;
+ } else {
+ write!(w, "{:?}{:?}", HRef::new(did, &last.name), last.params)?;
+ }
} else {
- let path = if use_absolute {
- match href(did) {
- Some((_, _, fqp)) => format!("{}::{}",
- fqp[..fqp.len()-1].join("::"),
- HRef::new(did, fqp.last().unwrap())),
- None => format!("{}", HRef::new(did, &last.name)),
- }
+ if is_not_debug {
+ let path = if use_absolute {
+ match href(did) {
+ Some((_, _, fqp)) => format!("{}::{}",
+ fqp[..fqp.len()-1].join("::"),
+ HRef::new(did, fqp.last().unwrap())),
+ None => format!("{}", HRef::new(did, &last.name)),
+ }
+ } else {
+ format!("{}", HRef::new(did, &last.name))
+ };
+ write!(w, "{}{}", path, last.params)?;
} else {
- format!("{}", HRef::new(did, &last.name))
- };
- write!(w, "{}{}", path, last.params)?;
+ let path = if use_absolute {
+ match href(did) {
+ Some((_, _, fqp)) => format!("{:?}::{:?}",
+ fqp[..fqp.len()-1].join("::"),
+ HRef::new(did, fqp.last().unwrap())),
+ None => format!("{:?}", HRef::new(did, &last.name)),
+ }
+ } else {
+ format!("{:?}", HRef::new(did, &last.name))
+ };
+ write!(w, "{}{:?}", path, last.params)?;
+ }
}
Ok(())
}
}
}
+impl<'a> fmt::Debug for HRef<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "{}", self.text)
+ }
+}
+
fn fmt_type(t: &clean::Type, f: &mut fmt::Formatter, use_absolute: bool,
is_not_debug: bool) -> fmt::Result {
match *t {
}
clean::ResolvedPath{ did, ref typarams, ref path, is_generic } => {
// Paths like T::Output and Self::Output should be rendered with all segments
- resolved_path(f, did, path, is_generic, use_absolute)?;
+ resolved_path(f, did, path, is_generic, use_absolute, is_not_debug)?;
tybounds(f, typarams)
}
clean::Infer => write!(f, "_"),
write!(f, "{}::", self_type)?;
}
let path = clean::Path::singleton(name.clone());
- resolved_path(f, did, &path, true, use_absolute)?;
+ resolved_path(f, did, &path, true, use_absolute, is_not_debug)?;
// FIXME: `typarams` are not rendered, and this seems bad?
drop(typarams);
impl fmt::Display for clean::ImportSource {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.did {
- Some(did) => resolved_path(f, did, &self.path, true, false),
+ Some(did) => resolved_path(f, did, &self.path, true, false, true),
_ => {
for (i, seg) in self.path.segments.iter().enumerate() {
if i > 0 {
use syntax::abi;
use syntax::ast;
use syntax::attr;
+use syntax::tokenstream::TokenStream;
use syntax_pos::Span;
use rustc::hir::map as hir_map;
}
let imported_from = self.cx.sess().cstore.original_crate_name(def_id.krate);
let def = match self.cx.sess().cstore.load_macro(def_id, self.cx.sess()) {
- LoadedMacro::MacroRules(macro_rules) => macro_rules,
+ LoadedMacro::MacroDef(macro_def) => macro_def,
// FIXME(jseyfried): document proc macro reexports
LoadedMacro::ProcMacro(..) => continue,
};
- // FIXME(jseyfried) merge with `self.visit_macro()`
- let tts = def.stream().trees().collect::<Vec<_>>();
- let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
+ let matchers = if let ast::ItemKind::MacroDef(ref tokens) = def.node {
+ let tts: Vec<_> = TokenStream::from(tokens.clone()).into_trees().collect();
+ tts.chunks(4).map(|arm| arm[0].span()).collect()
+ } else {
+ unreachable!()
+ };
om.macros.push(Macro {
def_id: def_id,
attrs: def.attrs.clone().into(),
use hash::{Hash, Hasher, BuildHasher, SipHasher13};
use iter::{FromIterator, FusedIterator};
use mem::{self, replace};
-use ops::{Deref, Index};
+use ops::{Deref, Index, InPlace, Place, Placer};
use rand::{self, Rng};
+use ptr;
use super::table::{self, Bucket, EmptyBucket, FullBucket, FullBucketMut, RawTable, SafeHash};
use super::table::BucketState::{Empty, Full};
mut hash: SafeHash,
mut key: K,
mut val: V)
- -> &'a mut V {
+ -> FullBucketMut<'a, K, V> {
let start_index = bucket.index();
let size = bucket.table().size();
// Save the *starting point*.
// bucket, which is a FullBucket on top of a
// FullBucketMut, into just one FullBucketMut. The "table"
// refers to the inner FullBucketMut in this context.
- return bucket.into_table().into_mut_refs().1;
+ return bucket.into_table();
}
Full(bucket) => bucket,
};
}
}
+/// A place for insertion to a `Entry`.
+///
+/// See [`HashMap::entry`](struct.HashMap.html#method.entry) for details.
+#[must_use = "places do nothing unless written to with `<-` syntax"]
+#[unstable(feature = "collection_placement",
+ reason = "struct name and placement protocol is subject to change",
+ issue = "30172")]
+pub struct EntryPlace<'a, K: 'a, V: 'a> {
+ bucket: FullBucketMut<'a, K, V>,
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "struct name and placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, K: 'a + Debug, V: 'a + Debug> Debug for EntryPlace<'a, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("EntryPlace")
+ .field("key", self.bucket.read().0)
+ .field("value", self.bucket.read().1)
+ .finish()
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "struct name and placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, K, V> Drop for EntryPlace<'a, K, V> {
+ fn drop(&mut self) {
+ // Inplacement insertion failed. Only key need to drop.
+ // The value is failed to insert into map.
+ unsafe { self.bucket.remove_key() };
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, K, V> Placer<V> for Entry<'a, K, V> {
+ type Place = EntryPlace<'a, K, V>;
+
+ fn make_place(self) -> EntryPlace<'a, K, V> {
+ let b = match self {
+ Occupied(mut o) => {
+ unsafe { ptr::drop_in_place(o.elem.read_mut().1); }
+ o.elem
+ }
+ Vacant(v) => {
+ unsafe { v.insert_key() }
+ }
+ };
+ EntryPlace { bucket: b }
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, K, V> Place<V> for EntryPlace<'a, K, V> {
+ fn pointer(&mut self) -> *mut V {
+ self.bucket.read_mut().1
+ }
+}
+
+#[unstable(feature = "collection_placement",
+ reason = "placement protocol is subject to change",
+ issue = "30172")]
+impl<'a, K, V> InPlace<V> for EntryPlace<'a, K, V> {
+ type Owner = ();
+
+ unsafe fn finalize(self) {
+ mem::forget(self);
+ }
+}
+
impl<'a, K, V> Entry<'a, K, V> {
#[stable(feature = "rust1", since = "1.0.0")]
/// Ensures a value is in the entry by inserting the default if empty, and returns
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn insert(self, value: V) -> &'a mut V {
- match self.elem {
+ let b = match self.elem {
NeqElem(mut bucket, disp) => {
if disp >= DISPLACEMENT_THRESHOLD {
bucket.table_mut().set_tag(true);
if disp >= DISPLACEMENT_THRESHOLD {
bucket.table_mut().set_tag(true);
}
- bucket.put(self.hash, self.key, value).into_mut_refs().1
+ bucket.put(self.hash, self.key, value)
+ },
+ };
+ b.into_mut_refs().1
+ }
+
+ // Only used for InPlacement insert. Avoid unnecessary value copy.
+ // The value remains uninitialized.
+ unsafe fn insert_key(self) -> FullBucketMut<'a, K, V> {
+ match self.elem {
+ NeqElem(mut bucket, disp) => {
+ if disp >= DISPLACEMENT_THRESHOLD {
+ bucket.table_mut().set_tag(true);
+ }
+ let uninit = mem::uninitialized();
+ robin_hood(bucket, disp, self.hash, self.key, uninit)
+ },
+ NoElem(mut bucket, disp) => {
+ if disp >= DISPLACEMENT_THRESHOLD {
+ bucket.table_mut().set_tag(true);
+ }
+ bucket.put_key(self.hash, self.key)
},
}
}
use super::RandomState;
use cell::RefCell;
use rand::{thread_rng, Rng};
+ use panic;
#[test]
fn test_zero_capacities() {
}
panic!("Adaptive early resize failed");
}
+
+ #[test]
+ fn test_placement_in() {
+ let mut map = HashMap::new();
+ map.extend((0..10).map(|i| (i, i)));
+
+ map.entry(100) <- 100;
+ assert_eq!(map[&100], 100);
+
+ map.entry(0) <- 10;
+ assert_eq!(map[&0], 10);
+
+ assert_eq!(map.len(), 11);
+ }
+
+ #[test]
+ fn test_placement_panic() {
+ let mut map = HashMap::new();
+ map.extend((0..10).map(|i| (i, i)));
+
+ fn mkpanic() -> usize { panic!() }
+
+ // modify existing key
+ // when panic happens, previous key is removed.
+ let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| { map.entry(0) <- mkpanic(); }));
+ assert_eq!(map.len(), 9);
+ assert!(!map.contains_key(&0));
+
+ // add new key
+ let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| { map.entry(100) <- mkpanic(); }));
+ assert_eq!(map.len(), 9);
+ assert!(!map.contains_key(&100));
+ }
+
+ #[test]
+ fn test_placement_drop() {
+ // correctly drop
+ struct TestV<'a>(&'a mut bool);
+ impl<'a> Drop for TestV<'a> {
+ fn drop(&mut self) {
+ if !*self.0 { panic!("value double drop!"); } // no double drop
+ *self.0 = false;
+ }
+ }
+
+ fn makepanic<'a>() -> TestV<'a> { panic!() }
+
+ let mut can_drop = true;
+ let mut hm = HashMap::new();
+ hm.insert(0, TestV(&mut can_drop));
+ let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| { hm.entry(0) <- makepanic(); }));
+ assert_eq!(hm.len(), 0);
+ }
}
table: self.table,
}
}
+
+ /// Puts given key, remain value uninitialized.
+ /// It is only used for inplacement insertion.
+ pub unsafe fn put_key(mut self, hash: SafeHash, key: K) -> FullBucket<K, V, M> {
+ *self.raw.hash = hash.inspect();
+ let pair_mut = self.raw.pair as *mut (K, V);
+ ptr::write(&mut (*pair_mut).0, key);
+
+ self.table.borrow_table_mut().size += 1;
+
+ FullBucket {
+ raw: self.raw,
+ idx: self.idx,
+ table: self.table,
+ }
+ }
}
impl<K, V, M: Deref<Target = RawTable<K, V>>> FullBucket<K, V, M> {
v)
}
}
+
+ /// Remove this bucket's `key` from the hashtable.
+ /// Only used for inplacement insertion.
+ /// NOTE: `Value` is uninitialized when this function is called, don't try to drop the `Value`.
+ pub unsafe fn remove_key(&mut self) {
+ self.table.size -= 1;
+
+ *self.raw.hash = EMPTY_BUCKET;
+ let pair_mut = self.raw.pair as *mut (K, V);
+ ptr::drop_in_place(&mut (*pair_mut).0); // only drop key
+ }
}
// This use of `Put` is misleading and restrictive, but safe and sufficient for our use cases
#![feature(panic_unwind)]
#![feature(peek)]
#![feature(placement_in_syntax)]
+#![feature(placement_new_protocol)]
#![feature(prelude_import)]
#![feature(pub_restricted)]
#![feature(rand)]
pub fn from_ident(s: Span, identifier: Ident) -> Path {
Path {
span: s,
- segments: vec![identifier.into()],
+ segments: vec![PathSegment::from_ident(identifier, s)],
}
}
pub struct PathSegment {
/// The identifier portion of this path segment.
pub identifier: Ident,
+ /// Span of the segment identifier.
+ pub span: Span,
/// Type/lifetime parameters attached to this path. They come in
/// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that
pub parameters: Option<P<PathParameters>>,
}
-impl From<Ident> for PathSegment {
- fn from(id: Ident) -> Self {
- PathSegment { identifier: id, parameters: None }
- }
-}
-
impl PathSegment {
+ pub fn from_ident(ident: Ident, span: Span) -> Self {
+ PathSegment { identifier: ident, span: span, parameters: None }
+ }
pub fn crate_root() -> Self {
PathSegment {
identifier: keywords::CrateRoot.ident(),
+ span: DUMMY_SP,
parameters: None,
}
}
pub module: Mod,
pub attrs: Vec<Attribute>,
pub span: Span,
- pub exported_macros: Vec<MacroDef>,
}
/// A spanned compile-time attribute list item.
Option<TraitRef>, // (optional) trait this impl implements
P<Ty>, // self
Vec<ImplItem>),
- /// A macro invocation (which includes macro definition).
+ /// A macro invocation.
///
/// E.g. `macro_rules! foo { .. }` or `foo!(..)`
Mac(Mac),
+
+ /// A macro definition.
+ MacroDef(ThinTokenStream),
}
impl ItemKind {
ItemKind::Union(..) => "union",
ItemKind::Trait(..) => "trait",
ItemKind::Mac(..) |
+ ItemKind::MacroDef(..) |
ItemKind::Impl(..) |
ItemKind::DefaultImpl(..) => "item"
}
}
}
-/// A macro definition, in this crate or imported from another.
-///
-/// Not parsed directly, but created on macro import or `macro_rules!` expansion.
-#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
-pub struct MacroDef {
- pub ident: Ident,
- pub attrs: Vec<Attribute>,
- pub id: NodeId,
- pub span: Span,
- pub body: ThinTokenStream,
-}
-
-impl MacroDef {
- pub fn stream(&self) -> TokenStream {
- self.body.clone().into()
- }
-}
-
#[cfg(test)]
mod tests {
use serialize;
use codemap::{self, CodeMap, ExpnInfo, Spanned, respan};
use syntax_pos::{Span, ExpnId, NO_EXPANSION};
use errors::{DiagnosticBuilder, FatalError};
-use ext::expand::{self, Expansion};
+use ext::expand::{self, Expansion, Invocation};
use ext::hygiene::Mark;
use fold::{self, Folder};
use parse::{self, parser, DirectoryOwnership};
fn is_whitelisted_legacy_custom_derive(&self, name: Name) -> bool;
fn visit_expansion(&mut self, mark: Mark, expansion: &Expansion, derives: &[Mark]);
- fn add_ext(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>);
- fn add_expansions_at_stmt(&mut self, id: ast::NodeId, macros: Vec<Mark>);
+ fn add_builtin(&mut self, ident: ast::Ident, ext: Rc<SyntaxExtension>);
fn resolve_imports(&mut self);
// Resolves attribute and derive legacy macros from `#![plugin(..)]`.
fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec<Attribute>) -> Option<Attribute>;
- fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind,
- force: bool) -> Result<Rc<SyntaxExtension>, Determinacy>;
+ fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
+ -> Result<Option<Rc<SyntaxExtension>>, Determinacy>;
+ fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool)
+ -> Result<Rc<SyntaxExtension>, Determinacy>;
}
#[derive(Copy, Clone, Debug)]
fn is_whitelisted_legacy_custom_derive(&self, _name: Name) -> bool { false }
fn visit_expansion(&mut self, _invoc: Mark, _expansion: &Expansion, _derives: &[Mark]) {}
- fn add_ext(&mut self, _ident: ast::Ident, _ext: Rc<SyntaxExtension>) {}
- fn add_expansions_at_stmt(&mut self, _id: ast::NodeId, _macros: Vec<Mark>) {}
+ fn add_builtin(&mut self, _ident: ast::Ident, _ext: Rc<SyntaxExtension>) {}
fn resolve_imports(&mut self) {}
fn find_legacy_attr_invoc(&mut self, _attrs: &mut Vec<Attribute>) -> Option<Attribute> { None }
+ fn resolve_invoc(&mut self, _invoc: &mut Invocation, _scope: Mark, _force: bool)
+ -> Result<Option<Rc<SyntaxExtension>>, Determinacy> {
+ Err(Determinacy::Determined)
+ }
fn resolve_macro(&mut self, _scope: Mark, _path: &ast::Path, _kind: MacroKind,
_force: bool) -> Result<Rc<SyntaxExtension>, Determinacy> {
Err(Determinacy::Determined)
fn qpath(&self, self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::Ident)
+ ident: ast::SpannedIdent)
-> (ast::QSelf, ast::Path);
fn qpath_all(&self, self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::Ident,
+ ident: ast::SpannedIdent,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>>,
bindings: Vec<ast::TypeBinding>)
segments.push(ast::PathSegment::crate_root());
}
- segments.extend(idents.into_iter().map(Into::into));
+ segments.extend(idents.into_iter().map(|i| ast::PathSegment::from_ident(i, sp)));
let parameters = if lifetimes.is_empty() && types.is_empty() && bindings.is_empty() {
None
} else {
bindings: bindings,
})))
};
- segments.push(ast::PathSegment { identifier: last_identifier, parameters: parameters });
+ segments.push(ast::PathSegment {
+ identifier: last_identifier,
+ span: sp,
+ parameters: parameters
+ });
ast::Path {
span: sp,
segments: segments,
fn qpath(&self,
self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::Ident)
+ ident: ast::SpannedIdent)
-> (ast::QSelf, ast::Path) {
self.qpath_all(self_type, trait_path, ident, vec![], vec![], vec![])
}
fn qpath_all(&self,
self_type: P<ast::Ty>,
trait_path: ast::Path,
- ident: ast::Ident,
+ ident: ast::SpannedIdent,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>>,
bindings: Vec<ast::TypeBinding>)
bindings: bindings,
};
path.segments.push(ast::PathSegment {
- identifier: ident,
+ identifier: ident.node,
+ span: ident.span,
parameters: Some(P(ast::PathParameters::AngleBracketed(parameters))),
});
pub struct Invocation {
pub kind: InvocationKind,
expansion_kind: ExpansionKind,
- expansion_data: ExpansionData,
+ pub expansion_data: ExpansionData,
}
pub enum InvocationKind {
let scope =
if self.monotonic { invoc.expansion_data.mark } else { orig_expansion_data.mark };
- let ext = match self.resolve_invoc(&mut invoc, scope, force) {
+ let ext = match self.cx.resolver.resolve_invoc(&mut invoc, scope, force) {
Ok(ext) => Some(ext),
Err(Determinacy::Determined) => None,
Err(Determinacy::Undetermined) => {
result
}
- fn resolve_invoc(&mut self, invoc: &mut Invocation, scope: Mark, force: bool)
- -> Result<Option<Rc<SyntaxExtension>>, Determinacy> {
- let (attr, traits, item) = match invoc.kind {
- InvocationKind::Bang { ref mac, .. } => {
- return self.cx.resolver.resolve_macro(scope, &mac.node.path,
- MacroKind::Bang, force).map(Some);
- }
- InvocationKind::Attr { attr: None, .. } => return Ok(None),
- InvocationKind::Derive { name, span, .. } => {
- let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
- return self.cx.resolver.resolve_macro(scope, &path,
- MacroKind::Derive, force).map(Some)
- }
- InvocationKind::Attr { ref mut attr, ref traits, ref mut item } => (attr, traits, item),
- };
-
- let (attr_name, path) = {
- let attr = attr.as_ref().unwrap();
- (attr.name(), ast::Path::from_ident(attr.span, Ident::with_empty_ctxt(attr.name())))
- };
-
- let mut determined = true;
- match self.cx.resolver.resolve_macro(scope, &path, MacroKind::Attr, force) {
- Ok(ext) => return Ok(Some(ext)),
- Err(Determinacy::Undetermined) => determined = false,
- Err(Determinacy::Determined) if force => return Err(Determinacy::Determined),
- _ => {}
- }
-
- for &(name, span) in traits {
- let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name));
- match self.cx.resolver.resolve_macro(scope, &path, MacroKind::Derive, force) {
- Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext {
- if inert_attrs.contains(&attr_name) {
- // FIXME(jseyfried) Avoid `mem::replace` here.
- let dummy_item = placeholder(ExpansionKind::Items, ast::DUMMY_NODE_ID)
- .make_items().pop().unwrap();
- *item = mem::replace(item, Annotatable::Item(dummy_item))
- .map_attrs(|mut attrs| {
- let inert_attr = attr.take().unwrap();
- attr::mark_known(&inert_attr);
- if self.cx.ecfg.proc_macro_enabled() {
- *attr = find_attr_invoc(&mut attrs);
- }
- attrs.push(inert_attr);
- attrs
- });
- }
- return Err(Determinacy::Undetermined);
- },
- Err(Determinacy::Undetermined) => determined = false,
- Err(Determinacy::Determined) => {}
- }
- }
-
- Err(if determined { Determinacy::Determined } else { Determinacy::Undetermined })
- }
-
fn expand_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) -> Expansion {
match invoc.kind {
InvocationKind::Bang { .. } => self.expand_bang_invoc(invoc, ext),
let extname = path.segments.last().unwrap().identifier.name;
let ident = ident.unwrap_or(keywords::Invalid.ident());
- let marked_tts = mark_tts(mac.node.stream(), mark);
+ let marked_tts =
+ noop_fold_tts(mac.node.stream(), &mut Marker { mark: mark, expn_id: None });
let opt_expanded = match *ext {
NormalTT(ref expandfun, exp_span, allow_internal_unstable) => {
if ident.name != keywords::Invalid.name() {
}
}
-fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
+pub fn find_attr_invoc(attrs: &mut Vec<ast::Attribute>) -> Option<ast::Attribute> {
for i in 0 .. attrs.len() {
if !attr::is_known(&attrs[i]) && !is_builtin_attr(&attrs[i]) {
return Some(attrs.remove(i));
match item.node {
ast::ItemKind::Mac(..) => {
self.check_attributes(&item.attrs);
- let is_macro_def = if let ItemKind::Mac(ref mac) = item.node {
- mac.node.path.segments[0].identifier.name == "macro_rules"
- } else {
- unreachable!()
- };
-
- item.and_then(|mut item| match item.node {
- ItemKind::Mac(_) if is_macro_def => {
- item.id = Mark::fresh().as_placeholder_id();
- SmallVector::one(P(item))
- }
+ item.and_then(|item| match item.node {
ItemKind::Mac(mac) => {
self.collect(ExpansionKind::Items, InvocationKind::Bang {
mac: mac,
}
fn fold_item_kind(&mut self, item: ast::ItemKind) -> ast::ItemKind {
- noop_fold_item_kind(self.cfg.configure_item_kind(item), self)
+ match item {
+ ast::ItemKind::MacroDef(..) => item,
+ _ => noop_fold_item_kind(self.cfg.configure_item_kind(item), self),
+ }
}
fn new_id(&mut self, id: ast::NodeId) -> ast::NodeId {
span
}
}
-
-// apply a given mark to the given token trees. Used prior to expansion of a macro.
-pub fn mark_tts(tts: TokenStream, m: Mark) -> TokenStream {
- noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
-}
}
/// A mark is a unique id associated with a macro expansion.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Default, RustcEncodable, RustcDecodable)]
pub struct Mark(u32);
impl Mark {
})
})
}
-
- /// If `ident` is macro expanded, return the source ident from the macro definition
- /// and the mark of the expansion that created the macro definition.
- pub fn source(self) -> (Self /* source context */, Mark /* source macro */) {
- let macro_def_ctxt = self.data().prev_ctxt.data();
- (macro_def_ctxt.prev_ctxt, macro_def_ctxt.outer_mark)
- }
}
impl fmt::Debug for SyntaxContext {
use util::small_vector::SmallVector;
use std::collections::HashMap;
-use std::mem;
pub fn placeholder(kind: ExpansionKind, id: ast::NodeId) -> Expansion {
fn mac_placeholder() -> ast::Mac {
fn fold_block(&mut self, block: P<ast::Block>) -> P<ast::Block> {
noop_fold_block(block, self).map(|mut block| {
- let mut macros = Vec::new();
let mut remaining_stmts = block.stmts.len();
block.stmts = block.stmts.move_flat_map(|mut stmt| {
remaining_stmts -= 1;
- // `macro_rules!` macro definition
- if let ast::StmtKind::Item(ref item) = stmt.node {
- if let ast::ItemKind::Mac(_) = item.node {
- macros.push(Mark::from_placeholder_id(item.id));
- return None;
- }
- }
-
match stmt.node {
// Avoid wasting a node id on a trailing expression statement,
// which shares a HIR node with the expression itself.
_ => {}
}
- if self.monotonic && !macros.is_empty() {
- let macros = mem::replace(&mut macros, Vec::new());
- self.cx.resolver.add_expansions_at_stmt(stmt.id, macros);
- }
-
Some(stmt)
});
// Holy self-referential!
/// Converts a `macro_rules!` invocation into a syntax extension.
-pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
+pub fn compile(sess: &ParseSess, def: &ast::Item) -> SyntaxExtension {
let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
];
// Parse the macro_rules! invocation
- let argument_map = match parse(sess, def.body.clone().into(), &argument_gram, None) {
+ let body = match def.node {
+ ast::ItemKind::MacroDef(ref body) => body.clone().into(),
+ _ => unreachable!(),
+ };
+ let argument_map = match parse(sess, body, &argument_gram, None) {
Success(m) => m,
Failure(sp, tok) => {
let s = parse_failure_msg(tok);
pub fn noop_fold_path<T: Folder>(Path { segments, span }: Path, fld: &mut T) -> Path {
Path {
- segments: segments.move_map(|PathSegment {identifier, parameters}| PathSegment {
+ segments: segments.move_map(|PathSegment {identifier, span, parameters}| PathSegment {
identifier: fld.fold_ident(identifier),
+ span: fld.new_span(span),
parameters: parameters.map(|ps| ps.map(|ps| fld.fold_path_parameters(ps))),
}),
span: fld.new_span(span)
items.move_flat_map(|item| folder.fold_trait_item(item)),
),
ItemKind::Mac(m) => ItemKind::Mac(folder.fold_mac(m)),
+ ItemKind::MacroDef(tts) => ItemKind::MacroDef(folder.fold_tts(tts.into()).into()),
}
}
}
}
-pub fn noop_fold_crate<T: Folder>(Crate {module, attrs, mut exported_macros, span}: Crate,
+pub fn noop_fold_crate<T: Folder>(Crate {module, attrs, span}: Crate,
folder: &mut T) -> Crate {
let mut items = folder.fold_item(P(ast::Item {
ident: keywords::Invalid.ident(),
}, vec![], span)
};
- for def in &mut exported_macros {
- def.id = folder.new_id(def.id);
- }
-
Crate {
module: module,
attrs: attrs,
- exported_macros: exported_macros,
span: span,
}
}
matches_codepattern,
"matches_codepattern",
pprust::to_string(|s| fake_print_crate(s, &folded_crate)),
- "zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string());
+ "macro_rules! zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)));".to_string());
}
}
Span {lo: BytePos(a), hi: BytePos(b), expn_id: NO_EXPANSION}
}
+ fn str2seg(s: &str, lo: u32, hi: u32) -> ast::PathSegment {
+ ast::PathSegment::from_ident(Ident::from_str(s), sp(lo, hi))
+ }
+
#[test] fn path_exprs_1() {
assert!(string_to_expr("a".to_string()) ==
P(ast::Expr{
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
span: sp(0, 1),
- segments: vec![Ident::from_str("a").into()],
+ segments: vec![str2seg("a", 0, 1)],
}),
span: sp(0, 1),
attrs: ThinVec::new(),
node: ast::ExprKind::Path(None, ast::Path {
span: sp(0, 6),
segments: vec![ast::PathSegment::crate_root(),
- Ident::from_str("a").into(),
- Ident::from_str("b").into()]
+ str2seg("a", 2, 3),
+ str2seg("b", 5, 6)]
}),
span: sp(0, 6),
attrs: ThinVec::new(),
id: ast::DUMMY_NODE_ID,
node:ast::ExprKind::Path(None, ast::Path{
span: sp(7, 8),
- segments: vec![Ident::from_str("d").into()],
+ segments: vec![str2seg("d", 7, 8)],
}),
span:sp(7,8),
attrs: ThinVec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ExprKind::Path(None, ast::Path {
span:sp(0,1),
- segments: vec![Ident::from_str("b").into()],
+ segments: vec![str2seg("b", 0, 1)],
}),
span: sp(0,1),
attrs: ThinVec::new()})),
ty: P(ast::Ty{id: ast::DUMMY_NODE_ID,
node: ast::TyKind::Path(None, ast::Path{
span:sp(10,13),
- segments: vec![Ident::from_str("i32").into()],
+ segments: vec![str2seg("i32", 10, 13)],
}),
span:sp(10,13)
}),
node: ast::ExprKind::Path(None,
ast::Path{
span:sp(17,18),
- segments: vec![Ident::from_str("b").into()],
+ segments: vec![str2seg("b", 17, 18)],
}),
span: sp(17,18),
attrs: ThinVec::new()})),
use ast::MacStmtStyle;
use ast::Mac_;
use ast::{MutTy, Mutability};
-use ast::{Pat, PatKind};
+use ast::{Pat, PatKind, PathSegment};
use ast::{PolyTraitRef, QSelf};
use ast::{Stmt, StmtKind};
use ast::{VariantData, StructField};
self.expected_tokens.clear();
}
- pub fn look_ahead<R, F>(&mut self, dist: usize, f: F) -> R where
+ pub fn look_ahead<R, F>(&self, dist: usize, f: F) -> R where
F: FnOnce(&token::Token) -> R,
{
if dist == 0 {
};
if is_global {
- segments.insert(0, ast::PathSegment::crate_root());
+ segments.insert(0, PathSegment::crate_root());
}
// Assemble the span.
/// - `a::b<T,U>::c<V,W>`
/// - `a::b<T,U>::c(V) -> W`
/// - `a::b<T,U>::c(V)`
- pub fn parse_path_segments_without_colons(&mut self) -> PResult<'a, Vec<ast::PathSegment>> {
+ pub fn parse_path_segments_without_colons(&mut self) -> PResult<'a, Vec<PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
let identifier = self.parse_path_segment_ident()?;
+ let ident_span = self.prev_span;
if self.check(&token::ModSep) && self.look_ahead(1, |t| *t == token::Lt) {
self.bump();
};
// Assemble and push the result.
- segments.push(ast::PathSegment { identifier: identifier, parameters: parameters });
+ segments.push(PathSegment {
+ identifier: identifier,
+ span: ident_span,
+ parameters: parameters
+ });
// Continue only if we see a `::`
if !self.eat(&token::ModSep) {
/// Examples:
/// - `a::b::<T,U>::c`
- pub fn parse_path_segments_with_colons(&mut self) -> PResult<'a, Vec<ast::PathSegment>> {
+ pub fn parse_path_segments_with_colons(&mut self) -> PResult<'a, Vec<PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
let identifier = self.parse_path_segment_ident()?;
+ let ident_span = self.prev_span;
// If we do not see a `::`, stop.
if !self.eat(&token::ModSep) {
- segments.push(identifier.into());
+ segments.push(PathSegment::from_ident(identifier, ident_span));
return Ok(segments);
}
// Consumed `a::b::<`, go look for types
let (lifetimes, types, bindings) = self.parse_generic_args()?;
self.expect_gt()?;
- segments.push(ast::PathSegment {
+ segments.push(PathSegment {
identifier: identifier,
+ span: ident_span,
parameters: ast::AngleBracketedParameterData {
lifetimes: lifetimes,
types: types,
}
} else {
// Consumed `a::`, go look for `b`
- segments.push(identifier.into());
+ segments.push(PathSegment::from_ident(identifier, ident_span));
}
}
}
/// Examples:
/// - `a::b::c`
pub fn parse_path_segments_without_types(&mut self)
- -> PResult<'a, Vec<ast::PathSegment>> {
+ -> PResult<'a, Vec<PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
let identifier = self.parse_path_segment_ident()?;
// Assemble and push the result.
- segments.push(identifier.into());
+ segments.push(PathSegment::from_ident(identifier, self.prev_span));
// If we do not see a `::` or see `::{`/`::*`, stop.
if !self.check(&token::ModSep) || self.is_import_coupler() {
})
}
- fn is_union_item(&mut self) -> bool {
+ fn is_union_item(&self) -> bool {
self.token.is_keyword(keywords::Union) &&
self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword())
}
+ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility)
+ -> PResult<'a, Option<P<Item>>> {
+ let lo = self.span.lo;
+ match self.token {
+ token::Ident(ident) if ident.name == "macro_rules" => {
+ if self.look_ahead(1, |t| *t == token::Not) {
+ let prev_span = self.prev_span;
+ self.complain_if_pub_macro(vis, prev_span);
+ self.bump();
+ self.bump();
+ }
+ }
+ _ => return Ok(None),
+ };
+
+ let id = self.parse_ident()?;
+ let (delim, tts) = self.expect_delimited_token_tree()?;
+ if delim != token::Brace {
+ if !self.eat(&token::Semi) {
+ let msg = "macros that expand to items must either be surrounded with braces \
+ or followed by a semicolon";
+ self.span_err(self.prev_span, msg);
+ }
+ }
+
+ let hi = self.prev_span.hi;
+ let kind = ItemKind::MacroDef(tts);
+ Ok(Some(self.mk_item(lo, hi, id, kind, Visibility::Inherited, attrs.to_owned())))
+ }
+
fn parse_stmt_without_recovery(&mut self,
macro_legacy_warnings: bool)
-> PResult<'a, Option<Stmt>> {
node: StmtKind::Local(self.parse_local(attrs.into())?),
span: mk_sp(lo, self.prev_span.hi),
}
+ } else if let Some(macro_def) = self.eat_macro_def(&attrs, &Visibility::Inherited)? {
+ Stmt {
+ id: ast::DUMMY_NODE_ID,
+ node: StmtKind::Item(macro_def),
+ span: mk_sp(lo, self.prev_span.hi),
+ }
// Starts like a simple path, but not a union item.
} else if self.token.is_path_start() &&
!self.token.is_qpath_start() &&
let mut attrs = self.parse_outer_attributes()?;
let lo = self.span.lo;
- let vis = self.parse_visibility(true)?;
+ let vis = self.parse_visibility()?;
let defaultness = self.parse_defaultness()?;
let (name, node) = if self.eat_keyword(keywords::Type) {
let name = self.parse_ident()?;
|p| {
let attrs = p.parse_outer_attributes()?;
let lo = p.span.lo;
- let mut vis = p.parse_visibility(false)?;
+ let mut vis = p.parse_visibility()?;
let ty_is_interpolated =
p.token.is_interpolated() || p.look_ahead(1, |t| t.is_interpolated());
let mut ty = p.parse_ty()?;
fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
let attrs = self.parse_outer_attributes()?;
let lo = self.span.lo;
- let vis = self.parse_visibility(true)?;
+ let vis = self.parse_visibility()?;
self.parse_single_struct_field(lo, vis, attrs)
}
- // If `allow_path` is false, just parse the `pub` in `pub(path)` (but still parse `pub(crate)`)
- fn parse_visibility(&mut self, allow_path: bool) -> PResult<'a, Visibility> {
- let pub_crate = |this: &mut Self| {
- let span = this.prev_span;
- this.expect(&token::CloseDelim(token::Paren))?;
- Ok(Visibility::Crate(span))
- };
-
+ // Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts
+ // `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
+ fn parse_visibility(&mut self) -> PResult<'a, Visibility> {
if !self.eat_keyword(keywords::Pub) {
- Ok(Visibility::Inherited)
- } else if !allow_path {
- // Look ahead to avoid eating the `(` in `pub(path)` while still parsing `pub(crate)`
- if self.token == token::OpenDelim(token::Paren) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) {
- self.bump(); self.bump();
- pub_crate(self)
- } else {
- Ok(Visibility::Public)
- }
- } else if !self.eat(&token::OpenDelim(token::Paren)) {
- Ok(Visibility::Public)
- } else if self.eat_keyword(keywords::Crate) {
- pub_crate(self)
- } else {
- let path = self.parse_path(PathStyle::Mod)?.default_to_global();
- self.expect(&token::CloseDelim(token::Paren))?;
- Ok(Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID })
- }
+ return Ok(Visibility::Inherited)
+ }
+
+ if self.check(&token::OpenDelim(token::Paren)) {
+ if self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) {
+ // `pub(crate)`
+ self.bump(); // `(`
+ self.bump(); // `crate`
+ let vis = Visibility::Crate(self.prev_span);
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ return Ok(vis)
+ } else if self.look_ahead(1, |t| t.is_keyword(keywords::In)) {
+ // `pub(in path)`
+ self.bump(); // `(`
+ self.bump(); // `in`
+ let path = self.parse_path(PathStyle::Mod)?.default_to_global(); // `path`
+ let vis = Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ return Ok(vis)
+ } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
+ self.look_ahead(1, |t| t.is_keyword(keywords::Super) ||
+ t.is_keyword(keywords::SelfValue)) {
+ // `pub(self)` or `pub(super)`
+ self.bump(); // `(`
+ let path = self.parse_path(PathStyle::Mod)?.default_to_global(); // `super`/`self`
+ let vis = Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID };
+ self.expect(&token::CloseDelim(token::Paren))?; // `)`
+ return Ok(vis)
+ }
+ }
+
+ Ok(Visibility::Public)
}
/// Parse defaultness: DEFAULT or nothing
let lo = self.span.lo;
- let visibility = self.parse_visibility(true)?;
+ let visibility = self.parse_visibility()?;
if self.eat_keyword(keywords::Use) {
// USE ITEM
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
+ if let Some(macro_def) = self.eat_macro_def(&attrs, &visibility)? {
+ return Ok(Some(macro_def));
+ }
+
self.parse_macro_use_or_failure(attrs,macros_allowed,attributes_allowed,lo,visibility)
}
fn parse_foreign_item(&mut self) -> PResult<'a, Option<ForeignItem>> {
let attrs = self.parse_outer_attributes()?;
let lo = self.span.lo;
- let visibility = self.parse_visibility(true)?;
+ let visibility = self.parse_visibility()?;
if self.check_keyword(keywords::Static) {
// FOREIGN STATIC ITEM
// `{foo, bar}`, `::{foo, bar}`, `*`, or `::*`.
self.eat(&token::ModSep);
let prefix = ast::Path {
- segments: vec![ast::PathSegment::crate_root()],
+ segments: vec![PathSegment::crate_root()],
span: mk_sp(lo, self.span.hi),
};
let view_path_kind = if self.eat(&token::BinOp(token::Star)) {
attrs: self.parse_inner_attributes()?,
module: self.parse_mod_items(&token::Eof, lo)?,
span: mk_sp(lo, self.span.lo),
- exported_macros: Vec::new(),
})
}
self.bclose(item.span)?;
}
ast::ItemKind::Mac(codemap::Spanned { ref node, .. }) => {
- self.print_visibility(&item.vis)?;
self.print_path(&node.path, false, 0, false)?;
word(&mut self.s, "! ")?;
self.print_ident(item.ident)?;
word(&mut self.s, ";")?;
self.end()?;
}
+ ast::ItemKind::MacroDef(ref tts) => {
+ word(&mut self.s, "macro_rules! ")?;
+ self.print_ident(item.ident)?;
+ self.cbox(INDENT_UNIT)?;
+ self.popen()?;
+ self.print_tts(tts.clone().into())?;
+ self.pclose()?;
+ word(&mut self.s, ";")?;
+ self.end()?;
+ }
}
self.ann.post(self, NodeItem(item))
}
vis: ast::Visibility::Inherited,
node: ast::ItemKind::Use(P(codemap::dummy_spanned(ast::ViewPathGlob(ast::Path {
segments: ["{{root}}", name, "prelude", "v1"].into_iter().map(|name| {
- ast::Ident::from_str(name).into()
+ ast::PathSegment::from_ident(ast::Ident::from_str(name), DUMMY_SP)
}).collect(),
span: span,
})))),
fn path_node(ids: Vec<Ident>) -> ast::Path {
ast::Path {
span: DUMMY_SP,
- segments: ids.into_iter().map(Into::into).collect(),
+ segments: ids.into_iter().map(|id| ast::PathSegment::from_ident(id, DUMMY_SP)).collect(),
}
}
fn visit_attribute(&mut self, _attr: &Attribute) {
self.count += 1;
}
- fn visit_macro_def(&mut self, macro_def: &MacroDef) {
- self.count += 1;
- walk_macro_def(self, macro_def)
- }
-
}
walk_assoc_type_binding(self, type_binding)
}
fn visit_attribute(&mut self, _attr: &'ast Attribute) {}
- fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) {
- walk_macro_def(self, macro_def)
- }
fn visit_vis(&mut self, vis: &'ast Visibility) {
walk_vis(self, vis)
}
pub fn walk_crate<'a, V: Visitor<'a>>(visitor: &mut V, krate: &'a Crate) {
visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID);
walk_list!(visitor, visit_attribute, &krate.attrs);
- walk_list!(visitor, visit_macro_def, &krate.exported_macros);
-}
-
-pub fn walk_macro_def<'a, V: Visitor<'a>>(visitor: &mut V, macro_def: &'a MacroDef) {
- visitor.visit_ident(macro_def.span, macro_def.ident);
- walk_list!(visitor, visit_attribute, ¯o_def.attrs);
}
pub fn walk_mod<'a, V: Visitor<'a>>(visitor: &mut V, module: &'a Mod) {
walk_list!(visitor, visit_trait_item, methods);
}
ItemKind::Mac(ref mac) => visitor.visit_mac(mac),
+ ItemKind::MacroDef(..) => {},
}
walk_list!(visitor, visit_attribute, &item.attrs);
}
fn path(&self) -> ast::Path {
ast::Path {
span: self.span,
- segments: vec![self.ident.into()],
+ segments: vec![ast::PathSegment::from_ident(self.ident, self.span)],
}
}
}
pub fn register_builtin_derives(resolver: &mut Resolver) {
$(
- resolver.add_ext(
+ resolver.add_builtin(
ast::Ident::with_empty_ctxt(Symbol::intern($name)),
Rc::new(SyntaxExtension::BuiltinDerive($func))
);
deriving::register_builtin_derives(resolver);
let mut register = |name, ext| {
- resolver.add_ext(ast::Ident::with_empty_ctxt(name), Rc::new(ext));
+ resolver.add_builtin(ast::Ident::with_empty_ctxt(name), Rc::new(ext));
};
macro_rules! register {
krate.module.items.push(mk_registrar(&mut cx, &derives, &attr_macros, &bang_macros));
- if krate.exported_macros.len() > 0 {
- handler.err("cannot export macro_rules! macros from a `proc-macro` \
- crate type currently");
- }
-
- return krate
+ krate
}
fn is_proc_macro_attr(attr: &ast::Attribute) -> bool {
impl<'a> Visitor<'a> for CollectProcMacros<'a> {
fn visit_item(&mut self, item: &'a ast::Item) {
+ if let ast::ItemKind::MacroDef(..) = item.node {
+ if self.is_proc_macro_crate &&
+ item.attrs.iter().any(|attr| attr.name() == "macro_export") {
+ let msg =
+ "cannot export macro_rules! macros from a `proc-macro` crate type currently";
+ self.handler.span_err(item.span, msg);
+ }
+ }
+
// First up, make sure we're checking a bare function. If we're not then
// we're just not interested in this item.
//
-Subproject commit 50ab09fb43f038e4f824eea6cb278f560d3e8621
+Subproject commit 859fb269364623b17e092efaba3f94e70ce97c5e
# If this file is modified, then llvm will be forcibly cleaned and then rebuilt.
# The actual contents of this file do not matter, but to trigger a change on the
# build bots then the contents should be changed so git updates the mtime.
-2017-03-02
+2017-03-04
#[unstable(feature = "unstable_undeclared", issue = "38412")] // SILLY
pub(crate) b_crate: i32,
#[unstable(feature = "unstable_declared", issue = "38412")] // SILLY
- pub(m) c_mod: i32,
+ pub(in m) c_mod: i32,
#[stable(feature = "unit_test", since = "0.0.0")] // SILLY
d_priv: i32
}
pub i32,
pub(crate) i32,
- pub(m) i32,
+ pub(in m) i32,
i32);
impl Record {
#[unstable(feature = "unstable_undeclared", issue = "38412")] // SILLY
pub(crate) fn pub_crate(&self) -> i32 { self.d_priv }
#[unstable(feature = "unstable_declared", issue = "38412")] // SILLY
- pub(m) fn pub_mod(&self) -> i32 { self.d_priv }
+ pub(in m) fn pub_mod(&self) -> i32 { self.d_priv }
#[stable(feature = "unit_test", since = "0.0.0")] // SILLY
fn private(&self) -> i32 { self.d_priv }
}
pub fn stable(&self) -> i32 { self.0 }
pub(crate) fn pub_crate(&self) -> i32 { self.0 }
- pub(m) fn pub_mod(&self) -> i32 { self.0 }
+ pub(in m) fn pub_mod(&self) -> i32 { self.0 }
fn private(&self) -> i32 { self.0 }
}
}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn prove_static<T: 'static + ?Sized>(_: &'static T) {}
+
+fn lifetime_transmute_slice<'a, T: ?Sized>(x: &'a T, y: &T) -> &'a T {
+ let mut out = [x];
+ //~^ ERROR cannot infer an appropriate lifetime due to conflicting requirements
+ {
+ let slice: &mut [_] = &mut out;
+ slice[0] = y;
+ }
+ out[0]
+}
+
+struct Struct<T, U: ?Sized> {
+ head: T,
+ _tail: U
+}
+
+fn lifetime_transmute_struct<'a, T: ?Sized>(x: &'a T, y: &T) -> &'a T {
+ let mut out = Struct { head: x, _tail: [()] };
+ //~^ ERROR cannot infer an appropriate lifetime due to conflicting requirements
+ {
+ let dst: &mut Struct<_, [()]> = &mut out;
+ dst.head = y;
+ }
+ out.head
+}
+
+fn main() {
+ prove_static(lifetime_transmute_slice("", &String::from("foo")));
+ prove_static(lifetime_transmute_struct("", &String::from("bar")));
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn save_ref<'a>(refr: &'a i32, to: &mut [&'a i32]) {
+ for val in &mut *to {
+ *val = refr;
+ }
+}
+
+fn main() {
+ let ref init = 0i32;
+ let ref mut refr = 1i32;
+
+ let mut out = [init];
+
+ save_ref(&*refr, &mut out);
+
+ // This shouldn't be allowed as `refr` is borrowed
+ *refr = 3; //~ ERROR cannot assign to `*refr` because it is borrowed
+
+ // Prints 3?!
+ println!("{:?}", out[0]);
+}
// which fails to type check.
ss
- //~^ ERROR lifetime bound not satisfied
+ //~^ ERROR cannot infer
//~| ERROR cannot infer
}
// `Box<SomeTrait>` defaults to a `'static` bound, so this return
// is illegal.
- ss.r //~ ERROR lifetime bound not satisfied
+ ss.r //~ ERROR cannot infer an appropriate lifetime
}
fn store(ss: &mut SomeStruct, b: Box<SomeTrait>) {
fn store1<'b>(ss: &mut SomeStruct, b: Box<SomeTrait+'b>) {
// Here we override the lifetimes explicitly, and so naturally we get an error.
- ss.r = b; //~ ERROR lifetime bound not satisfied
+ ss.r = b; //~ ERROR cannot infer an appropriate lifetime
}
fn main() {
mod bar {
#[derive(Default)]
pub struct S {
- pub(foo) x: i32,
+ pub(in foo) x: i32,
}
impl S {
- pub(foo) fn f(&self) -> i32 { 0 }
+ pub(in foo) fn f(&self) -> i32 { 0 }
}
pub struct S2 {
mod foo {
pub mod bar {
pub struct S {
- pub(foo) x: i32,
+ pub(in foo) x: i32,
}
}
}
mod pathological {
- pub(bad::path) mod m1 {} //~ ERROR failed to resolve. Maybe a missing `extern crate bad;`?
- pub(foo) mod m2 {} //~ ERROR visibilities can only be restricted to ancestor modules
+ pub(in bad::path) mod m1 {} //~ ERROR failed to resolve. Maybe a missing `extern crate bad;`?
+ pub(in foo) mod m2 {} //~ ERROR visibilities can only be restricted to ancestor modules
}
#![feature(pub_restricted)]
macro_rules! m {
- ($p: path) => (pub($p) struct Z;)
+ ($p: path) => (pub(in $p) struct Z;)
}
struct S<T>(T);
m!{ S<u8> } //~ ERROR type or lifetime parameters in visibility path
//~^ ERROR expected module, found struct `S`
-mod foo {
- struct S(pub(foo<T>) ()); //~ ERROR type or lifetime parameters in visibility path
- //~^ ERROR cannot find type `T` in this scope
-}
-
fn main() {}
fn make_object_bad<'a,'b,'c,A:SomeTrait+'a+'b>(v: A) -> Box<SomeTrait+'c> {
// A outlives 'a AND 'b...but not 'c.
- box v as Box<SomeTrait+'a> //~ ERROR lifetime bound not satisfied
+ box v as Box<SomeTrait+'a> //~ ERROR cannot infer an appropriate lifetime
}
fn main() {
fn static_proc(x: &isize) -> Box<FnMut()->(isize) + 'static> {
// This is illegal, because the region bound on `proc` is 'static.
- Box::new(move|| { *x }) //~ ERROR does not fulfill the required lifetime
+ Box::new(move|| { *x }) //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
fn foo3<'a,'b>(x: &'a mut Dummy) -> &'b mut Dummy {
// Without knowing 'a:'b, we can't coerce
- x //~ ERROR lifetime bound not satisfied
- //~^ ERROR cannot infer
+ x //~ ERROR cannot infer an appropriate lifetime
+ //~^ ERROR cannot infer an appropriate lifetime
}
struct Wrapper<T>(T);
enum E {}
trait Tr {}
-pub(E) struct S; //~ ERROR expected module, found enum `E`
-pub(Tr) struct Z; //~ ERROR expected module, found trait `Tr`
-pub(std::vec) struct F; //~ ERROR visibilities can only be restricted to ancestor modules
-pub(nonexistent) struct G; //~ ERROR cannot find module `nonexistent` in the crate root
-pub(too_soon) struct H; //~ ERROR cannot find module `too_soon` in the crate root
+pub(in E) struct S; //~ ERROR expected module, found enum `E`
+pub(in Tr) struct Z; //~ ERROR expected module, found trait `Tr`
+pub(in std::vec) struct F; //~ ERROR visibilities can only be restricted to ancestor modules
+pub(in nonexistent) struct G; //~ ERROR cannot find module `nonexistent` in the crate root
+pub(in too_soon) struct H; //~ ERROR cannot find module `too_soon` in the crate root
// Visibilities are resolved eagerly without waiting for modules becoming fully populated.
// Visibilities can only use ancestor modules legally which are always available in time,
-> Box<Get<&'min i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
where 'max : 'min
{
// Previously OK:
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
where 'max : 'min
{
// Previously OK, now an error as traits are invariant.
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
-> Box<Get<&'max i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
-> Box<Get<&'min i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn get_max_from_min<'min, 'max, G>(v: Box<Get<&'min i32>>)
-> Box<Get<&'max i32>>
where 'max : 'min
{
- v //~ ERROR mismatched types
+ v //~ ERROR cannot infer an appropriate lifetime
}
fn main() { }
// === GDB TESTS ===================================================================================
-// gdb-command:print 'c_style_enum::SINGLE_VARIANT'
+// gdbg-command:print 'c_style_enum::SINGLE_VARIANT'
+// gdbr-command:print c_style_enum::SINGLE_VARIANT
// gdbg-check:$1 = TheOnlyVariant
// gdbr-check:$1 = c_style_enum::SingleVariant::TheOnlyVariant
-// gdb-command:print 'c_style_enum::AUTO_ONE'
+// gdbg-command:print 'c_style_enum::AUTO_ONE'
+// gdbr-command:print c_style_enum::AUTO_ONE
// gdbg-check:$2 = One
// gdbr-check:$2 = c_style_enum::AutoDiscriminant::One
-// gdb-command:print 'c_style_enum::AUTO_TWO'
+// gdbg-command:print 'c_style_enum::AUTO_TWO'
+// gdbr-command:print c_style_enum::AUTO_TWO
// gdbg-check:$3 = One
// gdbr-check:$3 = c_style_enum::AutoDiscriminant::One
-// gdb-command:print 'c_style_enum::AUTO_THREE'
+// gdbg-command:print 'c_style_enum::AUTO_THREE'
+// gdbr-command:print c_style_enum::AUTO_THREE
// gdbg-check:$4 = One
// gdbr-check:$4 = c_style_enum::AutoDiscriminant::One
-// gdb-command:print 'c_style_enum::MANUAL_ONE'
+// gdbg-command:print 'c_style_enum::MANUAL_ONE'
+// gdbr-command:print c_style_enum::MANUAL_ONE
// gdbg-check:$5 = OneHundred
// gdbr-check:$5 = c_style_enum::ManualDiscriminant::OneHundred
-// gdb-command:print 'c_style_enum::MANUAL_TWO'
+// gdbg-command:print 'c_style_enum::MANUAL_TWO'
+// gdbr-command:print c_style_enum::MANUAL_TWO
// gdbg-check:$6 = OneHundred
// gdbr-check:$6 = c_style_enum::ManualDiscriminant::OneHundred
-// gdb-command:print 'c_style_enum::MANUAL_THREE'
+// gdbg-command:print 'c_style_enum::MANUAL_THREE'
+// gdbr-command:print c_style_enum::MANUAL_THREE
// gdbg-check:$7 = OneHundred
// gdbr-check:$7 = c_style_enum::ManualDiscriminant::OneHundred
// Make sure functions have proper names
// gdb-command:info functions
-// gdb-check:[...]void[...]main([...]);
-// gdb-check:[...]void[...]some_function([...]);
-// gdb-check:[...]void[...]some_other_function([...]);
-// gdb-check:[...]void[...]zzz([...]);
+// gdbg-check:[...]void[...]main([...]);
+// gdbr-check:fn limited_debuginfo::main();
+// gdbg-check:[...]void[...]some_function([...]);
+// gdbr-check:fn limited_debuginfo::some_function();
+// gdbg-check:[...]void[...]some_other_function([...]);
+// gdbr-check:fn limited_debuginfo::some_other_function();
+// gdbg-check:[...]void[...]zzz([...]);
+// gdbr-check:fn limited_debuginfo::zzz();
// gdb-command:run
// === GDB TESTS ===================================================================================
-// there's no frame yet for gdb to reliably detect the language, set it explicitly
-// gdbr-command:set language rust
-
// gdbg-command:print 'simple_struct::NO_PADDING_16'
// gdbr-command:print simple_struct::NO_PADDING_16
// gdbg-check:$1 = {x = 1000, y = -1001}
// === GDB TESTS ===================================================================================
-// there's no frame yet for gdb to reliably detect the language, set it explicitly
-// gdbr-command:set language rust
-
// gdbg-command:print/d 'simple_tuple::NO_PADDING_8'
// gdbr-command:print simple_tuple::NO_PADDING_8
// gdbg-check:$1 = {__0 = -50, __1 = 50}
// }
//
// bb2: {
-// StorageLive(_6);
// _0 = ();
// StorageDead(_4);
// StorageDead(_1);
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::fmt;
+
#[repr(packed)]
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone)]
struct Foo {
a: i8,
b: i16,
c: i8
}
+impl PartialEq for Foo {
+ fn eq(&self, other: &Foo) -> bool {
+ self.a == other.a && self.b == other.b && self.c == other.c
+ }
+}
+
+impl fmt::Debug for Foo {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let a = self.a;
+ let b = self.b;
+ let c = self.c;
+
+ f.debug_struct("Foo")
+ .field("a", &a)
+ .field("b", &b)
+ .field("c", &c)
+ .finish()
+ }
+}
+
#[link(name = "test", kind = "static")]
extern {
fn foo(f: Foo) -> Foo;
#![feature(no_core)]
#![no_core]
+macro_rules! foo /* 60#0 */(( $ x : ident ) => { y + $ x });
fn bar /* 62#0 */() { let x /* 59#2 */ = 1; y /* 61#4 */ + x /* 59#5 */ }
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// pretty-expanded FIXME #23616
+
+use std::rc::Rc;
+
+fn lub_short<'a, T>(_: &[&'a T], _: &[&'a T]) {}
+
+// The two arguments are a subtype of their LUB, after coercion.
+fn long_and_short<'a, T>(xs: &[&'static T; 1], ys: &[&'a T; 1]) {
+ lub_short(xs, ys);
+}
+
+// The argument coerces to a subtype of the return type.
+fn long_to_short<'a, 'b, T>(xs: &'b [&'static T; 1]) -> &'b [&'a T] {
+ xs
+}
+
+// Rc<T> is covariant over T just like &T.
+fn long_to_short_rc<'a, T>(xs: Rc<[&'static T; 1]>) -> Rc<[&'a T]> {
+ xs
+}
+
+// LUB-coercion (if-else/match/array) coerces `xs: &'b [&'static T; N]`
+// to a subtype of the LUB of `xs` and `ys` (i.e. `&'b [&'a T]`),
+// regardless of the order they appear (in if-else/match/array).
+fn long_and_short_lub1<'a, 'b, T>(xs: &'b [&'static T; 1], ys: &'b [&'a T]) {
+ let _order1 = [xs, ys];
+ let _order2 = [ys, xs];
+}
+
+// LUB-coercion should also have the exact same effect when `&'b [&'a T; N]`
+// needs to be coerced, i.e. the resulting type is not &'b [&'static T], but
+// rather the `&'b [&'a T]` LUB.
+fn long_and_short_lub2<'a, 'b, T>(xs: &'b [&'static T], ys: &'b [&'a T; 1]) {
+ let _order1 = [xs, ys];
+ let _order2 = [ys, xs];
+}
+
+fn main() {}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+use std::fmt;
use std::mem;
#[repr(packed)]
-#[derive(Copy, Clone, PartialEq, Debug)]
+#[derive(Copy, Clone)]
struct Foo {
bar: u8,
baz: u64
}
+impl PartialEq for Foo {
+ fn eq(&self, other: &Foo) -> bool {
+ self.bar == other.bar && self.baz == other.baz
+ }
+}
+
+impl fmt::Debug for Foo {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let bar = self.bar;
+ let baz = self.baz;
+
+ f.debug_struct("Foo")
+ .field("bar", &bar)
+ .field("baz", &baz)
+ .finish()
+ }
+}
+
pub fn main() {
let foos = [Foo { bar: 1, baz: 2 }; 10];
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn main() {
- let (x,) = (vec![],);
-}
+++ /dev/null
-error[E0282]: type annotations needed
- --> $DIR/issue-38812-2.rs:12:17
- |
-12 | let (x,) = (vec![],);
- | ---- ^^^^^^ cannot infer type for `T`
- | |
- | consider giving a type to pattern
- |
- = note: this error originates in a macro outside of the current crate
-
-error: aborting due to previous error
-
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-fn main() {
- let x = vec![];
-}
+++ /dev/null
-error[E0282]: type annotations needed
- --> $DIR/issue-38812.rs:12:13
- |
-12 | let x = vec![];
- | - ^^^^^^ cannot infer type for `T`
- | |
- | consider giving `x` a type
- |
- = note: this error originates in a macro outside of the current crate
-
-error: aborting due to previous error
-
pub struct S(u8);
pub mod n {
- pub(m) struct Z(pub(m::n) u8);
+ pub(in m) struct Z(pub(in m::n) u8);
}
}
error[E0425]: cannot find value `A` in module `namespaced_enums`
- --> $DIR/enums-are-namespaced-xc.rs:15:13
+ --> $DIR/enums-are-namespaced-xc.rs:15:31
|
15 | let _ = namespaced_enums::A;
- | ^^^^^^^^^^^^^^^^^^^ not found in `namespaced_enums`
+ | ^ not found in `namespaced_enums`
|
= help: possible candidate is found in another module, you can import it into scope:
`use namespaced_enums::Foo::A;`
error[E0425]: cannot find function `B` in module `namespaced_enums`
- --> $DIR/enums-are-namespaced-xc.rs:18:13
+ --> $DIR/enums-are-namespaced-xc.rs:18:31
|
18 | let _ = namespaced_enums::B(10);
- | ^^^^^^^^^^^^^^^^^^^ not found in `namespaced_enums`
+ | ^ not found in `namespaced_enums`
|
= help: possible candidate is found in another module, you can import it into scope:
`use namespaced_enums::Foo::B;`
error[E0422]: cannot find struct, variant or union type `C` in module `namespaced_enums`
- --> $DIR/enums-are-namespaced-xc.rs:21:13
+ --> $DIR/enums-are-namespaced-xc.rs:21:31
|
21 | let _ = namespaced_enums::C { a: 10 };
- | ^^^^^^^^^^^^^^^^^^^ not found in `namespaced_enums`
+ | ^ not found in `namespaced_enums`
|
= help: possible candidate is found in another module, you can import it into scope:
`use namespaced_enums::Foo::C;`
--> $DIR/levenshtein.rs:20:10
|
20 | type B = Opiton<u8>; // Misspelled type name from the prelude.
- | ^^^^^^^^^^ did you mean `Option`?
+ | ^^^^^^ did you mean `Option`?
error[E0412]: cannot find type `Baz` in this scope
--> $DIR/levenshtein.rs:23:14
| ^^^^^^ did you mean `foo_bar`?
error[E0412]: cannot find type `first` in module `m`
- --> $DIR/levenshtein.rs:32:12
+ --> $DIR/levenshtein.rs:32:15
|
32 | let b: m::first = m::second; // Misspelled item in module.
- | ^^^^^^^^ did you mean `m::First`?
+ | ^^^^^ did you mean `First`?
error[E0425]: cannot find value `second` in module `m`
- --> $DIR/levenshtein.rs:32:23
+ --> $DIR/levenshtein.rs:32:26
|
32 | let b: m::first = m::second; // Misspelled item in module.
- | ^^^^^^^^^ did you mean `m::Second`?
+ | ^^^^^^ did you mean `Second`?
error: aborting due to 8 previous errors
pub struct S(u8);
pub mod n {
- pub(m) struct Z(pub(m::n) u8);
+ pub(in m) struct Z(pub(in m::n) u8);
}
use m::n::Z; // OK, only the type is imported
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:27:5
|
27 | a.I
- | ^ did you mean `a::I`?
+ | ^--
+ | |
+ | did you mean `a::I`?
error[E0423]: expected value, found module `a`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:33:5
|
33 | a.g()
- | ^ did you mean `a::g(...)`?
+ | ^----
+ | |
+ | did you mean `a::g(...)`?
error[E0423]: expected value, found module `a`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:39:5
|
39 | a.b.J
- | ^ did you mean `a::b`?
+ | ^--
+ | |
+ | did you mean `a::b`?
error[E0423]: expected value, found module `a::b`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:45:5
|
45 | a::b.J
- | ^^^^ did you mean `a::b::J`?
+ | ^^^^--
+ | |
+ | did you mean `a::b::J`?
error[E0423]: expected value, found module `a`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:51:5
|
51 | a.b.f();
- | ^ did you mean `a::b`?
+ | ^--
+ | |
+ | did you mean `a::b`?
error[E0423]: expected value, found module `a::b`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:55:12
|
55 | v.push(a::b);
- | ^^^^ did you mean `a::I`?
+ | ^^^-
+ | |
+ | did you mean `I`?
error[E0423]: expected value, found module `a::b`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:61:5
|
61 | a::b.f()
- | ^^^^ did you mean `a::b::f(...)`?
+ | ^^^^----
+ | |
+ | did you mean `a::b::f(...)`?
error[E0423]: expected value, found module `a::b`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:67:5
|
67 | a::b
- | ^^^^ did you mean `a::I`?
+ | ^^^-
+ | |
+ | did you mean `I`?
error[E0423]: expected function, found module `a::b`
--> $DIR/suggest-path-instead-of-mod-dot-item.rs:73:5
|
73 | a::b()
- | ^^^^ did you mean `a::I`?
+ | ^^^-
+ | |
+ | did you mean `I`?
error: main function not found
--> $DIR/unboxed-closure-sugar-nonexistent-trait.rs:11:8
|
11 | fn f<F:Nonexist(isize) -> isize>(x: F) {}
- | ^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
+ | ^^^^^^^^ not found in this scope
error[E0404]: expected trait, found type alias `Typedef`
--> $DIR/unboxed-closure-sugar-nonexistent-trait.rs:17:8
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let (x,) = (vec![],);
+}
--- /dev/null
+error[E0282]: type annotations needed
+ --> $DIR/issue-38812-2.rs:12:17
+ |
+12 | let (x,) = (vec![],);
+ | ---- ^^^^^^ cannot infer type for `T`
+ | |
+ | consider giving a type to pattern
+ |
+ = note: this error originates in a macro outside of the current crate
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let x = vec![];
+}
--- /dev/null
+error[E0282]: type annotations needed
+ --> $DIR/issue-38812.rs:12:13
+ |
+12 | let x = vec![];
+ | - ^^^^^^ cannot infer type for `T`
+ | |
+ | consider giving `x` a type
+ |
+ = note: this error originates in a macro outside of the current crate
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+trait Foo: Sized {
+ fn foo(self);
+}
+
+fn foo<'a,'b,T>(x: &'a T, y: &'b T)
+ where &'a T : Foo,
+ &'b T : Foo
+{
+ x.foo();
+ y.foo();
+}
+
+fn main() { }
--- /dev/null
+error[E0282]: type annotations needed
+ --> $DIR/issue-40294.rs:15:1
+ |
+15 | fn foo<'a,'b,T>(x: &'a T, y: &'b T)
+ | _^ starting here...
+16 | | where &'a T : Foo,
+17 | | &'b T : Foo
+18 | | {
+19 | | x.foo();
+20 | | y.foo();
+21 | | }
+ | |_^ ...ending here: cannot infer type for `&'a T`
+
+error: aborting due to previous error
+