[gsearchdocs]: https://www.google.com/search?q=site:doc.rust-lang.org+your+query+here
[rif]: http://internals.rust-lang.org
[rr]: https://doc.rust-lang.org/book/README.html
-[tlgba]: http://tomlee.co/2014/04/03/a-more-detailed-tour-of-the-rust-compiler/
+[tlgba]: http://tomlee.co/2014/04/a-more-detailed-tour-of-the-rust-compiler/
[ro]: http://www.rustaceans.org/
[rctd]: ./COMPILER_TESTS.md
[cheatsheet]: http://buildbot.rust-lang.org/homu/
```rust
use std::mem;
-unsafe {
- let a = [0u8, 0u8, 0u8, 0u8];
-
- let b = mem::transmute::<[u8; 4], u32>(a);
+fn main() {
+ unsafe {
+ let a = [0u8, 1u8, 0u8, 0u8];
+ let b = mem::transmute::<[u8; 4], u32>(a);
+ println!("{}", b); // 256
+ // or, more concisely:
+ let c: u32 = mem::transmute(a);
+ println!("{}", c); // 256
+ }
}
```
use core::iter::FromIterator;
use core::mem::swap;
+use core::mem::size_of;
use core::ptr;
use core::fmt;
use slice;
use vec::{self, Vec};
+use super::SpecExtend;
+
/// A priority queue implemented with a binary heap.
///
/// This will be a max-heap.
pub fn clear(&mut self) {
self.drain();
}
+
+ fn rebuild(&mut self) {
+ let mut n = self.len() / 2;
+ while n > 0 {
+ n -= 1;
+ self.sift_down(n);
+ }
+ }
+
+ /// Moves all the elements of `other` into `self`, leaving `other` empty.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(binary_heap_append)]
+ ///
+ /// use std::collections::BinaryHeap;
+ ///
+ /// let v = vec![-10, 1, 2, 3, 3];
+ /// let mut a = BinaryHeap::from(v);
+ ///
+ /// let v = vec![-20, 5, 43];
+ /// let mut b = BinaryHeap::from(v);
+ ///
+ /// a.append(&mut b);
+ ///
+ /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
+ /// assert!(b.is_empty());
+ /// ```
+ #[unstable(feature = "binary_heap_append",
+ reason = "needs to be audited",
+ issue = "32526")]
+ pub fn append(&mut self, other: &mut Self) {
+ if self.len() < other.len() {
+ swap(self, other);
+ }
+
+ if other.is_empty() {
+ return;
+ }
+
+ #[inline(always)]
+ fn log2_fast(x: usize) -> usize {
+ 8 * size_of::<usize>() - (x.leading_zeros() as usize) - 1
+ }
+
+ // `rebuild` takes O(len1 + len2) operations
+ // and about 2 * (len1 + len2) comparisons in the worst case
+ // while `extend` takes O(len2 * log_2(len1)) operations
+ // and about 1 * len2 * log_2(len1) comparisons in the worst case,
+ // assuming len1 >= len2.
+ #[inline]
+ fn better_to_rebuild(len1: usize, len2: usize) -> bool {
+ 2 * (len1 + len2) < len2 * log2_fast(len1)
+ }
+
+ if better_to_rebuild(self.len(), other.len()) {
+ self.data.append(&mut other.data);
+ self.rebuild();
+ } else {
+ self.extend(other.drain());
+ }
+ }
}
/// Hole represents a hole in a slice i.e. an index without valid value
impl<T: Ord> From<Vec<T>> for BinaryHeap<T> {
fn from(vec: Vec<T>) -> BinaryHeap<T> {
let mut heap = BinaryHeap { data: vec };
- let mut n = heap.len() / 2;
- while n > 0 {
- n -= 1;
- heap.sift_down(n);
- }
+ heap.rebuild();
heap
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Extend<T> for BinaryHeap<T> {
+ #[inline]
fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+ <Self as SpecExtend<I>>::spec_extend(self, iter);
+ }
+}
+
+impl<T: Ord, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<T> {
+ default fn spec_extend(&mut self, iter: I) {
+ self.extend_desugared(iter.into_iter());
+ }
+}
+
+impl<T: Ord> SpecExtend<BinaryHeap<T>> for BinaryHeap<T> {
+ fn spec_extend(&mut self, ref mut other: BinaryHeap<T>) {
+ self.append(other);
+ }
+}
+
+impl<T: Ord> BinaryHeap<T> {
+ fn extend_desugared<I: IntoIterator<Item = T>>(&mut self, iter: I) {
let iterator = iter.into_iter();
let (lower, _) = iterator.size_hint();
// Since Rust doesn't actually have dependent types and polymorphic recursion,
// we make do with lots of unsafety.
+// A major goal of this module is to avoid complexity by treating the tree as a generic (if
+// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
+// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
+// even what underfull means. However, we do rely on a few invariants:
+//
+// - Trees must have uniform depth/height. This means that every path down to a leaf from a
+// given node has exactly the same length.
+// - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
+// This implies that even an empty internal node has at least one edge.
+
use alloc::heap;
use core::marker::PhantomData;
use core::mem;
const B: usize = 6;
pub const CAPACITY: usize = 2 * B - 1;
+/// The underlying representation of leaf nodes. Note that it is often unsafe to actually store
+/// these, since only the first `len` keys and values are assumed to be initialized. As such,
+/// these should always be put behind pointers, and specifically behind `BoxedNode` in the owned
+/// case.
+///
+/// See also rust-lang/rfcs#197, which would make this structure significantly more safe by
+/// avoiding accidentally dropping unused and uninitialized keys and values.
struct LeafNode<K, V> {
+ /// The arrays storing the actual data of the node. Only the first `len` elements of each
+ /// array are initialized and valid.
keys: [K; CAPACITY],
vals: [V; CAPACITY],
+
+ /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
+ /// This either points to an actual node or is null.
parent: *const InternalNode<K, V>,
+
+ /// This node's index into the parent node's `edges` array.
+ /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
+ /// This is only guaranteed to be initialized when `parent` is nonnull.
parent_idx: u16,
+
+ /// The number of keys and values this node stores.
+ ///
+ /// This is at the end of the node's representation and next to `parent_idx` to encourage
+ /// the compiler to join `len` and `parent_idx` into the same 32-bit word, reducing space
+ /// overhead.
len: u16,
}
impl<K, V> LeafNode<K, V> {
+ /// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind
+ /// `BoxedNode`, preventing accidental dropping of uninitialized keys and values.
unsafe fn new() -> Self {
LeafNode {
+ // As a general policy, we leave fields uninitialized if they can be, as this should
+ // be both slightly faster and easier to track in Valgrind.
keys: mem::uninitialized(),
vals: mem::uninitialized(),
parent: ptr::null(),
}
}
-// We use repr(C) so that a pointer to an internal node can be
-// directly used as a pointer to a leaf node
+/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
+/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
+/// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
+/// node, allowing code to act on leaf and internal nodes generically without having to even check
+/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
#[repr(C)]
struct InternalNode<K, V> {
data: LeafNode<K, V>,
+
+ /// The pointers to the children of this node. `len + 1` of these are considered
+ /// initialized and valid.
edges: [BoxedNode<K, V>; 2 * B],
}
impl<K, V> InternalNode<K, V> {
+ /// Creates a new `InternalNode`.
+ ///
+ /// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking
+ /// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1`
+ /// edges are initialized and valid, meaning that even when the node is empty (having a
+ /// `len` of 0), there must be one initialized and valid edge. This function does not set up
+ /// such an edge.
unsafe fn new() -> Self {
InternalNode {
data: LeafNode::new(),
}
}
+/// An owned pointer to a node. This basically is either `Box<LeafNode<K, V>>` or
+/// `Box<InternalNode<K, V>>`. However, it contains no information as to which of the two types
+/// of nodes is actually behind the box, and, partially due to this lack of information, has no
+/// destructor.
struct BoxedNode<K, V> {
- ptr: Unique<LeafNode<K, V>> // we don't know if this points to a leaf node or an internal node
+ ptr: Unique<LeafNode<K, V>>
}
impl<K, V> BoxedNode<K, V> {
}
}
- /// Add a new internal node with a single edge, pointing to the previous root, and make that
+ /// Adds a new internal node with a single edge, pointing to the previous root, and makes that
/// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
pub fn push_level(&mut self)
-> NodeRef<marker::Mut, K, V, marker::Internal> {
ret
}
- ///Â Remove the root node, using its first child as the new root. This cannot be called when
+ /// Removes the root node, using its first child as the new root. This cannot be called when
/// the tree consists only of a leaf node. As it is intended only to be called when the root
/// has only one edge, no cleanup is done on any of the other children or elements of the root.
/// This decreases the height by 1 and is the opposite of `push_level`.
pub struct NodeRef<BorrowType, K, V, Type> {
height: usize,
node: NonZero<*const LeafNode<K, V>>,
+ // This is null unless the borrow type is `Mut`
root: *const Root<K, V>,
_marker: PhantomData<(BorrowType, Type)>
}
impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
+ /// Finds the length of the node. This is the number of keys or values. In an
+ /// internal node, the number of edges is `len() + 1`.
pub fn len(&self) -> usize {
self.as_leaf().len as usize
}
+ /// Removes any static information about whether this node is a `Leaf` or an
+ /// `Internal` node.
pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
NodeRef {
height: self.height,
}
}
+ /// Temporarily takes out another, immutable reference to the same node.
fn reborrow<'a>(&'a self) -> NodeRef<marker::Immut<'a>, K, V, Type> {
NodeRef {
height: self.height,
self.reborrow().into_slices().1
}
+ /// Finds the parent of the current node. Returns `Ok(handle)` if the current
+ /// node actually has a parent, where `handle` points to the edge of the parent
+ /// that points to the current node. Returns `Err(self)` if the current node has
+ /// no parent, giving back the original `NodeRef`.
+ ///
+ /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+ /// both, upon success, do nothing.
pub fn ascend(self) -> Result<
Handle<
NodeRef<
}
impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
+ /// Similar to `ascend`, gets a reference to a node's parent node, but also
+ /// deallocates the current node in the process. This is unsafe because the
+ /// current node will still be accessible despite being deallocated.
pub unsafe fn deallocate_and_ascend(self) -> Option<
Handle<
NodeRef<
}
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
+ /// Similar to `ascend`, gets a reference to a node's parent node, but also
+ /// deallocates the current node in the process. This is unsafe because the
+ /// current node will still be accessible despite being deallocated.
pub unsafe fn deallocate_and_ascend(self) -> Option<
Handle<
NodeRef<
}
impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ /// Unsafely asserts to the compiler some static information about whether this
+ /// node is a `Leaf`.
unsafe fn cast_unchecked<NewType>(&mut self)
-> NodeRef<marker::Mut, K, V, NewType> {
}
}
+ /// Temporarily takes out another, mutable reference to the same node. Beware, as
+ /// this method is very dangerous, doubly so since it may not immediately appear
+ /// dangerous.
+ ///
+ /// Because mutable pointers can roam anywhere around the tree and can even (through
+ /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
+ /// can easily be used to make the original mutable pointer dangling, or, in the case
+ /// of a reborrowed handle, out of bounds.
+ // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
+ // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut, K, V, Type> {
NodeRef {
height: self.height,
}
impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ /// Gets a mutable reference to the root itself. This is useful primarily when the
+ /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer.
pub fn into_root_mut(self) -> &'a mut Root<K, V> {
unsafe {
&mut *(self.root as *mut Root<K, V>)
}
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
+ /// Adds a key/value pair to the end of the node.
pub fn push(&mut self, key: K, val: V) {
// Necessary for correctness, but this is an internal module
debug_assert!(self.len() < CAPACITY);
self.as_leaf_mut().len += 1;
}
+ /// Adds a key/value pair to the beginning of the node.
pub fn push_front(&mut self, key: K, val: V) {
// Necessary for correctness, but this is an internal module
debug_assert!(self.len() < CAPACITY);
}
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ /// Adds a key/value pair and an edge to go to the right of that pair to
+ /// the end of the node.
pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
// Necessary for correctness, but this is an internal module
debug_assert!(edge.height == self.height - 1);
}
}
+ /// Adds a key/value pair and an edge to go to the left of that pair to
+ /// the beginning of the node.
pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
// Necessary for correctness, but this is an internal module
debug_assert!(edge.height == self.height - 1);
}
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ /// Removes a key/value pair from the end of this node. If this is an internal node,
+ /// also removes the edge that was to the right of that pair.
pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
// Necessary for correctness, but this is an internal module
debug_assert!(self.len() > 0);
}
}
+ /// Removes a key/value pair from the beginning of this node. If this is an internal node,
+ /// also removes the edge that was to the left of that pair.
pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
// Necessary for correctness, but this is an internal module
debug_assert!(self.len() > 0);
}
impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ /// Checks whether a node is an `Internal` node or a `Leaf` node.
pub fn force(self) -> ForceResult<
NodeRef<BorrowType, K, V, marker::Leaf>,
NodeRef<BorrowType, K, V, marker::Internal>
}
}
+/// A reference to a specific key/value pair or edge within a node. The `Node` parameter
+/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value
+/// pair) or `Edge` (signifying a handle on an edge).
+///
+/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
+/// a child node, these represent the spaces where child pointers would go between the key/value
+/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
+/// to the left of the node, one between the two pairs, and one at the right of the node.
pub struct Handle<Node, Type> {
node: Node,
idx: usize,
}
impl<Node: Copy, Type> Copy for Handle<Node, Type> { }
+// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
+// `Clone`able is when it is an immutable reference and therefore `Copy`.
impl<Node: Copy, Type> Clone for Handle<Node, Type> {
fn clone(&self) -> Self {
*self
}
impl<Node, Type> Handle<Node, Type> {
+ /// Retrieves the node that contains the edge or key/value pair this handle points to.
pub fn into_node(self) -> Node {
self.node
}
}
impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
+ /// Creates a new handle to a key/value pair in `node`. `idx` must be less than `node.len()`.
pub fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
// Necessary for correctness, but in a private module
debug_assert!(idx < node.len());
impl<BorrowType, K, V, NodeType, HandleType>
Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
+ /// Temporarily takes out another, immutable handle on the same location.
pub fn reborrow(&self)
-> Handle<NodeRef<marker::Immut, K, V, NodeType>, HandleType> {
impl<'a, K, V, NodeType, HandleType>
Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
+ /// Temporarily takes out another, mutable handle on the same location. Beware, as
+ /// this method is very dangerous, doubly so since it may not immediately appear
+ /// dangerous.
+ ///
+ /// Because mutable pointers can roam anywhere around the tree and can even (through
+ /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
+ /// can easily be used to make the original mutable pointer dangling, or, in the case
+ /// of a reborrowed handle, out of bounds.
+ // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
+ // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
pub unsafe fn reborrow_mut(&mut self)
-> Handle<NodeRef<marker::Mut, K, V, NodeType>, HandleType> {
impl<BorrowType, K, V, NodeType>
Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ /// Creates a new handle to an edge in `node`. `idx` must be less than or equal to
+ /// `node.len()`.
pub fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
// Necessary for correctness, but in a private module
debug_assert!(idx <= node.len());
}
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
+ /// Inserts a new key/value pair between the key/value pairs to the right and left of
+ /// this edge. This method assumes that there is enough space in the node for the new
+ /// pair to fit.
+ ///
+ /// The returned pointer points to the inserted value.
fn insert_fit(&mut self, key: K, val: V) -> *mut V {
// Necessary for correctness, but in a private module
debug_assert!(self.node.len() < CAPACITY);
}
}
+ /// Inserts a new key/value pair between the key/value pairs to the right and left of
+ /// this edge. This method splits the node if there isn't enough room.
+ ///
+ /// The returned pointer points to the inserted value.
pub fn insert(mut self, key: K, val: V)
-> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
}
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ /// Fixes the parent pointer and index in the child node below this edge. This is useful
+ /// when the ordering of edges has been changed, such as in the various `insert` methods.
fn correct_parent_link(mut self) {
let idx = self.idx as u16;
let ptr = self.node.as_internal_mut() as *mut _;
child.as_leaf_mut().parent_idx = idx;
}
+ /// Unsafely asserts to the compiler some static information about whether the underlying
+ /// node of this handle is a `Leaf`.
unsafe fn cast_unchecked<NewType>(&mut self)
-> Handle<NodeRef<marker::Mut, K, V, NewType>, marker::Edge> {
Handle::new_edge(self.node.cast_unchecked(), self.idx)
}
+ /// Inserts a new key/value pair and an edge that will go to the right of that new pair
+ /// between this edge and the key/value pair to the right of this edge. This method assumes
+ /// that there is enough space in the node for the new pair to fit.
fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
// Necessary for correctness, but in an internal module
debug_assert!(self.node.len() < CAPACITY);
debug_assert!(edge.height == self.node.height - 1);
unsafe {
+ // This cast is a lie, but it allows us to reuse the key/value insertion logic.
self.cast_unchecked::<marker::Leaf>().insert_fit(key, val);
slice_insert(
}
}
+ /// Inserts a new key/value pair and an edge that will go to the right of that new pair
+ /// between this edge and the key/value pair to the right of this edge. This method splits
+ /// the node if there isn't enough room.
pub fn insert(mut self, key: K, val: V, edge: Root<K, V>)
-> InsertResult<'a, K, V, marker::Internal> {
impl<BorrowType, K, V>
Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
+ /// Finds the node pointed to by this edge.
+ ///
+ /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+ /// both, upon success, do nothing.
pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
NodeRef {
height: self.node.height - 1,
}
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
+ /// Splits the underlying node into three parts:
+ ///
+ /// - The node is truncated to only contain the key/value pairs to the right of
+ /// this handle.
+ /// - The key and value pointed to by this handle are extracted.
+ /// - All the key/value pairs to the right of this handle are put into a newly
+ /// allocated node.
pub fn split(mut self)
-> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
unsafe {
}
}
+ /// Removes the key/value pair pointed to by this handle, returning the edge between the
+ /// now adjacent key/value pairs to the left and right of this handle.
pub fn remove(mut self)
-> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
unsafe {
}
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
+ /// Splits the underlying node into three parts:
+ ///
+ /// - The node is truncated to only contain the edges and key/value pairs to the
+ /// right of this handle.
+ /// - The key and value pointed to by this handle are extracted.
+ /// - All the edges and key/value pairs to the right of this handle are put into
+ /// a newly allocated node.
pub fn split(mut self)
-> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
unsafe {
}
}
+ /// Returns whether it is valid to call `.merge()`, i.e., whether there is enough room in
+ /// a node to hold the combination of the nodes to the left and right of this handle along
+ /// with the key/value pair at this handle.
pub fn can_merge(&self) -> bool {
(
self.reborrow()
) <= CAPACITY
}
+ /// Combines the node immediately to the left of this handle, the key/value pair pointed
+ /// to by this handle, and the node immediately to the right of this handle into one new
+ /// child of the underlying node, returning an edge referencing that new child.
+ ///
+ /// Assumes that this edge `.can_merge()`.
pub fn merge(mut self)
-> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
let self1 = unsafe { ptr::read(&self) };
impl<BorrowType, K, V, HandleType>
Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType> {
+ /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
pub fn force(self) -> ForceResult<
Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>
/// Shortens this `String` to the specified length.
///
+ /// If `new_len` is greater than the string's current length, this has no
+ /// effect.
+ ///
/// # Panics
///
- /// Panics if `new_len` > current length, or if `new_len` does not lie on a
- /// [`char`] boundary.
+ /// Panics if `new_len` does not lie on a [`char`] boundary.
///
/// [`char`]: ../../std/primitive.char.html
///
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn truncate(&mut self, new_len: usize) {
- assert!(self.is_char_boundary(new_len));
- self.vec.truncate(new_len)
+ if new_len <= self.len() {
+ assert!(self.is_char_boundary(new_len));
+ self.vec.truncate(new_len)
+ }
}
/// Removes the last character from the string buffer and returns it.
assert_eq!(a.len(), 5);
assert_eq!(a.into_sorted_vec(), [1, 2, 3, 4, 5]);
}
+
+#[test]
+fn test_append() {
+ let mut a = BinaryHeap::from(vec![-10, 1, 2, 3, 3]);
+ let mut b = BinaryHeap::from(vec![-20, 5, 43]);
+
+ a.append(&mut b);
+
+ assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
+ assert!(b.is_empty());
+}
+
+#[test]
+fn test_append_to_empty() {
+ let mut a = BinaryHeap::new();
+ let mut b = BinaryHeap::from(vec![-20, 5, 43]);
+
+ a.append(&mut b);
+
+ assert_eq!(a.into_sorted_vec(), [-20, 5, 43]);
+ assert!(b.is_empty());
+}
+
+#[test]
+fn test_extend_specialization() {
+ let mut a = BinaryHeap::from(vec![-10, 1, 2, 3, 3]);
+ let b = BinaryHeap::from(vec![-20, 5, 43]);
+
+ a.extend(b);
+
+ assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
+}
#![deny(warnings)]
#![feature(binary_heap_extras)]
+#![feature(binary_heap_append)]
#![feature(box_syntax)]
#![feature(btree_range)]
#![feature(collections)]
}
#[test]
-#[should_panic]
fn test_str_truncate_invalid_len() {
let mut s = String::from("12345");
s.truncate(6);
+ assert_eq!(s, "12345");
}
#[test]
// except according to those terms.
use prelude::v1::*;
-use fmt::{self, Write, FlagV1};
+use fmt::{self, FlagV1};
struct PadAdapter<'a, 'b: 'a> {
fmt: &'a mut fmt::Formatter<'b>,
#![allow(missing_docs)]
use char::CharExt;
-use cmp::{Eq, PartialOrd};
+use cmp::PartialOrd;
use convert::From;
use fmt;
use intrinsics;
/// all standard arithmetic operations on the underlying value are
/// intended to have wrapping semantics.
#[stable(feature = "rust1", since = "1.0.0")]
-#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Debug, Default)]
+#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Hash)]
pub struct Wrapping<T>(#[stable(feature = "rust1", since = "1.0.0")] pub T);
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Debug> fmt::Debug for Wrapping<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+#[stable(feature = "wrapping_display", since = "1.10.0")]
+impl<T: fmt::Display> fmt::Display for Wrapping<T> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
mod wrapping;
// All these modules are technically private and only exposed for libcoretest:
use self::Option::*;
use clone::Clone;
-use cmp::{Eq, Ord};
use default::Default;
use iter::ExactSizeIterator;
use iter::{Iterator, DoubleEndedIterator, FromIterator, IntoIterator};
/// `src` is not used before the data is overwritten again (e.g. with `write`,
/// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use
/// because it will attempt to drop the value previously at `*src`.
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// let x = 12;
+/// let y = &x as *const i32;
+///
+/// unsafe { println!("{}", std::ptr::read(y)); }
+/// ```
#[inline(always)]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn read<T>(src: *const T) -> T {
///
/// This is appropriate for initializing uninitialized memory, or overwriting
/// memory that has previously been `read` from.
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// let mut x = 0;
+/// let y = &mut x as *mut i32;
+/// let z = 12;
+///
+/// unsafe {
+/// std::ptr::write(y, z);
+/// println!("{}", std::ptr::read(y));
+/// }
+/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn write<T>(dst: *mut T, src: T) {
/// `src` is not used before the data is overwritten again (e.g. with `write`,
/// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use
/// because it will attempt to drop the value previously at `*src`.
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// let x = 12;
+/// let y = &x as *const i32;
+///
+/// unsafe { println!("{}", std::ptr::read_volatile(y)); }
+/// ```
#[inline]
#[stable(feature = "volatile", since = "1.9.0")]
pub unsafe fn read_volatile<T>(src: *const T) -> T {
///
/// This is appropriate for initializing uninitialized memory, or overwriting
/// memory that has previously been `read` from.
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// let mut x = 0;
+/// let y = &mut x as *mut i32;
+/// let z = 12;
+///
+/// unsafe {
+/// std::ptr::write_volatile(y, z);
+/// println!("{}", std::ptr::read_volatile(y));
+/// }
+/// ```
#[inline]
#[stable(feature = "volatile", since = "1.9.0")]
pub unsafe fn write_volatile<T>(dst: *mut T, src: T) {
use fmt;
use intrinsics::assume;
use iter::*;
-use ops::{FnMut, self, Index};
+use ops::{FnMut, self};
use ops::RangeFull;
use option::Option;
use option::Option::{None, Some};
use char::{self, CharExt};
use clone::Clone;
-use cmp::Eq;
use convert::AsRef;
use default::Default;
use fmt;
mod traits {
use cmp::{Ord, Ordering, PartialEq, PartialOrd, Eq};
- use iter::Iterator;
use option::Option;
use option::Option::Some;
use ops;
use ty::{IntType, UintType};
use ty::{self, Ty, TyCtxt};
use ty::error::TypeError;
-use ty::fold::{TypeFolder, TypeFoldable};
-use ty::relate::{Relate, RelateResult, TypeRelation};
+use ty::fold::TypeFoldable;
+use ty::relate::{RelateResult, TypeRelation};
use traits::PredicateObligations;
use syntax::ast;
use hir;
use hir::print as pprust;
-use middle::cstore::CrateStore;
use hir::def::Def;
use hir::def_id::DefId;
use infer::{self, TypeOrigin};
use ty::{TyVid, IntVid, FloatVid};
use ty::{self, Ty, TyCtxt};
use ty::error::{ExpectedFound, TypeError, UnconstrainedNumeric};
-use ty::fold::{TypeFolder, TypeFoldable};
+use ty::fold::TypeFoldable;
use ty::relate::{Relate, RelateResult, TypeRelation};
use traits::{self, PredicateObligations, ProjectionMode};
use rustc_data_structures::unify::{self, UnificationTable};
use ty::TyCtxt;
use session::{config, early_error, Session};
use lint::{Level, LevelSource, Lint, LintId, LintArray, LintPass};
-use lint::{EarlyLintPass, EarlyLintPassObject, LateLintPass, LateLintPassObject};
+use lint::{EarlyLintPassObject, LateLintPass, LateLintPassObject};
use lint::{Default, CommandLine, Node, Allow, Warn, Deny, Forbid};
use lint::builtin;
use util::nodemap::FnvHashMap;
use std::collections::HashSet;
use syntax::{ast, codemap};
-use syntax::attr::{self, AttrMetaMethods};
+use syntax::attr;
// Any local node that may call something in its body block should be
// explored. For example, if it's a live NodeItem that is a
use session;
use session::config;
-use middle::cstore::CrateStore;
use middle::cstore::LinkagePreference::{self, RequireStatic, RequireDynamic};
use util::nodemap::FnvHashMap;
use dep_graph::DepNode;
use hir::map as hir_map;
use session::Session;
-use middle::cstore::CrateStore;
use hir::def_id::DefId;
use ty;
use middle::weak_lang_items;
use hir::map as hir_map;
use session::Session;
use lint;
-use middle::cstore::{CrateStore, LOCAL_CRATE};
+use middle::cstore::LOCAL_CRATE;
use hir::def::Def;
use hir::def_id::{CRATE_DEF_INDEX, DefId};
use ty::{self, TyCtxt};
use session::config;
use session::Session;
-use middle::cstore::CrateStore;
use middle::lang_items;
use syntax::ast;
/// through the resulting reference.
pub temp_decls: Vec<TempDecl<'tcx>>,
+ /// Names and capture modes of all the closure upvars, assuming
+ /// the first argument is either the closure or a reference to it.
+ pub upvar_decls: Vec<UpvarDecl>,
+
/// A span representing this MIR, for error reporting
pub span: Span,
}
/// If true, this argument is a tuple after monomorphization,
/// and has to be collected from multiple actual arguments.
- pub spread: bool
+ pub spread: bool,
+
+ /// Either special_idents::invalid or the name of a single-binding
+ /// pattern associated with this argument. Useful for debuginfo.
+ pub debug_name: Name
+}
+
+/// A closure capture, with its name and mode.
+#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
+pub struct UpvarDecl {
+ pub debug_name: Name,
+
+ /// If true, the capture is behind a reference.
+ pub by_ref: bool
}
///////////////////////////////////////////////////////////////////////////
ref $($mutability)* var_decls,
ref $($mutability)* arg_decls,
ref $($mutability)* temp_decls,
+ upvar_decls: _,
ref $($mutability)* span,
} = *mir;
arg_decl: & $($mutability)* ArgDecl<'tcx>) {
let ArgDecl {
ref $($mutability)* ty,
- spread: _
+ spread: _,
+ debug_name: _
} = *arg_decl;
self.visit_ty(ty);
use syntax::attr::AttrMetaMethods;
use syntax::errors::{ColorConfig, Handler};
use syntax::parse;
-use syntax::parse::lexer::Reader;
use syntax::parse::token::InternedString;
use syntax::feature_gate::UnstableFeatures;
use std::collections::HashSet;
use std::env;
use std::fs;
-use std::io::prelude::*;
use std::path::{Path, PathBuf};
use session::search_paths::{SearchPaths, PathKind};
use super::{SelectionContext, FulfillmentContext};
use super::util::{fresh_type_vars_for_impl, impl_trait_ref_and_oblig};
-use middle::cstore::CrateStore;
use hir::def_id::DefId;
use infer::{self, InferCtxt, TypeOrigin};
use middle::region;
use super::{Overlap, specializes};
-use middle::cstore::CrateStore;
use hir::def_id::DefId;
use infer;
use traits::{self, ProjectionMode};
use session::Session;
use lint;
use middle;
-use middle::cstore::{CrateStore, LOCAL_CRATE};
+use middle::cstore::LOCAL_CRATE;
use hir::def::DefMap;
use hir::def_id::DefId;
use middle::free_region::FreeRegionMap;
use dep_graph::{self, DepNode};
use hir::map as ast_map;
use middle;
-use middle::cstore::{self, CrateStore, LOCAL_CRATE};
+use middle::cstore::{self, LOCAL_CRATE};
use hir::def::{self, Def, ExportMap};
use hir::def_id::DefId;
use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
use middle::region::{CodeExtent};
use traits;
use ty;
-use ty::fold::TypeFolder;
use ty::subst::{Subst, Substs, VecPerParamSpace};
use ty::walk::TypeWalker;
use util::common::MemoizationMap;
use util::nodemap::FnvHashMap;
use serialize::{Encodable, Encoder, Decodable, Decoder};
-use std::borrow::{Borrow, Cow};
+use std::borrow::Cow;
use std::cell::Cell;
use std::hash::{Hash, Hasher};
use std::iter;
use ty;
use ty::fast_reject;
use ty::{Ty, TyCtxt, TraitRef};
-use std::borrow::{Borrow};
use std::cell::{Cell, RefCell};
use syntax::ast::Name;
use hir;
use std::cmp;
use std::hash::{Hash, SipHasher, Hasher};
use syntax::ast::{self, Name};
-use syntax::attr::{self, AttrMetaMethods, SignedInt, UnsignedInt};
+use syntax::attr::{self, SignedInt, UnsignedInt};
use syntax::codemap::Span;
use hir;
-> BlockAnd<()> {
let discriminant_lvalue = unpack!(block = self.as_lvalue(block, discriminant));
- // Before we do anything, create uninitialized variables with
- // suitable extent for all of the bindings in this match. It's
- // easiest to do this up front because some of these arms may
- // be unreachable or reachable multiple times.
- let var_scope_id = self.innermost_scope_id();
- for arm in &arms {
- self.declare_bindings(var_scope_id, &arm.patterns[0]);
- }
-
let mut arm_blocks = ArmBlocks {
blocks: arms.iter()
.map(|_| self.cfg.start_new_block())
.collect(),
};
- let arm_bodies: Vec<ExprRef<'tcx>> =
- arms.iter()
- .map(|arm| arm.body.clone())
- .collect();
+ // Get the body expressions and their scopes, while declaring bindings.
+ let arm_bodies: Vec<_> = arms.iter().enumerate().map(|(i, arm)| {
+ // Assume that all expressions are wrapped in Scope.
+ let body = self.hir.mirror(arm.body.clone());
+ match body.kind {
+ ExprKind::Scope { extent, value } => {
+ let scope_id = self.push_scope(extent, arm_blocks.blocks[i]);
+ self.declare_bindings(scope_id, &arm.patterns[0]);
+ (extent, self.scopes.pop().unwrap(), value)
+ }
+ _ => {
+ span_bug!(body.span, "arm body is not wrapped in Scope {:?}",
+ body.kind);
+ }
+ }
+ }).collect();
// assemble a list of candidates: there is one candidate per
// pattern, which means there may be more than one candidate
// all the arm blocks will rejoin here
let end_block = self.cfg.start_new_block();
- for (arm_index, arm_body) in arm_bodies.into_iter().enumerate() {
+ let scope_id = self.innermost_scope_id();
+ for (arm_index, (extent, scope, body)) in arm_bodies.into_iter().enumerate() {
let mut arm_block = arm_blocks.blocks[arm_index];
- unpack!(arm_block = self.into(destination, arm_block, arm_body));
+ // Re-enter the scope we created the bindings in.
+ self.scopes.push(scope);
+ unpack!(arm_block = self.into(destination, arm_block, body));
+ unpack!(arm_block = self.pop_scope(extent, arm_block));
self.cfg.terminate(arm_block,
- var_scope_id,
+ scope_id,
span,
TerminatorKind::Goto { target: end_block });
}
use hair::cx::Cx;
use rustc::middle::region::{CodeExtent, CodeExtentData};
-use rustc::ty::{FnOutput, Ty};
+use rustc::ty::{self, FnOutput, Ty};
use rustc::mir::repr::*;
use rustc_data_structures::fnv::FnvHashMap;
use rustc::hir;
+use rustc::hir::pat_util::pat_is_binding;
use std::ops::{Index, IndexMut};
use syntax::ast;
use syntax::codemap::Span;
+use syntax::parse::token;
pub struct Builder<'a, 'tcx: 'a> {
hir: Cx<'a, 'tcx>,
true
}));
+ // Gather the upvars of a closure, if any.
+ let upvar_decls: Vec<_> = tcx.with_freevars(fn_id, |freevars| {
+ freevars.iter().map(|fv| {
+ let by_ref = tcx.upvar_capture(ty::UpvarId {
+ var_id: fv.def.var_id(),
+ closure_expr_id: fn_id
+ }).map_or(false, |capture| match capture {
+ ty::UpvarCapture::ByValue => false,
+ ty::UpvarCapture::ByRef(..) => true
+ });
+ let mut decl = UpvarDecl {
+ debug_name: token::special_idents::invalid.name,
+ by_ref: by_ref
+ };
+ if let Some(hir::map::NodeLocal(pat)) = tcx.map.find(fv.def.var_id()) {
+ if let hir::PatKind::Ident(_, ref ident, _) = pat.node {
+ decl.debug_name = ident.node.name;
+ }
+ }
+ decl
+ }).collect()
+ });
+
(
Mir {
basic_blocks: builder.cfg.basic_blocks,
var_decls: builder.var_decls,
arg_decls: arg_decls.take().expect("args never built?"),
temp_decls: builder.temp_decls,
+ upvar_decls: upvar_decls,
return_ty: return_ty,
span: span
},
self.schedule_drop(pattern.as_ref().map_or(ast_block.span, |pat| pat.span),
argument_extent, &lvalue, ty);
- ArgDecl { ty: ty, spread: false }
+ let mut name = token::special_idents::invalid.name;
+ if let Some(pat) = pattern {
+ if let hir::PatKind::Ident(_, ref ident, _) = pat.node {
+ if pat_is_binding(&self.hir.tcx().def_map.borrow(), pat) {
+ name = ident.node.name;
+ }
+ }
+ }
+
+ ArgDecl {
+ ty: ty,
+ spread: false,
+ debug_name: name
+ }
})
.collect();
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
use rustc::ty::{self, VariantKind};
-use syntax::ast::Name;
+use syntax::ast::{Name, NodeId};
use syntax::attr::AttrMetaMethods;
-use syntax::parse::token::{special_idents, SELF_KEYWORD_NAME, SUPER_KEYWORD_NAME};
+use syntax::parse::token::keywords;
use syntax::codemap::{Span, DUMMY_SP};
use rustc::hir;
block.stmts.iter().any(is_item)
}
+ fn sanity_check_import(&self, view_path: &hir::ViewPath, id: NodeId) {
+ let path = match view_path.node {
+ ViewPathSimple(_, ref path) |
+ ViewPathGlob (ref path) |
+ ViewPathList(ref path, _) => path
+ };
+
+ // Check for type or lifetime parameters (or bindings) in the path segments
+ let found_param = path.segments.iter().any(|segment| {
+ !segment.parameters.types().is_empty() ||
+ !segment.parameters.lifetimes().is_empty() ||
+ !segment.parameters.bindings().is_empty()
+ });
+ if found_param {
+ self.session.span_err(path.span,
+ "type or lifetime parameter is found in import path");
+ }
+
+ // Check for special identifiers in the path:
+ // prevent `self` or `super` at the beginning of a global path.
+ if path.global && path.segments.len() > 0 {
+ let first = path.segments[0].identifier.name;
+ if first == keywords::Super.to_name() || first == keywords::SelfValue.to_name() {
+ self.session.add_lint(
+ lint::builtin::SUPER_OR_SELF_IN_GLOBAL_PATH, id, path.span,
+ format!("expected identifier, found keyword `{}`", first)
+ );
+ }
+ }
+ }
+
/// Constructs the reduced graph for one item.
fn build_reduced_graph_for_item(&mut self, item: &Item, parent_ref: &mut Module<'b>) {
let parent = *parent_ref;
// Extract and intern the module part of the path. For
// globs and lists, the path is found directly in the AST;
// for simple paths we have to munge the path a little.
- let is_global;
let module_path: Vec<Name> = match view_path.node {
ViewPathSimple(_, ref full_path) => {
- is_global = full_path.global;
full_path.segments
.split_last()
.unwrap()
ViewPathGlob(ref module_ident_path) |
ViewPathList(ref module_ident_path, _) => {
- is_global = module_ident_path.global;
module_ident_path.segments
.iter()
.map(|seg| seg.identifier.name)
}
};
- // Checking for special identifiers in path
- // prevent `self` or `super` at beginning of global path
- if is_global && (module_path.first() == Some(&SELF_KEYWORD_NAME) ||
- module_path.first() == Some(&SUPER_KEYWORD_NAME)) {
- self.session.add_lint(
- lint::builtin::SUPER_OR_SELF_IN_GLOBAL_PATH,
- item.id,
- item.span,
- format!("expected identifier, found keyword `{}`",
- module_path.first().unwrap().as_str()));
- }
+ self.sanity_check_import(view_path, item.id);
// Build up the import directives.
- let is_prelude = item.attrs.iter().any(|attr| {
- attr.name() == special_idents::prelude_import.name.as_str()
- });
+ let is_prelude = item.attrs.iter().any(|attr| attr.name() == "prelude_import");
match view_path.node {
ViewPathSimple(binding, ref full_path) => {
return;
};
- scopes[idx] = if !has_variables.contains(idx) {
+ if !has_variables.contains(idx) {
// Do not create a DIScope if there are no variables
// defined in this MIR Scope, to avoid debuginfo bloat.
- parent_scope
- } else {
- let loc = span_start(ccx, scope_data.span);
- let file_metadata = file_metadata(ccx, &loc.file.name);
- unsafe {
- llvm::LLVMDIBuilderCreateLexicalBlock(
- DIB(ccx),
- parent_scope,
- file_metadata,
- loc.line as c_uint,
- loc.col.to_usize() as c_uint)
+
+ // However, we don't skip creating a nested scope if
+ // our parent is the root, because we might want to
+ // put arguments in the root and not have shadowing.
+ if parent_scope != fn_metadata {
+ scopes[idx] = parent_scope;
+ return;
}
+ }
+
+ let loc = span_start(ccx, scope_data.span);
+ let file_metadata = file_metadata(ccx, &loc.file.name);
+ scopes[idx] = unsafe {
+ llvm::LLVMDIBuilderCreateLexicalBlock(
+ DIB(ccx),
+ parent_scope,
+ file_metadata,
+ loc.line as c_uint,
+ loc.col.to_usize() as c_uint)
};
}
let scopes = debuginfo::create_mir_scopes(fcx);
// Allocate variable and temp allocas
+ let args = arg_value_refs(&bcx, &mir, &scopes);
let vars = mir.var_decls.iter()
.map(|decl| (bcx.monomorphize(&decl.ty), decl))
.map(|(mty, decl)| {
TempRef::Operand(None)
})
.collect();
- let args = arg_value_refs(&bcx, &mir, &scopes);
// Allocate a `Block` for every basic block
let block_bcxs: Vec<Block<'blk,'tcx>> =
let byte_offset_of_var_in_tuple =
machine::llelement_offset(bcx.ccx(), lltuplety, i);
- let address_operations = unsafe {
+ let ops = unsafe {
[llvm::LLVMDIBuilderCreateOpDeref(),
llvm::LLVMDIBuilderCreateOpPlus(),
byte_offset_of_var_in_tuple as i64]
let variable_access = VariableAccess::IndirectVariable {
alloca: lltemp,
- address_operations: &address_operations
+ address_operations: &ops
};
declare_local(bcx, token::special_idents::invalid.name,
tupled_arg_ty, scope, variable_access,
lltemp
};
bcx.with_block(|bcx| arg_scope.map(|scope| {
- declare_local(bcx, token::special_idents::invalid.name, arg_ty, scope,
- VariableAccess::DirectVariable { alloca: llval },
- VariableKind::ArgumentVariable(arg_index + 1),
- bcx.fcx().span.unwrap_or(DUMMY_SP));
+ // Is this a regular argument?
+ if arg_index > 0 || mir.upvar_decls.is_empty() {
+ declare_local(bcx, arg_decl.debug_name, arg_ty, scope,
+ VariableAccess::DirectVariable { alloca: llval },
+ VariableKind::ArgumentVariable(arg_index + 1),
+ bcx.fcx().span.unwrap_or(DUMMY_SP));
+ return;
+ }
+
+ // Or is it the closure environment?
+ let (closure_ty, env_ref) = if let ty::TyRef(_, mt) = arg_ty.sty {
+ (mt.ty, true)
+ } else {
+ (arg_ty, false)
+ };
+ let upvar_tys = if let ty::TyClosure(_, ref substs) = closure_ty.sty {
+ &substs.upvar_tys[..]
+ } else {
+ bug!("upvar_decls with non-closure arg0 type `{}`", closure_ty);
+ };
+
+ // Store the pointer to closure data in an alloca for debuginfo
+ // because that's what the llvm.dbg.declare intrinsic expects.
+
+ // FIXME(eddyb) this shouldn't be necessary but SROA seems to
+ // mishandle DW_OP_plus not preceded by DW_OP_deref, i.e. it
+ // doesn't actually strip the offset when splitting the closure
+ // environment into its components so it ends up out of bounds.
+ let env_ptr = if !env_ref {
+ use base::*;
+ use build::*;
+ use common::*;
+ let alloc = alloca(bcx, val_ty(llval), "__debuginfo_env_ptr");
+ Store(bcx, llval, alloc);
+ alloc
+ } else {
+ llval
+ };
+
+ let llclosurety = type_of::type_of(bcx.ccx(), closure_ty);
+ for (i, (decl, ty)) in mir.upvar_decls.iter().zip(upvar_tys).enumerate() {
+ let byte_offset_of_var_in_env =
+ machine::llelement_offset(bcx.ccx(), llclosurety, i);
+
+ let ops = unsafe {
+ [llvm::LLVMDIBuilderCreateOpDeref(),
+ llvm::LLVMDIBuilderCreateOpPlus(),
+ byte_offset_of_var_in_env as i64,
+ llvm::LLVMDIBuilderCreateOpDeref()]
+ };
+
+ // The environment and the capture can each be indirect.
+
+ // FIXME(eddyb) see above why we have to keep
+ // a pointer in an alloca for debuginfo atm.
+ let mut ops = if env_ref || true { &ops[..] } else { &ops[1..] };
+
+ let ty = if let (true, &ty::TyRef(_, mt)) = (decl.by_ref, &ty.sty) {
+ mt.ty
+ } else {
+ ops = &ops[..ops.len() - 1];
+ ty
+ };
+
+ let variable_access = VariableAccess::IndirectVariable {
+ alloca: env_ptr,
+ address_operations: &ops
+ };
+ declare_local(bcx, decl.debug_name, ty, scope, variable_access,
+ VariableKind::CapturedVariable,
+ bcx.fcx().span.unwrap_or(DUMMY_SP));
+ }
}));
LvalueRef::new_sized(llval, LvalueTy::from_ty(arg_ty))
}).collect()
```compile_fail
fn main() {
- let _: Box<std::io::Read+std::io::Write>;
+ let _: Box<std::io::Read + std::io::Write>;
}
```
```
fn main() {
- let _: Box<std::io::Read+Copy+Sync>;
+ let _: Box<std::io::Read + Send + Sync>;
}
```
"##,
/// let abs_difference_1 = (f.1 - x.cos()).abs();
///
/// assert!(abs_difference_0 <= f32::EPSILON);
- /// assert!(abs_difference_0 <= f32::EPSILON);
+ /// assert!(abs_difference_1 <= f32::EPSILON);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
/// let abs_difference_1 = (f.1 - x.cos()).abs();
///
/// assert!(abs_difference_0 < 1e-10);
- /// assert!(abs_difference_0 < 1e-10);
+ /// assert!(abs_difference_1 < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
// except according to those terms.
use prelude::v1::*;
-use io::prelude::*;
use os::unix::prelude::*;
use ffi::{CString, CStr, OsString, OsStr};
use std::cell::{RefCell, Cell};
use std::{error, fmt};
-use std::io::prelude::*;
use std::rc::Rc;
use term;
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- use std::fmt::Display;
-
self.to_str().fmt(f)
}
}
use str::char_at;
use std::cell::RefCell;
-use std::io::Read;
use std::iter;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use parse::PResult;
use std::collections::HashSet;
-use std::io::prelude::*;
use std::mem;
use std::path::{Path, PathBuf};
use std::rc::Rc;
pub enum PathParsingMode {
/// A path with no type parameters; e.g. `foo::bar::Baz`
NoTypesAllowed,
+ /// Same as `NoTypesAllowed`, but may end with `::{` or `::*`, which are left unparsed
+ ImportPrefix,
/// A path with a lifetime and type parameters, with no double colons
/// before the type parameters; e.g. `foo::bar<'a>::Baz<T>`
LifetimeAndTypesWithoutColons,
}
}
- pub fn parse_path_list_item(&mut self) -> PResult<'a, ast::PathListItem> {
- let lo = self.span.lo;
- let node = if self.eat_keyword(keywords::SelfValue) {
- let rename = self.parse_rename()?;
- ast::PathListItemKind::Mod { id: ast::DUMMY_NODE_ID, rename: rename }
- } else {
- let ident = self.parse_ident()?;
- let rename = self.parse_rename()?;
- ast::PathListItemKind::Ident { name: ident, rename: rename, id: ast::DUMMY_NODE_ID }
- };
- let hi = self.last_span.hi;
- Ok(spanned(lo, hi, node))
- }
-
/// Check if the next token is `tok`, and return `true` if so.
///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not
LifetimeAndTypesWithColons => {
self.parse_path_segments_with_colons()?
}
- NoTypesAllowed => {
- self.parse_path_segments_without_types()?
+ NoTypesAllowed | ImportPrefix => {
+ self.parse_path_segments_without_types(mode == ImportPrefix)?
}
};
path.segments.extend(segments);
LifetimeAndTypesWithColons => {
self.parse_path_segments_with_colons()?
}
- NoTypesAllowed => {
- self.parse_path_segments_without_types()?
+ NoTypesAllowed | ImportPrefix => {
+ self.parse_path_segments_without_types(mode == ImportPrefix)?
}
};
/// Examples:
/// - `a::b::c`
- pub fn parse_path_segments_without_types(&mut self) -> PResult<'a, Vec<ast::PathSegment>> {
+ pub fn parse_path_segments_without_types(&mut self, import_prefix: bool)
+ -> PResult<'a, Vec<ast::PathSegment>> {
let mut segments = Vec::new();
loop {
// First, parse an identifier.
parameters: ast::PathParameters::none()
});
- // If we do not see a `::`, stop.
- if !self.eat(&token::ModSep) {
+ // If we do not see a `::` or see `::{`/`::*`, stop.
+ if !self.check(&token::ModSep) || import_prefix && self.is_import_coupler() {
return Ok(segments);
+ } else {
+ self.bump();
}
}
}
self.parse_item_(attrs, true, false)
}
+ fn parse_path_list_items(&mut self) -> PResult<'a, Vec<ast::PathListItem>> {
+ self.parse_unspanned_seq(&token::OpenDelim(token::Brace),
+ &token::CloseDelim(token::Brace),
+ SeqSep::trailing_allowed(token::Comma), |this| {
+ let lo = this.span.lo;
+ let node = if this.eat_keyword(keywords::SelfValue) {
+ let rename = this.parse_rename()?;
+ ast::PathListItemKind::Mod { id: ast::DUMMY_NODE_ID, rename: rename }
+ } else {
+ let ident = this.parse_ident()?;
+ let rename = this.parse_rename()?;
+ ast::PathListItemKind::Ident { name: ident, rename: rename, id: ast::DUMMY_NODE_ID }
+ };
+ let hi = this.last_span.hi;
+ Ok(spanned(lo, hi, node))
+ })
+ }
+
+ /// Checks (without consuming) whether the next tokens are `::{` or `::*`.
+ fn is_import_coupler(&mut self) -> bool {
+ self.check(&token::ModSep) &&
+ self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace) ||
+ *t == token::BinOp(token::Star))
+ }
- /// Matches view_path : MOD? non_global_path as IDENT
- /// | MOD? non_global_path MOD_SEP LBRACE RBRACE
- /// | MOD? non_global_path MOD_SEP LBRACE ident_seq RBRACE
- /// | MOD? non_global_path MOD_SEP STAR
- /// | MOD? non_global_path
+ /// Matches ViewPath:
+ /// MOD_SEP? non_global_path
+ /// MOD_SEP? non_global_path as IDENT
+ /// MOD_SEP? non_global_path MOD_SEP STAR
+ /// MOD_SEP? non_global_path MOD_SEP LBRACE item_seq RBRACE
+ /// MOD_SEP? LBRACE item_seq RBRACE
fn parse_view_path(&mut self) -> PResult<'a, P<ViewPath>> {
let lo = self.span.lo;
-
- // Allow a leading :: because the paths are absolute either way.
- // This occurs with "use $crate::..." in macros.
- let is_global = self.eat(&token::ModSep);
-
- if self.check(&token::OpenDelim(token::Brace)) {
- // use {foo,bar}
- let idents = self.parse_unspanned_seq(
- &token::OpenDelim(token::Brace),
- &token::CloseDelim(token::Brace),
- SeqSep::trailing_allowed(token::Comma),
- |p| p.parse_path_list_item())?;
- let path = ast::Path {
+ if self.check(&token::OpenDelim(token::Brace)) || self.is_import_coupler() {
+ // `{foo, bar}` or `::{foo, bar}`
+ let prefix = ast::Path {
+ global: self.eat(&token::ModSep),
+ segments: Vec::new(),
span: mk_sp(lo, self.span.hi),
- global: is_global,
- segments: Vec::new()
};
- return Ok(P(spanned(lo, self.span.hi, ViewPathList(path, idents))));
- }
-
- let first_ident = self.parse_ident()?;
- let mut path = vec!(first_ident);
- if let token::ModSep = self.token {
- // foo::bar or foo::{a,b,c} or foo::*
- while self.check(&token::ModSep) {
+ let items = self.parse_path_list_items()?;
+ Ok(P(spanned(lo, self.span.hi, ViewPathList(prefix, items))))
+ } else {
+ let prefix = self.parse_path(ImportPrefix)?;
+ if self.is_import_coupler() {
+ // `foo::bar::{a, b}` or `foo::bar::*`
self.bump();
-
- match self.token {
- token::Ident(..) => {
- let ident = self.parse_ident()?;
- path.push(ident);
- }
-
- // foo::bar::{a,b,c}
- token::OpenDelim(token::Brace) => {
- let idents = self.parse_unspanned_seq(
- &token::OpenDelim(token::Brace),
- &token::CloseDelim(token::Brace),
- SeqSep::trailing_allowed(token::Comma),
- |p| p.parse_path_list_item()
- )?;
- let path = ast::Path {
- span: mk_sp(lo, self.span.hi),
- global: is_global,
- segments: path.into_iter().map(|identifier| {
- ast::PathSegment {
- identifier: identifier,
- parameters: ast::PathParameters::none(),
- }
- }).collect()
- };
- return Ok(P(spanned(lo, self.span.hi, ViewPathList(path, idents))));
- }
-
- // foo::bar::*
- token::BinOp(token::Star) => {
+ if self.check(&token::BinOp(token::Star)) {
self.bump();
- let path = ast::Path {
- span: mk_sp(lo, self.span.hi),
- global: is_global,
- segments: path.into_iter().map(|identifier| {
- ast::PathSegment {
- identifier: identifier,
- parameters: ast::PathParameters::none(),
- }
- }).collect()
- };
- return Ok(P(spanned(lo, self.span.hi, ViewPathGlob(path))));
- }
-
- // fall-through for case foo::bar::;
- token::Semi => {
- self.span_err(self.span, "expected identifier or `{` or `*`, found `;`");
- }
-
- _ => break
+ Ok(P(spanned(lo, self.span.hi, ViewPathGlob(prefix))))
+ } else {
+ let items = self.parse_path_list_items()?;
+ Ok(P(spanned(lo, self.span.hi, ViewPathList(prefix, items))))
}
+ } else {
+ // `foo::bar` or `foo::bar as baz`
+ let rename = self.parse_rename()?.
+ unwrap_or(prefix.segments.last().unwrap().identifier);
+ Ok(P(spanned(lo, self.last_span.hi, ViewPathSimple(rename, prefix))))
}
}
- let mut rename_to = path[path.len() - 1];
- let path = ast::Path {
- span: mk_sp(lo, self.last_span.hi),
- global: is_global,
- segments: path.into_iter().map(|identifier| {
- ast::PathSegment {
- identifier: identifier,
- parameters: ast::PathParameters::none(),
- }
- }).collect()
- };
- rename_to = self.parse_rename()?.unwrap_or(rename_to);
- Ok(P(spanned(lo, self.last_span.hi, ViewPathSimple(rename_to, path))))
}
fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
impl fmt::Debug for RcStr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- use std::fmt::Debug;
self[..].fmt(f)
}
}
impl fmt::Display for RcStr {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- use std::fmt::Display;
self[..].fmt(f)
}
}
use self::NamePadding::*;
use self::OutputLocation::*;
-use stats::Stats;
use std::boxed::FnBox;
-use term::Terminal;
use std::any::Any;
use std::cmp;
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+mod a {
+ pub mod b {
+ pub mod c {
+ pub struct S;
+ pub struct Z;
+ }
+ }
+}
+
+macro_rules! import {
+ ($p: path) => (use $p {S, Z}); //~ERROR expected one of `::`, `;`, or `as`, found `{`
+}
+
+import! { a::b::c }
+
+fn main() {}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+mod a {
+ pub mod b {
+ pub mod c {
+ pub struct S;
+ pub struct Z;
+ }
+ }
+}
+
+macro_rules! import {
+ ($p: path) => (use ::$p {S, Z}); //~ERROR expected identifier, found `a::b::c`
+}
+
+import! { a::b::c }
+
+fn main() {}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+mod a {
+ pub mod b {
+ pub mod c {
+ pub struct S<T>(T);
+ }
+ }
+}
+
+macro_rules! import {
+ ($p: path) => (use $p;);
+}
+
+import! { a::b::c::S<u8> } //~ERROR type or lifetime parameter is found in import path
+
+fn main() {}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use self::Self as Foo; //~ ERROR unresolved import `self::Self`
+
+pub fn main() {}
}
}
-use self::Self as Foo;
-//~^ ERROR expected identifier, found keyword `Self`
-
use std::option::Option as Self;
//~^ ERROR expected identifier, found keyword `Self`
#![allow(unused_variables)]
#![allow(dead_code)]
-#![feature(omit_gdb_pretty_printer_section, rustc_attrs)]
+#![feature(omit_gdb_pretty_printer_section)]
#![omit_gdb_pretty_printer_section]
trait TraitWithAssocType {
zzz(); // #break
}
-#[rustc_no_mir] // FIXME(#32790) MIR reuses scopes for match arms.
fn assoc_enum<T: TraitWithAssocType>(arg: Enum<T>) {
match arg {
#![omit_gdb_pretty_printer_section]
#[no_stack_check]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing argument names.
+#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars.
fn immediate_args(a: isize, b: bool, c: f64) {
println!("");
}
}
#[no_stack_check]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing argument names.
+#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars.
fn non_immediate_args(a: BigStruct, b: BigStruct) {
println!("");
}
#[no_stack_check]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing argument names.
+#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars.
fn binding(a: i64, b: u64, c: f64) {
let x = 0;
println!("");
}
#[no_stack_check]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing argument names.
+#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars.
fn assignment(mut a: u64, b: u64, c: f64) {
a = b;
println!("");
}
#[no_stack_check]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing argument names.
+#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars.
fn function_call(x: u64, y: u64, z: f64) {
println!("Hi!")
}
#[no_stack_check]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing argument names.
+#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars.
fn identifier(x: u64, y: u64, z: f64) -> u64 {
x
}
#[no_stack_check]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing argument names.
+#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars.
fn return_expr(x: u64, y: u64, z: f64) -> u64 {
return x;
}
#[no_stack_check]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing argument names.
+#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars.
fn arithmetic_expr(x: u64, y: u64, z: f64) -> u64 {
x + y
}
#[no_stack_check]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing argument names.
+#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars.
fn if_expr(x: u64, y: u64, z: f64) -> u64 {
if x + y < 1000 {
x
}
#[no_stack_check]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing argument names.
+#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars.
fn while_expr(mut x: u64, y: u64, z: u64) -> u64 {
while x + y < 1000 {
x += z
}
#[no_stack_check]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing argument names.
+#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars.
fn loop_expr(mut x: u64, y: u64, z: u64) -> u64 {
loop {
x += z;
// gdb-command:continue
#![allow(unused_variables)]
-#![feature(no_debug, rustc_attrs)]
+#![feature(no_debug)]
#![feature(omit_gdb_pretty_printer_section)]
#![omit_gdb_pretty_printer_section]
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is inaccurate for returns.
+#[inline(never)]
+fn id<T>(x: T) -> T {x}
+
fn function_with_debuginfo() {
let abc = 10_usize;
- return (); // #break
+ id(abc); // #break
}
#[no_debug]
fn function_without_debuginfo() {
let abc = -57i32;
- return (); // #break
+ id(abc); // #break
}
fn main() {
// lldb-command:continue
#![allow(unused_variables)]
-#![feature(box_syntax, rustc_attrs, stmt_expr_attributes)]
+#![feature(box_syntax)]
#![feature(omit_gdb_pretty_printer_section)]
#![omit_gdb_pretty_printer_section]
c: usize
}
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing captures.
fn main() {
let mut variable = 1;
let constant = 2;
let struct_ref = &a_struct;
let owned: Box<_> = box 6;
- let mut closure =
- #[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing captures.
- || {
+ let mut closure = || {
let closure_local = 8;
- let mut nested_closure =
- #[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing captures.
- || {
+ let mut nested_closure = || {
zzz(); // #break
variable = constant + a_struct.a + struct_ref.a + *owned + closure_local;
};
// lldb-check:[...]$2 = 5
#![allow(unused_variables)]
-#![feature(unboxed_closures, box_syntax, rustc_attrs, stmt_expr_attributes)]
+#![feature(unboxed_closures, box_syntax)]
#![feature(omit_gdb_pretty_printer_section)]
#![omit_gdb_pretty_printer_section]
c: usize
}
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing captures.
fn main() {
let constant = 1;
let owned: Box<_> = box 5;
- let closure =
- #[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing captures.
- move || {
+ let closure = move || {
zzz(); // #break
do_something(&constant, &a_struct.a, &*owned);
};
// The `self` argument of the following closure should be passed by value
// to FnOnce::call_once(self, args), which gets translated a bit differently
// than the regular case. Let's make sure this is supported too.
- let immedate_env =
- #[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing captures.
- move || {
+ let immedate_env = move || {
zzz(); // #break
return constant2;
};
// lldb-command:print *owned
// lldb-check:[...]$9 = 6
-#![feature(unboxed_closures, box_syntax, rustc_attrs, stmt_expr_attributes)]
+#![feature(unboxed_closures, box_syntax)]
#![allow(unused_variables)]
#![feature(omit_gdb_pretty_printer_section)]
#![omit_gdb_pretty_printer_section]
c: usize
}
-#[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing captures.
fn main() {
let mut variable = 1;
let constant = 2;
let owned: Box<_> = box 6;
{
- let mut first_closure =
- #[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing captures.
- || {
+ let mut first_closure = || {
zzz(); // #break
variable = constant + a_struct.a + struct_ref.a + *owned;
};
}
{
- let mut second_closure =
- #[rustc_no_mir] // FIXME(#31005) MIR debuginfo is missing captures.
- || {
+ let mut second_closure = || {
zzz(); // #break
variable = constant + a_struct.a + struct_ref.a + *owned;
};
// compile-flags: -Z parse-only
-use std::any::; //~ ERROR expected identifier or `{` or `*`, found `;`
+use std::any::; //~ ERROR expected identifier, found `;`
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+mod a {
+ pub mod b {
+ pub mod c {
+ pub struct S;
+ pub struct Z;
+ }
+ pub struct W;
+ }
+}
+
+macro_rules! import {
+ (1 $p: path) => (use $p;);
+ (2 $p: path) => (use $p::{Z};);
+ (3 $p: path) => (use $p::*;);
+}
+
+import! { 1 a::b::c::S }
+import! { 2 a::b::c }
+import! { 3 a::b }
+
+fn main() {
+ let s = S;
+ let z = Z;
+ let w = W;
+}