use core::iter::{Map, FromIterator};
use core::ops::{Index, IndexMut};
use core::{iter, fmt, mem};
+use Bound::{self, Included, Excluded, Unbounded};
use ring_buf::RingBuf;
use super::node::{Traversal, MutTraversal, MoveTraversal};
use super::node::{self, Node, Found, GoDown};
-// FIXME(conventions): implement bounded iterators
-
/// A map based on a B-Tree.
///
/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
/// An abstract base over-which all other BTree iterators are built.
struct AbsIter<T> {
- lca: T,
- left: RingBuf<T>,
- right: RingBuf<T>,
+ traversals: RingBuf<T>,
size: uint,
}
inner: Map<(&'a K, &'a V), &'a V, Iter<'a, K, V>, fn((&'a K, &'a V)) -> &'a V>
}
+/// An iterator over a sub-range of BTreeMap's entries.
+pub struct Range<'a, K: 'a, V: 'a> {
+ inner: AbsIter<Traversal<'a, K, V>>
+}
+
+/// A mutable iterator over a sub-range of BTreeMap's entries.
+pub struct RangeMut<'a, K: 'a, V: 'a> {
+ inner: AbsIter<MutTraversal<'a, K, V>>
+}
+
/// A view into a single entry in a map, which may either be vacant or occupied.
#[unstable = "precise API still under development"]
pub enum Entry<'a, K:'a, V:'a> {
}
/// Represents an operation to perform inside the following iterator methods.
-/// This is necessary to use in `next` because we want to modify self.left inside
-/// a match that borrows it. Similarly, in `next_back` for self.right. Instead, we use this
-/// enum to note what we want to do, and do it after the match.
+/// This is necessary to use in `next` because we want to modify `self.traversals` inside
+/// a match that borrows it. Similarly in `next_back`. Instead, we use this enum to note
+/// what we want to do, and do it after the match.
enum StackOp<T> {
Push(T),
Pop,
}
-
impl<K, V, E, T> Iterator for AbsIter<T> where
T: DoubleEndedIterator<Item=TraversalItem<K, V, E>> + Traverse<E>,
{
type Item = (K, V);
- // This function is pretty long, but only because there's a lot of cases to consider.
- // Our iterator represents two search paths, left and right, to the smallest and largest
- // elements we have yet to yield. lca represents the least common ancestor of these two paths,
- // above-which we never walk, since everything outside it has already been consumed (or was
- // never in the range to iterate).
- //
- // Note that the design of these iterators permits an *arbitrary* initial pair of min and max,
- // making these arbitrary sub-range iterators. However the logic to construct these paths
- // efficiently is fairly involved, so this is a FIXME. The sub-range iterators also wouldn't be
- // able to accurately predict size, so those iterators can't implement ExactSizeIterator.
+ // Our iterator represents a queue of all ancestors of elements we have
+ // yet to yield, from smallest to largest. Note that the design of these
+ // iterators permits an *arbitrary* initial pair of min and max, making
+ // these arbitrary sub-range iterators.
fn next(&mut self) -> Option<(K, V)> {
loop {
- // We want the smallest element, so try to get the top of the left stack
- let op = match self.left.back_mut() {
- // The left stack is empty, so try to get the next element of the two paths
- // LCAs (the left search path is currently a subpath of the right one)
- None => match self.lca.next() {
- // The lca has been exhausted, walk further down the right path
- None => match self.right.pop_front() {
- // The right path is exhausted, so we're done
- None => return None,
- // The right path had something, make that the new LCA
- // and restart the whole process
- Some(right) => {
- self.lca = right;
- continue;
- }
- },
- // The lca yielded an edge, make that the new head of the left path
- Some(Edge(next)) => Push(Traverse::traverse(next)),
- // The lca yielded an entry, so yield that
- Some(Elem(k, v)) => {
- self.size -= 1;
- return Some((k, v))
- }
- },
- // The left stack wasn't empty, so continue along the node in its head
+ // We want the smallest element, so try to get the back of the queue
+ let op = match self.traversals.back_mut() {
+ None => return None,
+ // The queue wasn't empty, so continue along the node in its head
Some(iter) => match iter.next() {
- // The head of the left path is empty, so Pop it off and restart the process
+ // The head is empty, so Pop it off and continue the process
None => Pop,
- // The head of the left path yielded an edge, so make that the new head
- // of the left path
+ // The head yielded an edge, so make that the new head
Some(Edge(next)) => Push(Traverse::traverse(next)),
- // The head of the left path yielded entry, so yield that
- Some(Elem(k, v)) => {
+ // The head yielded an entry, so yield that
+ Some(Elem(kv)) => {
self.size -= 1;
- return Some((k, v))
+ return Some(kv)
}
}
};
- // Handle any operation on the left stack as necessary
+ // Handle any operation as necessary, without a conflicting borrow of the queue
match op {
- Push(item) => { self.left.push_back(item); },
- Pop => { self.left.pop_back(); },
+ Push(item) => { self.traversals.push_back(item); },
+ Pop => { self.traversals.pop_back(); },
}
}
}
T: DoubleEndedIterator<Item=TraversalItem<K, V, E>> + Traverse<E>,
{
// next_back is totally symmetric to next
+ #[inline]
fn next_back(&mut self) -> Option<(K, V)> {
loop {
- let op = match self.right.back_mut() {
- None => match self.lca.next_back() {
- None => match self.left.pop_front() {
- None => return None,
- Some(left) => {
- self.lca = left;
- continue;
- }
- },
- Some(Edge(next)) => Push(Traverse::traverse(next)),
- Some(Elem(k, v)) => {
- self.size -= 1;
- return Some((k, v))
- }
- },
+ let op = match self.traversals.front_mut() {
+ None => return None,
Some(iter) => match iter.next_back() {
None => Pop,
Some(Edge(next)) => Push(Traverse::traverse(next)),
- Some(Elem(k, v)) => {
+ Some(Elem(kv)) => {
self.size -= 1;
- return Some((k, v))
+ return Some(kv)
}
}
};
match op {
- Push(item) => { self.right.push_back(item); },
- Pop => { self.right.pop_back(); }
+ Push(item) => { self.traversals.push_front(item); },
+ Pop => { self.traversals.pop_front(); }
}
}
}
#[stable]
impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {}
+impl<'a, K, V> Iterator for Range<'a, K, V> {
+ type Item = (&'a K, &'a V);
+
+ fn next(&mut self) -> Option<(&'a K, &'a V)> { self.inner.next() }
+}
+impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a V)> { self.inner.next_back() }
+}
+
+impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
+ type Item = (&'a K, &'a mut V);
+
+ fn next(&mut self) -> Option<(&'a K, &'a mut V)> { self.inner.next() }
+}
+impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> { self.inner.next_back() }
+}
+
impl<'a, K: Ord, V> Entry<'a, K, V> {
#[unstable = "matches collection reform v2 specification, waiting for dust to settle"]
/// Returns a mutable reference to the entry if occupied, or the VacantEntry if vacant
#[stable]
pub fn iter(&self) -> Iter<K, V> {
let len = self.len();
+ // NB. The initial capacity for ringbuf is large enough to avoid reallocs in many cases.
+ let mut lca = RingBuf::new();
+ lca.push_back(Traverse::traverse(&self.root));
Iter {
inner: AbsIter {
- lca: Traverse::traverse(&self.root),
- left: RingBuf::new(),
- right: RingBuf::new(),
+ traversals: lca,
size: len,
}
}
#[stable]
pub fn iter_mut(&mut self) -> IterMut<K, V> {
let len = self.len();
+ let mut lca = RingBuf::new();
+ lca.push_back(Traverse::traverse(&mut self.root));
IterMut {
inner: AbsIter {
- lca: Traverse::traverse(&mut self.root),
- left: RingBuf::new(),
- right: RingBuf::new(),
+ traversals: lca,
size: len,
}
}
#[stable]
pub fn into_iter(self) -> IntoIter<K, V> {
let len = self.len();
+ let mut lca = RingBuf::new();
+ lca.push_back(Traverse::traverse(self.root));
IntoIter {
inner: AbsIter {
- lca: Traverse::traverse(self.root),
- left: RingBuf::new(),
- right: RingBuf::new(),
+ traversals: lca,
size: len,
}
}
pub fn is_empty(&self) -> bool { self.len() == 0 }
}
+macro_rules! range_impl {
+ ($root:expr, $min:expr, $max:expr, $as_slices_internal:ident, $iter:ident, $Range:ident,
+ $edges:ident, [$($mutability:ident)*]) => (
+ {
+ // A deque that encodes two search paths containing (left-to-right):
+ // a series of truncated-from-the-left iterators, the LCA's doubly-truncated iterator,
+ // and a series of truncated-from-the-right iterators.
+ let mut traversals = RingBuf::new();
+ let (root, min, max) = ($root, $min, $max);
+
+ let mut leftmost = None;
+ let mut rightmost = None;
+
+ match (&min, &max) {
+ (&Unbounded, &Unbounded) => {
+ traversals.push_back(Traverse::traverse(root))
+ }
+ (&Unbounded, &Included(_)) | (&Unbounded, &Excluded(_)) => {
+ rightmost = Some(root);
+ }
+ (&Included(_), &Unbounded) | (&Excluded(_), &Unbounded) => {
+ leftmost = Some(root);
+ }
+ (&Included(min_key), &Included(max_key))
+ | (&Included(min_key), &Excluded(max_key))
+ | (&Excluded(min_key), &Included(max_key))
+ | (&Excluded(min_key), &Excluded(max_key)) => {
+ // lca represents the Lowest Common Ancestor, above which we never
+ // walk, since everything else is outside the range to iterate.
+ // ___________________
+ // |__0_|_80_|_85_|_90_| (root)
+ // | | | | |
+ // |
+ // v
+ // ___________________
+ // |__5_|_15_|_30_|_73_|
+ // | | | | |
+ // |
+ // v
+ // ___________________
+ // |_33_|_58_|_63_|_68_| lca for the range [41, 65]
+ // | |\___|___/| | iterator at traversals[2]
+ // | |
+ // | v
+ // v rightmost
+ // leftmost
+ let mut is_leaf = root.is_leaf();
+ let mut lca = root.$as_slices_internal();
+ loop {
+ let slice = lca.slice_from(min_key).slice_to(max_key);
+ if let [ref $($mutability)* edge] = slice.edges {
+ // Follow the only edge that leads the node that covers the range.
+ is_leaf = edge.is_leaf();
+ lca = edge.$as_slices_internal();
+ } else {
+ let mut iter = slice.$iter();
+ if is_leaf {
+ leftmost = None;
+ rightmost = None;
+ } else {
+ // Only change the state of nodes with edges.
+ leftmost = iter.next_edge_item();
+ rightmost = iter.next_edge_item_back();
+ }
+ traversals.push_back(iter);
+ break;
+ }
+ }
+ }
+ }
+ // Keep narrowing the range by going down.
+ // ___________________
+ // |_38_|_43_|_48_|_53_|
+ // | |____|____|____/ iterator at traversals[1]
+ // |
+ // v
+ // ___________________
+ // |_39_|_40_|_41_|_42_| (leaf, the last leftmost)
+ // \_________| iterator at traversals[0]
+ match min {
+ Included(key) | Excluded(key) =>
+ while let Some(left) = leftmost {
+ let is_leaf = left.is_leaf();
+ let mut iter = left.$as_slices_internal().slice_from(key).$iter();
+ leftmost = if is_leaf {
+ None
+ } else {
+ // Only change the state of nodes with edges.
+ iter.next_edge_item()
+ };
+ traversals.push_back(iter);
+ },
+ _ => {}
+ }
+ // If the leftmost iterator starts with an element, then it was an exact match.
+ if let (Excluded(_), Some(leftmost_iter)) = (min, traversals.back_mut()) {
+ // Drop this excluded element. `next_kv_item` has no effect when
+ // the next item is an edge.
+ leftmost_iter.next_kv_item();
+ }
+
+ // The code for the right side is similar.
+ match max {
+ Included(key) | Excluded(key) =>
+ while let Some(right) = rightmost {
+ let is_leaf = right.is_leaf();
+ let mut iter = right.$as_slices_internal().slice_to(key).$iter();
+ rightmost = if is_leaf {
+ None
+ } else {
+ iter.next_edge_item_back()
+ };
+ traversals.push_front(iter);
+ },
+ _ => {}
+ }
+ if let (Excluded(_), Some(rightmost_iter)) = (max, traversals.front_mut()) {
+ rightmost_iter.next_kv_item_back();
+ }
+
+ $Range {
+ inner: AbsIter {
+ traversals: traversals,
+ size: 0, // unused
+ }
+ }
+ }
+ )
+}
+
impl<K: Ord, V> BTreeMap<K, V> {
+ /// Constructs a double-ended iterator over a sub-range of elements in the map, starting
+ /// at min, and ending at max. If min is `Unbounded`, then it will be treated as "negative
+ /// infinity", and if max is `Unbounded`, then it will be treated as "positive infinity".
+ /// Thus range(Unbounded, Unbounded) will yield the whole collection.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::Bound::{Included, Unbounded};
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(3u, "a");
+ /// map.insert(5u, "b");
+ /// map.insert(8u, "c");
+ /// for (&key, &value) in map.range(Included(&4), Included(&8)) {
+ /// println!("{}: {}", key, value);
+ /// }
+ /// assert_eq!(Some((&5u, &"b")), map.range(Included(&4), Unbounded).next());
+ /// ```
+ #[unstable = "matches collection reform specification, waiting for dust to settle"]
+ pub fn range<'a>(&'a self, min: Bound<&K>, max: Bound<&K>) -> Range<'a, K, V> {
+ range_impl!(&self.root, min, max, as_slices_internal, iter, Range, edges, [])
+ }
+
+ /// Constructs a mutable double-ended iterator over a sub-range of elements in the map, starting
+ /// at min, and ending at max. If min is `Unbounded`, then it will be treated as "negative
+ /// infinity", and if max is `Unbounded`, then it will be treated as "positive infinity".
+ /// Thus range(Unbounded, Unbounded) will yield the whole collection.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::Bound::{Included, Excluded};
+ ///
+ /// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"].iter()
+ /// .map(|&s| (s, 0))
+ /// .collect();
+ /// for (_, balance) in map.range_mut(Included(&"B"), Excluded(&"Cheryl")) {
+ /// *balance += 100;
+ /// }
+ /// for (name, balance) in map.iter() {
+ /// println!("{} => {}", name, balance);
+ /// }
+ /// ```
+ #[unstable = "matches collection reform specification, waiting for dust to settle"]
+ pub fn range_mut<'a>(&'a mut self, min: Bound<&K>, max: Bound<&K>) -> RangeMut<'a, K, V> {
+ range_impl!(&mut self.root, min, max, as_slices_internal_mut, iter_mut, RangeMut,
+ edges_mut, [mut])
+ }
+
/// Gets the given key's corresponding entry in the map for in-place manipulation.
///
/// # Examples
#[cfg(test)]
mod test {
use prelude::*;
+ use std::iter::range_inclusive;
use super::{BTreeMap, Occupied, Vacant};
+ use Bound::{self, Included, Excluded, Unbounded};
#[test]
fn test_basic_large() {
// Forwards
let mut map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
- {
- let mut iter = map.iter();
+ fn test<T>(size: uint, mut iter: T) where T: Iterator<Item=(uint, uint)> {
for i in range(0, size) {
assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
- assert_eq!(iter.next().unwrap(), (&i, &i));
+ assert_eq!(iter.next().unwrap(), (i, i));
}
assert_eq!(iter.size_hint(), (0, Some(0)));
assert_eq!(iter.next(), None);
}
+ test(size, map.iter().map(|(&k, &v)| (k, v)));
+ test(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
+ test(size, map.into_iter());
+ }
- {
- let mut iter = map.iter_mut();
- for i in range(0, size) {
- assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
- assert_eq!(iter.next().unwrap(), (&i, &mut (i + 0)));
- }
- assert_eq!(iter.size_hint(), (0, Some(0)));
- assert_eq!(iter.next(), None);
- }
+ #[test]
+ fn test_iter_rev() {
+ let size = 10000u;
- {
- let mut iter = map.into_iter();
+ // Forwards
+ let mut map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
+
+ fn test<T>(size: uint, mut iter: T) where T: Iterator<Item=(uint, uint)> {
for i in range(0, size) {
assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
- assert_eq!(iter.next().unwrap(), (i, i));
+ assert_eq!(iter.next().unwrap(), (size - i - 1, size - i - 1));
}
assert_eq!(iter.size_hint(), (0, Some(0)));
assert_eq!(iter.next(), None);
}
-
+ test(size, map.iter().rev().map(|(&k, &v)| (k, v)));
+ test(size, map.iter_mut().rev().map(|(&k, &mut v)| (k, v)));
+ test(size, map.into_iter().rev());
}
#[test]
- fn test_iter_rev() {
+ fn test_iter_mixed() {
let size = 10000u;
// Forwards
let mut map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
- {
- let mut iter = map.iter().rev();
- for i in range(0, size) {
- assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
- assert_eq!(iter.next().unwrap(), (&(size - i - 1), &(size - i - 1)));
+ fn test<T>(size: uint, mut iter: T)
+ where T: Iterator<Item=(uint, uint)> + DoubleEndedIterator {
+ for i in range(0, size / 4) {
+ assert_eq!(iter.size_hint(), (size - i * 2, Some(size - i * 2)));
+ assert_eq!(iter.next().unwrap(), (i, i));
+ assert_eq!(iter.next_back().unwrap(), (size - i - 1, size - i - 1));
+ }
+ for i in range(size / 4, size * 3 / 4) {
+ assert_eq!(iter.size_hint(), (size * 3 / 4 - i, Some(size * 3 / 4 - i)));
+ assert_eq!(iter.next().unwrap(), (i, i));
}
assert_eq!(iter.size_hint(), (0, Some(0)));
assert_eq!(iter.next(), None);
}
+ test(size, map.iter().map(|(&k, &v)| (k, v)));
+ test(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
+ test(size, map.into_iter());
+ }
- {
- let mut iter = map.iter_mut().rev();
- for i in range(0, size) {
- assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
- assert_eq!(iter.next().unwrap(), (&(size - i - 1), &mut(size - i - 1)));
- }
- assert_eq!(iter.size_hint(), (0, Some(0)));
- assert_eq!(iter.next(), None);
+ #[test]
+ fn test_range_small() {
+ let size = 5u;
+
+ // Forwards
+ let map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
+
+ let mut j = 0u;
+ for ((&k, &v), i) in map.range(Included(&2), Unbounded).zip(range(2u, size)) {
+ assert_eq!(k, i);
+ assert_eq!(v, i);
+ j += 1;
}
+ assert_eq!(j, size - 2);
+ }
- {
- let mut iter = map.into_iter().rev();
- for i in range(0, size) {
- assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
- assert_eq!(iter.next().unwrap(), (size - i - 1, size - i - 1));
+ #[test]
+ fn test_range_1000() {
+ let size = 1000u;
+ let map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
+
+ fn test(map: &BTreeMap<uint, uint>, size: uint, min: Bound<&uint>, max: Bound<&uint>) {
+ let mut kvs = map.range(min, max).map(|(&k, &v)| (k, v));
+ let mut pairs = range(0, size).map(|i| (i, i));
+
+ for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
+ assert_eq!(kv, pair);
}
- assert_eq!(iter.size_hint(), (0, Some(0)));
- assert_eq!(iter.next(), None);
+ assert_eq!(kvs.next(), None);
+ assert_eq!(pairs.next(), None);
}
+ test(&map, size, Included(&0), Excluded(&size));
+ test(&map, size, Unbounded, Excluded(&size));
+ test(&map, size, Included(&0), Included(&(size - 1)));
+ test(&map, size, Unbounded, Included(&(size - 1)));
+ test(&map, size, Included(&0), Unbounded);
+ test(&map, size, Unbounded, Unbounded);
+ }
+
+ #[test]
+ fn test_range() {
+ let size = 200u;
+ let map: BTreeMap<uint, uint> = range(0, size).map(|i| (i, i)).collect();
+
+ for i in range(0, size) {
+ for j in range(i, size) {
+ let mut kvs = map.range(Included(&i), Included(&j)).map(|(&k, &v)| (k, v));
+ let mut pairs = range_inclusive(i, j).map(|i| (i, i));
+ for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
+ assert_eq!(kv, pair);
+ }
+ assert_eq!(kvs.next(), None);
+ assert_eq!(pairs.next(), None);
+ }
+ }
}
#[test]
_capacity: uint,
}
+struct NodeSlice<'a, K: 'a, V: 'a> {
+ keys: &'a [K],
+ vals: &'a [V],
+ pub edges: &'a [Node<K, V>],
+ head_is_edge: bool,
+ tail_is_edge: bool,
+ has_edges: bool,
+}
+
+struct MutNodeSlice<'a, K: 'a, V: 'a> {
+ keys: &'a [K],
+ vals: &'a mut [V],
+ pub edges: &'a mut [Node<K, V>],
+ head_is_edge: bool,
+ tail_is_edge: bool,
+ has_edges: bool,
+}
+
/// Rounds up to a multiple of a power of two. Returns the closest multiple
/// of `target_alignment` that is higher or equal to `unrounded`.
///
}
#[inline]
- pub fn as_slices_internal<'a>(&'a self) -> (&'a [K], &'a [V], &'a [Node<K, V>]) {
+ pub fn as_slices_internal<'b>(&'b self) -> NodeSlice<'b, K, V> {
+ let is_leaf = self.is_leaf();
let (keys, vals) = self.as_slices();
let edges: &[_] = if self.is_leaf() {
&[]
})
}
};
- (keys, vals, edges)
+ NodeSlice {
+ keys: keys,
+ vals: vals,
+ edges: edges,
+ head_is_edge: true,
+ tail_is_edge: true,
+ has_edges: !is_leaf,
+ }
}
#[inline]
- pub fn as_slices_internal_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V],
- &'a mut [Node<K, V>]) {
+ pub fn as_slices_internal_mut<'b>(&'b mut self) -> MutNodeSlice<'b, K, V> {
unsafe { mem::transmute(self.as_slices_internal()) }
}
#[inline]
pub fn edges<'a>(&'a self) -> &'a [Node<K, V>] {
- self.as_slices_internal().2
+ self.as_slices_internal().edges
}
#[inline]
pub fn edges_mut<'a>(&'a mut self) -> &'a mut [Node<K, V>] {
- self.as_slices_internal_mut().2
+ self.as_slices_internal_mut().edges
}
}
// FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V).
// For the B configured as of this writing (B = 6), binary search was *significantly*
// worse for uints.
- let (found, index) = node.search_linear(key);
- if found {
- Found(Handle {
- node: node,
- index: index
- })
- } else {
- GoDown(Handle {
- node: node,
- index: index
- })
+ match node.as_slices_internal().search_linear(key) {
+ (index, true) => Found(Handle { node: node, index: index }),
+ (index, false) => GoDown(Handle { node: node, index: index }),
}
}
-
- fn search_linear<Q: ?Sized>(&self, key: &Q) -> (bool, uint) where Q: BorrowFrom<K> + Ord {
- for (i, k) in self.keys().iter().enumerate() {
- match key.cmp(BorrowFrom::borrow_from(k)) {
- Greater => {},
- Equal => return (true, i),
- Less => return (false, i),
- }
- }
- (false, self.len())
- }
}
// Public interface
}
pub fn iter<'a>(&'a self) -> Traversal<'a, K, V> {
- let is_leaf = self.is_leaf();
- let (keys, vals, edges) = self.as_slices_internal();
- Traversal {
- inner: ElemsAndEdges(
- keys.iter().zip(vals.iter()),
- edges.iter()
- ),
- head_is_edge: true,
- tail_is_edge: true,
- has_edges: !is_leaf,
- }
+ self.as_slices_internal().iter()
}
pub fn iter_mut<'a>(&'a mut self) -> MutTraversal<'a, K, V> {
- let is_leaf = self.is_leaf();
- let (keys, vals, edges) = self.as_slices_internal_mut();
- MutTraversal {
- inner: ElemsAndEdges(
- keys.iter().zip(vals.iter_mut()),
- edges.iter_mut()
- ),
- head_is_edge: true,
- tail_is_edge: true,
- has_edges: !is_leaf,
- }
+ self.as_slices_internal_mut().iter_mut()
}
pub fn into_iter(self) -> MoveTraversal<K, V> {
/// A trait for pairs of `Iterator`s, one over edges and the other over key/value pairs. This is
/// necessary, as the `MoveTraversalImpl` needs to have a destructor that deallocates the `Node`,
/// and a pair of `Iterator`s would require two independent destructors.
-trait TraversalImpl<K, V, E> {
- fn next_kv(&mut self) -> Option<(K, V)>;
- fn next_kv_back(&mut self) -> Option<(K, V)>;
+trait TraversalImpl {
+ type Item;
+ type Edge;
+
+ fn next_kv(&mut self) -> Option<Self::Item>;
+ fn next_kv_back(&mut self) -> Option<Self::Item>;
- fn next_edge(&mut self) -> Option<E>;
- fn next_edge_back(&mut self) -> Option<E>;
+ fn next_edge(&mut self) -> Option<Self::Edge>;
+ fn next_edge_back(&mut self) -> Option<Self::Edge>;
}
/// A `TraversalImpl` that actually is backed by two iterators. This works in the non-moving case,
struct ElemsAndEdges<Elems, Edges>(Elems, Edges);
impl<K, V, E, Elems: DoubleEndedIterator, Edges: DoubleEndedIterator>
- TraversalImpl<K, V, E> for ElemsAndEdges<Elems, Edges>
+ TraversalImpl for ElemsAndEdges<Elems, Edges>
where Elems : Iterator<Item=(K, V)>, Edges : Iterator<Item=E>
{
+ type Item = (K, V);
+ type Edge = E;
fn next_kv(&mut self) -> Option<(K, V)> { self.0.next() }
fn next_kv_back(&mut self) -> Option<(K, V)> { self.0.next_back() }
is_leaf: bool
}
-impl<K, V> TraversalImpl<K, V, Node<K, V>> for MoveTraversalImpl<K, V> {
+impl<K, V> TraversalImpl for MoveTraversalImpl<K, V> {
+ type Item = (K, V);
+ type Edge = Node<K, V>;
+
fn next_kv(&mut self) -> Option<(K, V)> {
match (self.keys.next(), self.vals.next()) {
(Some(k), Some(v)) => Some((k, v)),
has_edges: bool,
}
-/// A single atomic step in a traversal. Either an element is visited, or an edge is followed
+/// A single atomic step in a traversal.
pub enum TraversalItem<K, V, E> {
- Elem(K, V),
+ /// An element is visited. This isn't written as `Elem(K, V)` just because `opt.map(Elem)`
+ /// requires the function to take a single argument. (Enum constructors are functions.)
+ Elem((K, V)),
+ /// An edge is followed.
Edge(E),
}
/// An owning traversal over a node's entries and edges
pub type MoveTraversal<K, V> = AbsTraversal<MoveTraversalImpl<K, V>>;
-#[old_impl_check]
-impl<K, V, E, Impl: TraversalImpl<K, V, E>> Iterator for AbsTraversal<Impl> {
+
+impl<K, V, E, Impl> Iterator for AbsTraversal<Impl>
+ where Impl: TraversalImpl<Item=(K, V), Edge=E> {
type Item = TraversalItem<K, V, E>;
fn next(&mut self) -> Option<TraversalItem<K, V, E>> {
- let head_is_edge = self.head_is_edge;
- self.head_is_edge = !head_is_edge;
+ self.next_edge_item().map(Edge).or_else(||
+ self.next_kv_item().map(Elem)
+ )
+ }
+}
- if head_is_edge && self.has_edges {
- self.inner.next_edge().map(|node| Edge(node))
+impl<K, V, E, Impl> DoubleEndedIterator for AbsTraversal<Impl>
+ where Impl: TraversalImpl<Item=(K, V), Edge=E> {
+ fn next_back(&mut self) -> Option<TraversalItem<K, V, E>> {
+ self.next_edge_item_back().map(Edge).or_else(||
+ self.next_kv_item_back().map(Elem)
+ )
+ }
+}
+
+impl<K, V, E, Impl> AbsTraversal<Impl>
+ where Impl: TraversalImpl<Item=(K, V), Edge=E> {
+ /// Advances the iterator and returns the item if it's an edge. Returns None
+ /// and does nothing if the first item is not an edge.
+ pub fn next_edge_item(&mut self) -> Option<E> {
+ // NB. `&& self.has_edges` might be redundant in this condition.
+ let edge = if self.head_is_edge && self.has_edges {
+ self.inner.next_edge()
+ } else {
+ None
+ };
+ self.head_is_edge = false;
+ edge
+ }
+
+ /// Advances the iterator and returns the item if it's an edge. Returns None
+ /// and does nothing if the last item is not an edge.
+ pub fn next_edge_item_back(&mut self) -> Option<E> {
+ let edge = if self.tail_is_edge && self.has_edges {
+ self.inner.next_edge_back()
+ } else {
+ None
+ };
+ self.tail_is_edge = false;
+ edge
+ }
+
+ /// Advances the iterator and returns the item if it's a key-value pair. Returns None
+ /// and does nothing if the first item is not a key-value pair.
+ pub fn next_kv_item(&mut self) -> Option<(K, V)> {
+ if !self.head_is_edge {
+ self.head_is_edge = true;
+ self.inner.next_kv()
+ } else {
+ None
+ }
+ }
+
+ /// Advances the iterator and returns the item if it's a key-value pair. Returns None
+ /// and does nothing if the last item is not a key-value pair.
+ pub fn next_kv_item_back(&mut self) -> Option<(K, V)> {
+ if !self.tail_is_edge {
+ self.tail_is_edge = true;
+ self.inner.next_kv_back()
} else {
- self.inner.next_kv().map(|(k, v)| Elem(k, v))
+ None
}
}
}
-#[old_impl_check]
-impl<K, V, E, Impl: TraversalImpl<K, V, E>> DoubleEndedIterator for AbsTraversal<Impl> {
- fn next_back(&mut self) -> Option<TraversalItem<K, V, E>> {
- let tail_is_edge = self.tail_is_edge;
- self.tail_is_edge = !tail_is_edge;
+macro_rules! node_slice_impl {
+ ($NodeSlice:ident, $Traversal:ident,
+ $as_slices_internal:ident, $slice_from:ident, $slice_to:ident, $iter:ident) => {
+ impl<'a, K: Ord + 'a, V: 'a> $NodeSlice<'a, K, V> {
+ /// Performs linear search in a slice. Returns a tuple of (index, is_exact_match).
+ fn search_linear<Q: ?Sized>(&self, key: &Q) -> (uint, bool)
+ where Q: BorrowFrom<K> + Ord {
+ for (i, k) in self.keys.iter().enumerate() {
+ match key.cmp(BorrowFrom::borrow_from(k)) {
+ Greater => {},
+ Equal => return (i, true),
+ Less => return (i, false),
+ }
+ }
+ (self.keys.len(), false)
+ }
- if tail_is_edge && self.has_edges {
- self.inner.next_edge_back().map(|node| Edge(node))
- } else {
- self.inner.next_kv_back().map(|(k, v)| Elem(k, v))
+ /// Returns a sub-slice with elements starting with `min_key`.
+ pub fn slice_from(self, min_key: &K) -> $NodeSlice<'a, K, V> {
+ // _______________
+ // |_1_|_3_|_5_|_7_|
+ // | | | | |
+ // 0 0 1 1 2 2 3 3 4 index
+ // | | | | |
+ // \___|___|___|___/ slice_from(&0); pos = 0
+ // \___|___|___/ slice_from(&2); pos = 1
+ // |___|___|___/ slice_from(&3); pos = 1; result.head_is_edge = false
+ // \___|___/ slice_from(&4); pos = 2
+ // \___/ slice_from(&6); pos = 3
+ // \|/ slice_from(&999); pos = 4
+ let (pos, pos_is_kv) = self.search_linear(min_key);
+ $NodeSlice {
+ has_edges: self.has_edges,
+ edges: if !self.has_edges {
+ self.edges
+ } else {
+ self.edges.$slice_from(pos)
+ },
+ keys: self.keys.slice_from(pos),
+ vals: self.vals.$slice_from(pos),
+ head_is_edge: !pos_is_kv,
+ tail_is_edge: self.tail_is_edge,
+ }
+ }
+
+ /// Returns a sub-slice with elements up to and including `max_key`.
+ pub fn slice_to(self, max_key: &K) -> $NodeSlice<'a, K, V> {
+ // _______________
+ // |_1_|_3_|_5_|_7_|
+ // | | | | |
+ // 0 0 1 1 2 2 3 3 4 index
+ // | | | | |
+ //\|/ | | | | slice_to(&0); pos = 0
+ // \___/ | | | slice_to(&2); pos = 1
+ // \___|___| | | slice_to(&3); pos = 1; result.tail_is_edge = false
+ // \___|___/ | | slice_to(&4); pos = 2
+ // \___|___|___/ | slice_to(&6); pos = 3
+ // \___|___|___|___/ slice_to(&999); pos = 4
+ let (pos, pos_is_kv) = self.search_linear(max_key);
+ let pos = pos + if pos_is_kv { 1 } else { 0 };
+ $NodeSlice {
+ has_edges: self.has_edges,
+ edges: if !self.has_edges {
+ self.edges
+ } else {
+ self.edges.$slice_to(pos + 1)
+ },
+ keys: self.keys.slice_to(pos),
+ vals: self.vals.$slice_to(pos),
+ head_is_edge: self.head_is_edge,
+ tail_is_edge: !pos_is_kv,
+ }
+ }
+ }
+
+ impl<'a, K: 'a, V: 'a> $NodeSlice<'a, K, V> {
+ /// Returns an iterator over key/value pairs and edges in a slice.
+ #[inline]
+ pub fn $iter(self) -> $Traversal<'a, K, V> {
+ let mut edges = self.edges.$iter();
+ // Skip edges at both ends, if excluded.
+ if !self.head_is_edge { edges.next(); }
+ if !self.tail_is_edge { edges.next_back(); }
+ // The key iterator is always immutable.
+ $Traversal {
+ inner: ElemsAndEdges(
+ self.keys.iter().zip(self.vals.$iter()),
+ edges
+ ),
+ head_is_edge: self.head_is_edge,
+ tail_is_edge: self.tail_is_edge,
+ has_edges: self.has_edges,
+ }
+ }
}
}
}
+
+node_slice_impl!(NodeSlice, Traversal, as_slices_internal, slice_from, slice_to, iter);
+node_slice_impl!(MutNodeSlice, MutTraversal, as_slices_internal_mut, slice_from_mut,
+ slice_to_mut, iter_mut);
use core::ops::{BitOr, BitAnd, BitXor, Sub};
use btree_map::{BTreeMap, Keys};
+use Bound;
// FIXME(conventions): implement bounded iterators
iter: Map<(T, ()), T, ::btree_map::IntoIter<T, ()>, fn((T, ())) -> T>
}
+/// An iterator over a sub-range of BTreeSet's items.
+pub struct Range<'a, T: 'a> {
+ iter: Map<(&'a T, &'a ()), &'a T, ::btree_map::Range<'a, T, ()>, fn((&'a T, &'a ())) -> &'a T>
+}
+
/// A lazy iterator producing elements in the set difference (in-order).
#[stable]
pub struct Difference<'a, T:'a> {
}
}
+impl<T: Ord> BTreeSet<T> {
+ /// Constructs a double-ended iterator over a sub-range of elements in the set, starting
+ /// at min, and ending at max. If min is `Unbounded`, then it will be treated as "negative
+ /// infinity", and if max is `Unbounded`, then it will be treated as "positive infinity".
+ /// Thus range(Unbounded, Unbounded) will yield the whole collection.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ /// use std::collections::Bound::{Included, Unbounded};
+ ///
+ /// let mut set = BTreeSet::new();
+ /// set.insert(3u);
+ /// set.insert(5u);
+ /// set.insert(8u);
+ /// for &elem in set.range(Included(&4), Included(&8)) {
+ /// println!("{}", elem);
+ /// }
+ /// assert_eq!(Some(&5u), set.range(Included(&4), Unbounded).next());
+ /// ```
+ #[unstable = "matches collection reform specification, waiting for dust to settle"]
+ pub fn range<'a>(&'a self, min: Bound<&T>, max: Bound<&T>) -> Range<'a, T> {
+ fn first<A, B>((a, _): (A, B)) -> A { a }
+ let first: fn((&'a T, &'a ())) -> &'a T = first; // coerce to fn pointer
+
+ Range { iter: self.map.range(min, max).map(first) }
+ }
+}
+
impl<T: Ord> BTreeSet<T> {
/// Visits the values representing the difference, in ascending order.
///
#[stable]
impl<T> ExactSizeIterator for IntoIter<T> {}
+
+impl<'a, T> Iterator for Range<'a, T> {
+ type Item = &'a T;
+
+ fn next(&mut self) -> Option<&'a T> { self.iter.next() }
+}
+impl<'a, T> DoubleEndedIterator for Range<'a, T> {
+ fn next_back(&mut self) -> Option<&'a T> { self.iter.next_back() }
+}
+
/// Compare `x` and `y`, but return `short` if x is None and `long` if y is None
fn cmp_opt<T: Ord>(x: Option<&T>, y: Option<&T>,
short: Ordering, long: Ordering) -> Ordering {
#![feature(unsafe_destructor, slicing_syntax)]
#![feature(box_syntax)]
#![feature(unboxed_closures)]
-#![feature(old_impl_check)]
#![allow(unknown_features)] #![feature(int_uint)]
#![allow(unstable)]
#![no_std]
pub use string::{String, ToString};
pub use vec::Vec;
}
+
+/// An endpoint of a range of keys.
+///
+/// A pair of `Bound` values describes the interval queried by the `range`
+/// methods of the ordered collections (e.g. `BTreeMap`, `BTreeSet`).
+pub enum Bound<T> {
+    /// An inclusive bound.
+    Included(T),
+    /// An exclusive bound.
+    Excluded(T),
+    /// An infinite endpoint. Indicates that there is no bound in this direction.
+    Unbounded,
+}
impl RawPointerDerive {
pub fn new() -> RawPointerDerive {
RawPointerDerive {
- checked_raw_pointers: NodeSet::new(),
+ checked_raw_pointers: NodeSet(),
}
}
}
// collect all mutable pattern and group their NodeIDs by their Identifier to
// avoid false warnings in match arms with multiple patterns
- let mut mutables = FnvHashMap::new();
+ let mut mutables = FnvHashMap();
for p in pats.iter() {
pat_util::pat_bindings(&cx.tcx.def_map, &**p, |mode, id, _, path1| {
let ident = path1.node;
LintStore {
lints: vec!(),
passes: Some(vec!()),
- by_name: FnvHashMap::new(),
- levels: FnvHashMap::new(),
- lint_groups: FnvHashMap::new(),
+ by_name: FnvHashMap(),
+ levels: FnvHashMap(),
+ lint_groups: FnvHashMap(),
}
}
exported_items: exported_items,
lints: lint_store,
level_stack: vec![],
- node_levels: RefCell::new(FnvHashMap::new()),
+ node_levels: RefCell::new(FnvHashMap()),
}
}
impl CStore {
pub fn new(intr: Rc<IdentInterner>) -> CStore {
CStore {
- metas: RefCell::new(FnvHashMap::new()),
- extern_mod_crate_map: RefCell::new(FnvHashMap::new()),
+ metas: RefCell::new(FnvHashMap()),
+ extern_mod_crate_map: RefCell::new(FnvHashMap()),
used_crate_sources: RefCell::new(Vec::new()),
used_libraries: RefCell::new(Vec::new()),
used_link_args: RefCell::new(Vec::new()),
link_meta: link_meta,
cstore: cstore,
encode_inlined_item: RefCell::new(encode_inlined_item),
- type_abbrevs: RefCell::new(FnvHashMap::new()),
+ type_abbrevs: RefCell::new(FnvHashMap()),
reachable: reachable,
};
diag: tcx.sess.diagnostic(),
ds: def_to_string,
tcx: tcx,
- abbrevs: &RefCell::new(FnvHashMap::new())
+ abbrevs: &RefCell::new(FnvHashMap())
}, t);
String::from_utf8(wr.unwrap()).unwrap()
}
let block_exit;
let mut cfg_builder = CFGBuilder {
- exit_map: NodeMap::new(),
+ exit_map: NodeMap(),
graph: graph,
fn_exit: fn_exit,
tcx: tcx,
pub fn check_crate(tcx: &ty::ctxt) {
let mut checker = GlobalChecker {
- static_consumptions: NodeSet::new(),
- const_borrows: NodeSet::new(),
- static_interior_borrows: NodeSet::new(),
- static_local_borrows: NodeSet::new(),
+ static_consumptions: NodeSet(),
+ const_borrows: NodeSet(),
+ static_interior_borrows: NodeSet(),
+ static_local_borrows: NodeSet(),
};
{
let param_env = ty::empty_parameter_environment(tcx);
pub fn process_crate(tcx: &ty::ctxt) {
visit::walk_crate(&mut ConstEvalVisitor {
tcx: tcx,
- ccache: DefIdMap::new(),
+ ccache: DefIdMap(),
}, tcx.map.krate());
tcx.sess.abort_if_errors();
}
fn build_nodeid_to_index(decl: Option<&ast::FnDecl>,
cfg: &cfg::CFG) -> NodeMap<CFGIndex> {
- let mut index = NodeMap::new();
+ let mut index = NodeMap();
// FIXME (#6298): Would it be better to fold formals from decl
// into cfg itself? i.e. introduce a fn-based flow-graph in
config::CrateTypeExecutable | config::CrateTypeDylib => {},
}
- let mut formats = FnvHashMap::new();
+ let mut formats = FnvHashMap();
// Sweep all crates for found dylibs. Add all dylibs, as well as their
// dependencies, ensuring there are no conflicts. The only valid case for a
ast::Return(ref ret_ty) => ast::Return(
self.rebuild_arg_ty_or_output(&**ret_ty, lifetime, anon_nums, region_names)
),
+ ast::DefaultReturn(span) => ast::DefaultReturn(span),
ast::NoReturn(span) => ast::NoReturn(span)
}
}
name: String,
map: &'a ConstraintMap<'tcx>) -> ConstraintGraph<'a, 'tcx> {
let mut i = 0;
- let mut node_ids = FnvHashMap::new();
+ let mut node_ids = FnvHashMap();
{
let mut add_node = |&mut : node| {
if let Vacant(e) = node_ids.entry(node) {
impl<'a, 'tcx> dot::GraphWalk<'a, Node, Edge> for ConstraintGraph<'a, 'tcx> {
fn nodes(&self) -> dot::Nodes<Node> {
- let mut set = FnvHashSet::new();
+ let mut set = FnvHashSet();
for constraint in self.map.keys() {
let (n1, n2) = constraint_to_nodes(constraint);
set.insert(n1);
tcx: tcx,
var_origins: RefCell::new(Vec::new()),
values: RefCell::new(None),
- constraints: RefCell::new(FnvHashMap::new()),
+ constraints: RefCell::new(FnvHashMap()),
verifys: RefCell::new(Vec::new()),
- givens: RefCell::new(FnvHashSet::new()),
- lubs: RefCell::new(FnvHashMap::new()),
- glbs: RefCell::new(FnvHashMap::new()),
+ givens: RefCell::new(FnvHashSet()),
+ lubs: RefCell::new(FnvHashMap()),
+ glbs: RefCell::new(FnvHashMap()),
skolemization_count: Cell::new(0),
bound_count: Cell::new(0),
undo_log: RefCell::new(Vec::new())
values: &Vec<VarValue>,
errors: &mut Vec<RegionResolutionError<'tcx>>)
{
- let mut reg_reg_dups = FnvHashSet::new();
+ let mut reg_reg_dups = FnvHashSet();
for verify in self.verifys.borrow().iter() {
match *verify {
VerifyRegSubReg(ref origin, sub, sup) => {
dup_found: bool
}
let mut state = WalkState {
- set: FnvHashSet::new(),
+ set: FnvHashSet(),
stack: vec!(orig_node_idx),
result: Vec::new(),
dup_found: false
impl<'a> LanguageItemCollector<'a> {
pub fn new(session: &'a Session) -> LanguageItemCollector<'a> {
- let mut item_refs = FnvHashMap::new();
+ let mut item_refs = FnvHashMap();
$( item_refs.insert($name, $variant as uint); )*
tcx: tcx,
num_live_nodes: 0,
num_vars: 0,
- live_node_map: NodeMap::new(),
- variable_map: NodeMap::new(),
- capture_info_map: NodeMap::new(),
+ live_node_map: NodeMap(),
+ variable_map: NodeMap(),
+ capture_info_map: NodeMap(),
var_kinds: Vec::new(),
lnks: Vec::new(),
}
successors: repeat(invalid_node()).take(num_live_nodes).collect(),
users: repeat(invalid_users()).take(num_live_nodes * num_vars).collect(),
loop_scope: Vec::new(),
- break_ln: NodeMap::new(),
- cont_ln: NodeMap::new(),
+ break_ln: NodeMap(),
+ cont_ln: NodeMap(),
}
}
// This is used because same-named variables in alternative patterns need to
// use the NodeId of their namesake in the first pattern.
pub fn pat_id_map(dm: &DefMap, pat: &ast::Pat) -> PatIdMap {
- let mut map = FnvHashMap::new();
+ let mut map = FnvHashMap();
pat_bindings(dm, pat, |_bm, p_id, _s, path1| {
map.insert(path1.node, p_id);
});
});
ReachableContext {
tcx: tcx,
- reachable_symbols: NodeSet::new(),
+ reachable_symbols: NodeSet(),
worklist: Vec::new(),
any_library: any_library,
}
pub fn resolve_crate(sess: &Session, krate: &ast::Crate) -> RegionMaps {
let maps = RegionMaps {
- scope_map: RefCell::new(FnvHashMap::new()),
- var_map: RefCell::new(NodeMap::new()),
- free_region_map: RefCell::new(FnvHashMap::new()),
- rvalue_scopes: RefCell::new(NodeMap::new()),
- terminating_scopes: RefCell::new(FnvHashSet::new()),
+ scope_map: RefCell::new(FnvHashMap()),
+ var_map: RefCell::new(NodeMap()),
+ free_region_map: RefCell::new(FnvHashMap()),
+ rvalue_scopes: RefCell::new(NodeMap()),
+ terminating_scopes: RefCell::new(FnvHashSet()),
};
{
let mut visitor = RegionResolutionVisitor {
static ROOT_SCOPE: ScopeChain<'static> = RootScope;
pub fn krate(sess: &Session, krate: &ast::Crate, def_map: &DefMap) -> NamedRegionMap {
- let mut named_region_map = NodeMap::new();
+ let mut named_region_map = NodeMap();
visit::walk_crate(&mut LifetimeContext {
sess: sess,
named_region_map: &mut named_region_map,
pub fn build(krate: &Crate) -> Index {
let mut annotator = Annotator {
index: Index {
- local: NodeMap::new(),
- extern_cache: DefIdMap::new()
+ local: NodeMap(),
+ extern_cache: DefIdMap()
},
parent: None
};
duplicate_set: HashSet::new(),
predicates: Vec::new(),
attempted_mark: 0,
- region_obligations: NodeMap::new(),
+ region_obligations: NodeMap(),
}
}
lang_items: middle::lang_items::LanguageItems,
stability: stability::Index) -> ctxt<'tcx>
{
- let mut interner = FnvHashMap::new();
+ let mut interner = FnvHashMap();
let common_types = CommonTypes::new(&arenas.type_, &mut interner);
ctxt {
arenas: arenas,
interner: RefCell::new(interner),
- substs_interner: RefCell::new(FnvHashMap::new()),
- bare_fn_interner: RefCell::new(FnvHashMap::new()),
- region_interner: RefCell::new(FnvHashMap::new()),
+ substs_interner: RefCell::new(FnvHashMap()),
+ bare_fn_interner: RefCell::new(FnvHashMap()),
+ region_interner: RefCell::new(FnvHashMap()),
types: common_types,
named_region_map: named_region_map,
- item_variance_map: RefCell::new(DefIdMap::new()),
+ item_variance_map: RefCell::new(DefIdMap()),
variance_computed: Cell::new(false),
sess: s,
def_map: dm,
region_maps: region_maps,
- node_types: RefCell::new(FnvHashMap::new()),
- item_substs: RefCell::new(NodeMap::new()),
- trait_refs: RefCell::new(NodeMap::new()),
- trait_defs: RefCell::new(DefIdMap::new()),
- object_cast_map: RefCell::new(NodeMap::new()),
+ node_types: RefCell::new(FnvHashMap()),
+ item_substs: RefCell::new(NodeMap()),
+ trait_refs: RefCell::new(NodeMap()),
+ trait_defs: RefCell::new(DefIdMap()),
+ object_cast_map: RefCell::new(NodeMap()),
map: map,
- intrinsic_defs: RefCell::new(DefIdMap::new()),
+ intrinsic_defs: RefCell::new(DefIdMap()),
freevars: freevars,
- tcache: RefCell::new(DefIdMap::new()),
- rcache: RefCell::new(FnvHashMap::new()),
- short_names_cache: RefCell::new(FnvHashMap::new()),
- tc_cache: RefCell::new(FnvHashMap::new()),
- ast_ty_to_ty_cache: RefCell::new(NodeMap::new()),
- enum_var_cache: RefCell::new(DefIdMap::new()),
- impl_or_trait_items: RefCell::new(DefIdMap::new()),
- trait_item_def_ids: RefCell::new(DefIdMap::new()),
- trait_items_cache: RefCell::new(DefIdMap::new()),
- impl_trait_cache: RefCell::new(DefIdMap::new()),
- ty_param_defs: RefCell::new(NodeMap::new()),
- adjustments: RefCell::new(NodeMap::new()),
- normalized_cache: RefCell::new(FnvHashMap::new()),
+ tcache: RefCell::new(DefIdMap()),
+ rcache: RefCell::new(FnvHashMap()),
+ short_names_cache: RefCell::new(FnvHashMap()),
+ tc_cache: RefCell::new(FnvHashMap()),
+ ast_ty_to_ty_cache: RefCell::new(NodeMap()),
+ enum_var_cache: RefCell::new(DefIdMap()),
+ impl_or_trait_items: RefCell::new(DefIdMap()),
+ trait_item_def_ids: RefCell::new(DefIdMap()),
+ trait_items_cache: RefCell::new(DefIdMap()),
+ impl_trait_cache: RefCell::new(DefIdMap()),
+ ty_param_defs: RefCell::new(NodeMap()),
+ adjustments: RefCell::new(NodeMap()),
+ normalized_cache: RefCell::new(FnvHashMap()),
lang_items: lang_items,
- provided_method_sources: RefCell::new(DefIdMap::new()),
- struct_fields: RefCell::new(DefIdMap::new()),
- destructor_for_type: RefCell::new(DefIdMap::new()),
- destructors: RefCell::new(DefIdSet::new()),
- trait_impls: RefCell::new(DefIdMap::new()),
- inherent_impls: RefCell::new(DefIdMap::new()),
- impl_items: RefCell::new(DefIdMap::new()),
- used_unsafe: RefCell::new(NodeSet::new()),
- used_mut_nodes: RefCell::new(NodeSet::new()),
- populated_external_types: RefCell::new(DefIdSet::new()),
- populated_external_traits: RefCell::new(DefIdSet::new()),
- upvar_borrow_map: RefCell::new(FnvHashMap::new()),
- extern_const_statics: RefCell::new(DefIdMap::new()),
- extern_const_variants: RefCell::new(DefIdMap::new()),
- method_map: RefCell::new(FnvHashMap::new()),
- dependency_formats: RefCell::new(FnvHashMap::new()),
- unboxed_closures: RefCell::new(DefIdMap::new()),
- node_lint_levels: RefCell::new(FnvHashMap::new()),
+ provided_method_sources: RefCell::new(DefIdMap()),
+ struct_fields: RefCell::new(DefIdMap()),
+ destructor_for_type: RefCell::new(DefIdMap()),
+ destructors: RefCell::new(DefIdSet()),
+ trait_impls: RefCell::new(DefIdMap()),
+ inherent_impls: RefCell::new(DefIdMap()),
+ impl_items: RefCell::new(DefIdMap()),
+ used_unsafe: RefCell::new(NodeSet()),
+ used_mut_nodes: RefCell::new(NodeSet()),
+ populated_external_types: RefCell::new(DefIdSet()),
+ populated_external_traits: RefCell::new(DefIdSet()),
+ upvar_borrow_map: RefCell::new(FnvHashMap()),
+ extern_const_statics: RefCell::new(DefIdMap()),
+ extern_const_variants: RefCell::new(DefIdMap()),
+ method_map: RefCell::new(FnvHashMap()),
+ dependency_formats: RefCell::new(FnvHashMap()),
+ unboxed_closures: RefCell::new(DefIdMap()),
+ node_lint_levels: RefCell::new(FnvHashMap()),
transmute_restrictions: RefCell::new(Vec::new()),
stability: RefCell::new(stability),
capture_modes: capture_modes,
- associated_types: RefCell::new(DefIdMap::new()),
+ associated_types: RefCell::new(DefIdMap()),
selection_cache: traits::SelectionCache::new(),
- repr_hint_cache: RefCell::new(DefIdMap::new()),
+ repr_hint_cache: RefCell::new(DefIdMap()),
type_impls_copy_cache: RefCell::new(HashMap::new()),
type_impls_sized_cache: RefCell::new(HashMap::new()),
- object_safety_cache: RefCell::new(DefIdMap::new()),
+ object_safety_cache: RefCell::new(DefIdMap()),
}
}
pub fn type_contents<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>) -> TypeContents {
return memoized(&cx.tc_cache, ty, |ty| {
- tc_ty(cx, ty, &mut FnvHashMap::new())
+ tc_ty(cx, ty, &mut FnvHashMap())
});
fn tc_ty<'tcx>(cx: &ctxt<'tcx>,
{
debug!("replace_late_bound_regions({})", binder.repr(tcx));
- let mut map = FnvHashMap::new();
+ let mut map = FnvHashMap();
// Note: fold the field `0`, not the binder, so that late-bound
// regions bound by `binder` are considered free.
local_crate_source_file: local_crate_source_file,
working_dir: os::getcwd().unwrap(),
lint_store: RefCell::new(lint::LintStore::new()),
- lints: RefCell::new(NodeMap::new()),
+ lints: RefCell::new(NodeMap()),
crate_types: RefCell::new(Vec::new()),
crate_metadata: RefCell::new(Vec::new()),
features: RefCell::new(feature_gate::Features::new()),
use std::collections::hash_state::{DefaultState};
use std::collections::{HashMap, HashSet};
use std::default::Default;
-use std::hash::{Hasher, Writer};
+use std::hash::{Hasher, Writer, Hash};
use syntax::ast;
pub type FnvHashMap<K, V> = HashMap<K, V, DefaultState<FnvHasher>>;
pub type NodeSet = FnvHashSet<ast::NodeId>;
pub type DefIdSet = FnvHashSet<ast::DefId>;
-// Hacks to get good names
-pub mod FnvHashMap {
-    use std::hash::Hash;
-    use std::default::Default;
-    pub fn new<K: Hash<super::FnvHasher> + Eq, V>() -> super::FnvHashMap<K, V> {
-        Default::default()
-    }
-}
-pub mod FnvHashSet {
-    use std::hash::Hash;
-    use std::default::Default;
-    pub fn new<V: Hash<super::FnvHasher> + Eq>() -> super::FnvHashSet<V> {
-        Default::default()
-    }
+// Constructor functions deliberately named after the type aliases they build:
+// call sites read `FnvHashMap()` where they previously read the
+// `FnvHashMap::new()` module hack above. Each just returns the
+// default-initialized collection.
+pub fn FnvHashMap<K: Hash<FnvHasher> + Eq, V>() -> FnvHashMap<K, V> {
+    Default::default()
}
-pub mod NodeMap {
-    pub fn new<T>() -> super::NodeMap<T> {
-        super::FnvHashMap::new()
-    }
-}
-pub mod DefIdMap {
-    pub fn new<T>() -> super::DefIdMap<T> {
-        super::FnvHashMap::new()
-    }
-}
-pub mod NodeSet {
-    pub fn new() -> super::NodeSet {
-        super::FnvHashSet::new()
-    }
-}
-pub mod DefIdSet {
-    pub fn new() -> super::DefIdSet {
-        super::FnvHashSet::new()
-    }
+pub fn FnvHashSet<V: Hash<FnvHasher> + Eq>() -> FnvHashSet<V> {
+    Default::default()
}
+// Convenience constructors for the NodeId/DefId specializations of the
+// FNV-hashed map and set aliases.
+pub fn NodeMap<T>() -> NodeMap<T> { FnvHashMap() }
+pub fn DefIdMap<T>() -> DefIdMap<T> { FnvHashMap() }
+pub fn NodeSet() -> NodeSet { FnvHashSet() }
+pub fn DefIdSet() -> DefIdSet { FnvHashSet() }
+
/// A speedy hash algorithm for node ids and def ids. The hashmap in
/// libcollections by default uses SipHash which isn't quite as speedy as we
/// want. In the compiler we're not really worried about DOS attempts, so we
pub fn new() -> MoveData<'tcx> {
MoveData {
paths: RefCell::new(Vec::new()),
- path_map: RefCell::new(FnvHashMap::new()),
+ path_map: RefCell::new(FnvHashMap()),
moves: RefCell::new(Vec::new()),
path_assignments: RefCell::new(Vec::new()),
var_assignments: RefCell::new(Vec::new()),
variant_matches: RefCell::new(Vec::new()),
- assignee_ids: RefCell::new(NodeSet::new()),
+ assignee_ids: RefCell::new(NodeSet()),
fragments: RefCell::new(fragments::FragmentSets::new()),
}
}
// Figure out who everyone's parent is
let mut visitor = ParentVisitor {
- parents: NodeMap::new(),
+ parents: NodeMap(),
curparent: ast::DUMMY_NODE_ID,
};
visit::walk_crate(&mut visitor, krate);
// items which are reachable from external crates based on visibility.
let mut visitor = EmbargoVisitor {
tcx: tcx,
- exported_items: NodeSet::new(),
- public_items: NodeSet::new(),
- reexports: NodeSet::new(),
+ exported_items: NodeSet(),
+ public_items: NodeSet(),
+ reexports: NodeSet(),
export_map: export_map,
prev_exported: true,
prev_public: true,
// These items live in the type namespace.
ItemTy(..) => {
let name_bindings =
- self.add_child(name, parent, ForbidDuplicateTypesAndModules, sp);
+ self.add_child(name, parent, ForbidDuplicateTypesAndModules,
+ sp);
- name_bindings.define_type(DefTy(local_def(item.id), false), sp, modifiers);
+ name_bindings.define_type(DefTy(local_def(item.id), false), sp,
+ modifiers);
+
+ let parent_link = self.get_parent_link(parent, name);
+ name_bindings.set_module_kind(parent_link,
+ Some(local_def(item.id)),
+ TypeModuleKind,
+ false,
+ is_public,
+ sp);
parent.clone()
}
}
};
- match mod_name {
+ let mod_name = match mod_name {
+ Some(mod_name) => mod_name,
None => {
self.resolve_error(ty.span,
"inherent implementations may \
only be implemented in the same \
module as the type they are \
- implemented for")
+ implemented for");
+ return parent.clone();
}
- Some(mod_name) => {
- // Create the module and add all methods.
- let parent_opt = parent.children.borrow().get(&mod_name).cloned();
- let new_parent = match parent_opt {
- // It already exists
- Some(ref child) if child.get_module_if_available()
- .is_some() &&
- (child.get_module().kind.get() == ImplModuleKind ||
- child.get_module().kind.get() == TraitModuleKind) => {
- child.get_module()
- }
- Some(ref child) if child.get_module_if_available()
- .is_some() &&
- child.get_module().kind.get() ==
- EnumModuleKind => child.get_module(),
- // Create the module
- _ => {
- let name_bindings =
- self.add_child(mod_name, parent, ForbidDuplicateModules, sp);
-
- let parent_link = self.get_parent_link(parent, name);
- let def_id = local_def(item.id);
- let ns = TypeNS;
- let is_public =
- !name_bindings.defined_in_namespace(ns) ||
- name_bindings.defined_in_public_namespace(ns);
-
- name_bindings.define_module(parent_link,
- Some(def_id),
- ImplModuleKind,
- false,
- is_public,
- sp);
-
- name_bindings.get_module()
- }
- };
+ };
+ // Create the module and add all methods.
+ let child_opt = parent.children.borrow().get(&mod_name)
+ .and_then(|m| m.get_module_if_available());
+ let new_parent = match child_opt {
+ // It already exists
+ Some(ref child) if (child.kind.get() == ImplModuleKind ||
+ child.kind.get() == TraitModuleKind) => {
+ child.clone()
+ }
+ Some(ref child) if child.kind.get() == EnumModuleKind ||
+ child.kind.get() == TypeModuleKind => {
+ child.clone()
+ }
+ // Create the module
+ _ => {
+ let name_bindings =
+ self.add_child(mod_name, parent, ForbidDuplicateModules, sp);
+
+ let parent_link = self.get_parent_link(parent, name);
+ let def_id = local_def(item.id);
+ let ns = TypeNS;
+ let is_public =
+ !name_bindings.defined_in_namespace(ns) ||
+ name_bindings.defined_in_public_namespace(ns);
+
+ name_bindings.define_module(parent_link,
+ Some(def_id),
+ ImplModuleKind,
+ false,
+ is_public,
+ sp);
- // For each implementation item...
- for impl_item in impl_items.iter() {
- match *impl_item {
- MethodImplItem(ref method) => {
- // Add the method to the module.
- let name = method.pe_ident().name;
- let method_name_bindings =
- self.add_child(name,
- &new_parent,
- ForbidDuplicateValues,
- method.span);
- let def = match method.pe_explicit_self()
- .node {
- SelfStatic => {
- // Static methods become
- // `DefStaticMethod`s.
- DefStaticMethod(local_def(method.id),
- FromImpl(local_def(item.id)))
- }
- _ => {
- // Non-static methods become
- // `DefMethod`s.
- DefMethod(local_def(method.id),
- None,
- FromImpl(local_def(item.id)))
- }
- };
+ name_bindings.get_module()
+ }
+ };
- // NB: not IMPORTABLE
- let modifiers = if method.pe_vis() == ast::Public {
- PUBLIC
- } else {
- DefModifiers::empty()
- };
- method_name_bindings.define_value(
- def,
- method.span,
- modifiers);
- }
- TypeImplItem(ref typedef) => {
- // Add the typedef to the module.
- let name = typedef.ident.name;
- let typedef_name_bindings =
- self.add_child(
- name,
- &new_parent,
- ForbidDuplicateTypesAndModules,
- typedef.span);
- let def = DefAssociatedTy(local_def(
- typedef.id));
- // NB: not IMPORTABLE
- let modifiers = if typedef.vis == ast::Public {
- PUBLIC
- } else {
- DefModifiers::empty()
- };
- typedef_name_bindings.define_type(
- def,
- typedef.span,
- modifiers);
- }
- }
+ // For each implementation item...
+ for impl_item in impl_items.iter() {
+ match *impl_item {
+ MethodImplItem(ref method) => {
+ // Add the method to the module.
+ let name = method.pe_ident().name;
+ let method_name_bindings =
+ self.add_child(name,
+ &new_parent,
+ ForbidDuplicateValues,
+ method.span);
+ let def = match method.pe_explicit_self()
+ .node {
+ SelfStatic => {
+ // Static methods become
+ // `DefStaticMethod`s.
+ DefStaticMethod(local_def(method.id),
+ FromImpl(local_def(item.id)))
+ }
+ _ => {
+ // Non-static methods become
+ // `DefMethod`s.
+ DefMethod(local_def(method.id),
+ None,
+ FromImpl(local_def(item.id)))
+ }
+ };
+
+ // NB: not IMPORTABLE
+ let modifiers = if method.pe_vis() == ast::Public {
+ PUBLIC
+ } else {
+ DefModifiers::empty()
+ };
+ method_name_bindings.define_value(
+ def,
+ method.span,
+ modifiers);
+ }
+ TypeImplItem(ref typedef) => {
+ // Add the typedef to the module.
+ let name = typedef.ident.name;
+ let typedef_name_bindings =
+ self.add_child(
+ name,
+ &new_parent,
+ ForbidDuplicateTypesAndModules,
+ typedef.span);
+ let def = DefAssociatedTy(local_def(
+ typedef.id));
+ // NB: not IMPORTABLE
+ let modifiers = if typedef.vis == ast::Public {
+ PUBLIC
+ } else {
+ DefModifiers::empty()
+ };
+ typedef_name_bindings.define_type(
+ def,
+ typedef.span,
+ modifiers);
}
}
}
-
parent.clone()
}
let kind = match def {
DefTy(_, true) => EnumModuleKind,
- DefStruct(..) | DefTy(..) => ImplModuleKind,
+ DefTy(_, false) => TypeModuleKind,
+ DefStruct(..) => ImplModuleKind,
_ => NormalModuleKind
};
TraitModuleKind,
ImplModuleKind,
EnumModuleKind,
+ TypeModuleKind,
AnonymousModuleKind,
}
children: RefCell::new(HashMap::new()),
imports: RefCell::new(Vec::new()),
external_module_children: RefCell::new(HashMap::new()),
- anonymous_children: RefCell::new(NodeMap::new()),
+ anonymous_children: RefCell::new(NodeMap()),
import_resolutions: RefCell::new(HashMap::new()),
glob_count: Cell::new(0),
resolved_import_count: Cell::new(0),
graph_root: graph_root,
- trait_item_map: FnvHashMap::new(),
- structs: FnvHashMap::new(),
+ trait_item_map: FnvHashMap(),
+ structs: FnvHashMap(),
unresolved_imports: 0,
primitive_type_table: PrimitiveTypeTable::new(),
- def_map: RefCell::new(NodeMap::new()),
- freevars: RefCell::new(NodeMap::new()),
- freevars_seen: RefCell::new(NodeMap::new()),
- capture_mode_map: NodeMap::new(),
- export_map: NodeMap::new(),
- trait_map: NodeMap::new(),
+ def_map: RefCell::new(NodeMap()),
+ freevars: RefCell::new(NodeMap()),
+ freevars_seen: RefCell::new(NodeMap()),
+ capture_mode_map: NodeMap(),
+ export_map: NodeMap(),
+ trait_map: NodeMap(),
used_imports: HashSet::new(),
used_crates: HashSet::new(),
- external_exports: DefIdSet::new(),
- last_private: NodeMap::new(),
+ external_exports: DefIdSet(),
+ last_private: NodeMap(),
emit_errors: true,
make_glob_map: make_glob_map == MakeGlobMap::Yes,
TraitModuleKind |
ImplModuleKind |
EnumModuleKind |
+ TypeModuleKind |
AnonymousModuleKind => {
search_module = parent_module_node.upgrade().unwrap();
}
TraitModuleKind |
ImplModuleKind |
EnumModuleKind |
+ TypeModuleKind |
AnonymousModuleKind => module_ = new_module,
}
}
TraitModuleKind |
ImplModuleKind |
EnumModuleKind |
+ TypeModuleKind |
AnonymousModuleKind => {
match self.get_nearest_normal_module_parent(module_.clone()) {
None => module_,
let mut seen = self.freevars_seen.borrow_mut();
let seen = match seen.entry(function_id) {
Occupied(v) => v.into_mut(),
- Vacant(v) => v.insert(NodeSet::new()),
+ Vacant(v) => v.insert(NodeSet()),
};
if seen.contains(&node_id) {
continue;
let ccx = bcx.ccx();
let tcx = bcx.tcx();
let reassigned = is_discr_reassigned(bcx, discr, body);
- let mut bindings_map = FnvHashMap::new();
+ let mut bindings_map = FnvHashMap();
pat_bindings(&tcx.def_map, &*pat, |bm, p_id, span, path1| {
let ident = path1.node;
let variable_ty = node_id_type(bcx, p_id);
needs_ret_allocas: nested_returns,
personality: Cell::new(None),
caller_expects_out_pointer: uses_outptr,
- lllocals: RefCell::new(NodeMap::new()),
- llupvars: RefCell::new(NodeMap::new()),
+ lllocals: RefCell::new(NodeMap()),
+ llupvars: RefCell::new(NodeMap()),
id: id,
param_substs: param_substs,
span: sp,
// Build version of path with cycles removed.
// Pass 1: scan table mapping str -> rightmost pos.
- let mut mm = FnvHashMap::new();
+ let mut mm = FnvHashMap();
let len = v.len();
let mut i = 0u;
while i < len {
// cleanups.
pub fn type_needs_unwind_cleanup<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> bool {
return memoized(ccx.needs_unwind_cleanup_cache(), ty, |ty| {
- type_needs_unwind_cleanup_(ccx.tcx(), ty, &mut FnvHashSet::new())
+ type_needs_unwind_cleanup_(ccx.tcx(), ty, &mut FnvHashSet())
});
fn type_needs_unwind_cleanup_<'tcx>(tcx: &ty::ctxt<'tcx>,
metadata_llcx: metadata_llcx,
export_map: export_map,
reachable: reachable,
- item_symbols: RefCell::new(NodeMap::new()),
+ item_symbols: RefCell::new(NodeMap()),
link_meta: link_meta,
symbol_hasher: RefCell::new(symbol_hasher),
tcx: tcx,
n_inlines: Cell::new(0u),
n_closures: Cell::new(0u),
n_llvm_insns: Cell::new(0u),
- llvm_insns: RefCell::new(FnvHashMap::new()),
+ llvm_insns: RefCell::new(FnvHashMap()),
fn_stats: RefCell::new(Vec::new()),
},
- available_monomorphizations: RefCell::new(FnvHashSet::new()),
- available_drop_glues: RefCell::new(FnvHashMap::new()),
+ available_monomorphizations: RefCell::new(FnvHashSet()),
+ available_drop_glues: RefCell::new(FnvHashMap()),
};
for i in range(0, local_count) {
llcx: llcx,
td: td,
tn: TypeNames::new(),
- externs: RefCell::new(FnvHashMap::new()),
- item_vals: RefCell::new(NodeMap::new()),
- needs_unwind_cleanup_cache: RefCell::new(FnvHashMap::new()),
- fn_pointer_shims: RefCell::new(FnvHashMap::new()),
- drop_glues: RefCell::new(FnvHashMap::new()),
- tydescs: RefCell::new(FnvHashMap::new()),
+ externs: RefCell::new(FnvHashMap()),
+ item_vals: RefCell::new(NodeMap()),
+ needs_unwind_cleanup_cache: RefCell::new(FnvHashMap()),
+ fn_pointer_shims: RefCell::new(FnvHashMap()),
+ drop_glues: RefCell::new(FnvHashMap()),
+ tydescs: RefCell::new(FnvHashMap()),
finished_tydescs: Cell::new(false),
- external: RefCell::new(DefIdMap::new()),
- external_srcs: RefCell::new(NodeMap::new()),
- monomorphized: RefCell::new(FnvHashMap::new()),
- monomorphizing: RefCell::new(DefIdMap::new()),
- vtables: RefCell::new(FnvHashMap::new()),
- const_cstr_cache: RefCell::new(FnvHashMap::new()),
- const_globals: RefCell::new(FnvHashMap::new()),
- const_values: RefCell::new(NodeMap::new()),
- static_values: RefCell::new(NodeMap::new()),
- extern_const_values: RefCell::new(DefIdMap::new()),
- impl_method_cache: RefCell::new(FnvHashMap::new()),
- closure_bare_wrapper_cache: RefCell::new(FnvHashMap::new()),
- lltypes: RefCell::new(FnvHashMap::new()),
- llsizingtypes: RefCell::new(FnvHashMap::new()),
- adt_reprs: RefCell::new(FnvHashMap::new()),
- type_hashcodes: RefCell::new(FnvHashMap::new()),
- all_llvm_symbols: RefCell::new(FnvHashSet::new()),
+ external: RefCell::new(DefIdMap()),
+ external_srcs: RefCell::new(NodeMap()),
+ monomorphized: RefCell::new(FnvHashMap()),
+ monomorphizing: RefCell::new(DefIdMap()),
+ vtables: RefCell::new(FnvHashMap()),
+ const_cstr_cache: RefCell::new(FnvHashMap()),
+ const_globals: RefCell::new(FnvHashMap()),
+ const_values: RefCell::new(NodeMap()),
+ static_values: RefCell::new(NodeMap()),
+ extern_const_values: RefCell::new(DefIdMap()),
+ impl_method_cache: RefCell::new(FnvHashMap()),
+ closure_bare_wrapper_cache: RefCell::new(FnvHashMap()),
+ lltypes: RefCell::new(FnvHashMap()),
+ llsizingtypes: RefCell::new(FnvHashMap()),
+ adt_reprs: RefCell::new(FnvHashMap()),
+ type_hashcodes: RefCell::new(FnvHashMap()),
+ all_llvm_symbols: RefCell::new(FnvHashSet()),
int_type: Type::from_ref(ptr::null_mut()),
opaque_vec_type: Type::from_ref(ptr::null_mut()),
builder: BuilderRef_res(llvm::LLVMCreateBuilderInContext(llcx)),
- unboxed_closure_vals: RefCell::new(FnvHashMap::new()),
+ unboxed_closure_vals: RefCell::new(FnvHashMap()),
dbg_cx: dbg_cx,
eh_personality: RefCell::new(None),
- intrinsics: RefCell::new(FnvHashMap::new()),
+ intrinsics: RefCell::new(FnvHashMap()),
n_llvm_insns: Cell::new(0u),
- trait_cache: RefCell::new(FnvHashMap::new()),
+ trait_cache: RefCell::new(FnvHashMap()),
};
local_ccx.int_type = Type::int(&local_ccx.dummy_ccx(shared));
fn new() -> TypeMap<'tcx> {
TypeMap {
unique_id_interner: Interner::new(),
- type_to_metadata: FnvHashMap::new(),
- unique_id_to_metadata: FnvHashMap::new(),
- type_to_unique_id: FnvHashMap::new(),
+ type_to_metadata: FnvHashMap(),
+ unique_id_to_metadata: FnvHashMap(),
+ type_to_unique_id: FnvHashMap(),
}
}
llcontext: llcontext,
builder: builder,
current_debug_location: Cell::new(UnknownLocation),
- created_files: RefCell::new(FnvHashMap::new()),
- created_enum_disr_types: RefCell::new(DefIdMap::new()),
+ created_files: RefCell::new(FnvHashMap()),
+ created_enum_disr_types: RefCell::new(DefIdMap()),
type_map: RefCell::new(TypeMap::new()),
- namespace_map: RefCell::new(FnvHashMap::new()),
- composite_types_completed: RefCell::new(FnvHashSet::new()),
+ namespace_map: RefCell::new(FnvHashMap()),
+ composite_types_completed: RefCell::new(FnvHashSet()),
};
}
}
let mut signature = Vec::with_capacity(fn_decl.inputs.len() + 1);
// Return type -- llvm::DIBuilder wants this at index 0
- match fn_decl.output {
- ast::Return(ref ret_ty) if ret_ty.node == ast::TyTup(vec![]) =>
- signature.push(ptr::null_mut()),
- _ => {
- assert_type_for_node_id(cx, fn_ast_id, error_reporting_span);
-
- let return_type = ty::node_id_to_type(cx.tcx(), fn_ast_id);
- let return_type = monomorphize::apply_param_substs(cx.tcx(),
- param_substs,
- &return_type);
- signature.push(type_metadata(cx, return_type, codemap::DUMMY_SP));
- }
+ assert_type_for_node_id(cx, fn_ast_id, error_reporting_span);
+ let return_type = ty::node_id_to_type(cx.tcx(), fn_ast_id);
+ let return_type = monomorphize::apply_param_substs(cx.tcx(),
+ param_substs,
+ &return_type);
+ if ty::type_is_nil(return_type) {
+ signature.push(ptr::null_mut())
+ } else {
+ signature.push(type_metadata(cx, return_type, codemap::DUMMY_SP));
}
// Arguments types
fn_metadata: DISubprogram,
fn_ast_id: ast::NodeId)
-> NodeMap<DIScope> {
- let mut scope_map = NodeMap::new();
+ let mut scope_map = NodeMap();
let def_map = &cx.tcx().def_map;
for (input, ty) in decl.inputs.iter().zip(sig.inputs.iter()) {
check(&*input.ty, *ty)
}
- match decl.output {
- ast::NoReturn(_) => {}
- ast::Return(ref ty) => check(&**ty, sig.output.unwrap())
+ if let ast::Return(ref ty) = decl.output {
+ check(&**ty, sig.output.unwrap())
}
}
}
}
(_, "init") => {
let tp_ty = *substs.types.get(FnSpace, 0);
- let lltp_ty = type_of::arg_type_of(ccx, tp_ty);
- if return_type_is_void(ccx, tp_ty) {
- C_nil(ccx)
- } else {
- C_null(lltp_ty)
+ if !return_type_is_void(ccx, tp_ty) {
+            // Just zero out the stack slot. (See comment on base::memzero for explanation)
+ zero_mem(bcx, llresult, tp_ty);
}
+ C_nil(ccx)
}
// Effectively no-ops
(_, "uninit") | (_, "forget") => {
impl TypeNames {
pub fn new() -> TypeNames {
TypeNames {
- named_types: RefCell::new(FnvHashMap::new())
+ named_types: RefCell::new(FnvHashMap())
}
}
implied_output_region,
lifetimes_for_params,
&**output)),
- ast::NoReturn(_) => ty::FnDiverging
+ ast::DefaultReturn(..) => ty::FnConverging(ty::mk_nil(this.tcx())),
+ ast::NoReturn(..) => ty::FnDiverging
};
(ty::BareFnTy {
let expected_ret_ty = expected_sig.map(|e| e.output);
+ let is_infer = match decl.output {
+ ast::Return(ref output) if output.node == ast::TyInfer => true,
+ ast::DefaultReturn(..) => true,
+ _ => false
+ };
+
let output_ty = match decl.output {
- ast::Return(ref output) if output.node == ast::TyInfer && expected_ret_ty.is_some() =>
+ _ if is_infer && expected_ret_ty.is_some() =>
expected_ret_ty.unwrap(),
- ast::Return(ref output) if output.node == ast::TyInfer =>
- ty::FnConverging(this.ty_infer(output.span)),
+ _ if is_infer =>
+ ty::FnConverging(this.ty_infer(decl.output.span())),
ast::Return(ref output) =>
ty::FnConverging(ast_ty_to_ty(this, &rb, &**output)),
- ast::NoReturn(_) => ty::FnDiverging
+ ast::DefaultReturn(..) => unreachable!(),
+ ast::NoReturn(..) => ty::FnDiverging
};
debug!("ty_of_closure: input_tys={}", input_tys.repr(this.tcx()));
let mut builtin_bounds = ty::empty_builtin_bounds();
let mut region_bounds = Vec::new();
let mut trait_bounds = Vec::new();
- let mut trait_def_ids = DefIdMap::new();
+ let mut trait_def_ids = DefIdMap();
for ast_bound in ast_bounds.iter() {
match *ast_bound {
ast::TraitTyParamBound(ref b, ast::TraitBoundModifier::None) => {
.collect::<FnvHashMap<_, _>>();
// Keep track of which fields have already appeared in the pattern.
- let mut used_fields = FnvHashMap::new();
+ let mut used_fields = FnvHashMap();
// Typecheck each field.
for &Spanned { node: ref field, span } in fields.iter() {
-> Inherited<'a, 'tcx> {
Inherited {
infcx: infer::new_infer_ctxt(tcx),
- locals: RefCell::new(NodeMap::new()),
+ locals: RefCell::new(NodeMap()),
param_env: param_env,
- node_types: RefCell::new(NodeMap::new()),
- item_substs: RefCell::new(NodeMap::new()),
- adjustments: RefCell::new(NodeMap::new()),
- method_map: RefCell::new(FnvHashMap::new()),
- object_cast_map: RefCell::new(NodeMap::new()),
- upvar_borrow_map: RefCell::new(FnvHashMap::new()),
- unboxed_closures: RefCell::new(DefIdMap::new()),
- fn_sig_map: RefCell::new(NodeMap::new()),
+ node_types: RefCell::new(NodeMap()),
+ item_substs: RefCell::new(NodeMap()),
+ adjustments: RefCell::new(NodeMap()),
+ method_map: RefCell::new(FnvHashMap()),
+ object_cast_map: RefCell::new(NodeMap()),
+ upvar_borrow_map: RefCell::new(FnvHashMap()),
+ unboxed_closures: RefCell::new(DefIdMap()),
+ fn_sig_map: RefCell::new(NodeMap()),
fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()),
}
}
enum_id_opt: Option<ast::DefId>) {
let tcx = fcx.ccx.tcx;
- let mut class_field_map = FnvHashMap::new();
+ let mut class_field_map = FnvHashMap();
let mut fields_found = 0;
for field in field_types.iter() {
class_field_map.insert(field.name, (field.id, false));
CoherenceChecker {
crate_context: crate_context,
inference_context: new_infer_ctxt(crate_context.tcx),
- inherent_impls: RefCell::new(FnvHashMap::new()),
+ inherent_impls: RefCell::new(FnvHashMap()),
}.check(crate_context.tcx.map.krate());
impls::check(crate_context.tcx);
unsafety::check(crate_context.tcx);
rcvr_ty_generics.repr(ccx.tcx));
let tcx = ccx.tcx;
- let mut seen_methods = FnvHashSet::new();
+ let mut seen_methods = FnvHashSet();
for m in ms {
if !seen_methods.insert(m.pe_ident().repr(tcx)) {
tcx.sess.span_err(m.span, "duplicate method in trait impl");
let tcx = ccx.tcx;
// Write the type of each of the members and check for duplicate fields.
- let mut seen_fields: FnvHashMap<ast::Name, Span> = FnvHashMap::new();
+ let mut seen_fields: FnvHashMap<ast::Name, Span> = FnvHashMap();
let field_tys = struct_def.fields.iter().map(|f| {
let result = convert_field(ccx, &scheme.generics, f, local_def(id));
let output = match decl.output {
ast::Return(ref ty) =>
ty::FnConverging(ast_ty_to_ty(ccx, &rb, &**ty)),
- ast::NoReturn(_) =>
+ ast::DefaultReturn(..) =>
+ ty::FnConverging(ty::mk_nil(ccx.tcx)),
+ ast::NoReturn(..) =>
ty::FnDiverging
};
let mut terms_cx = TermsContext {
tcx: tcx,
arena: arena,
- inferred_map: NodeMap::new(),
+ inferred_map: NodeMap(),
inferred_infos: Vec::new(),
// cache and share the variance struct used for items with
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Show)]
pub enum FunctionRetTy {
Return(Type),
+ DefaultReturn,
NoReturn
}
fn clean(&self, cx: &DocContext) -> FunctionRetTy {
match *self {
ast::Return(ref typ) => Return(typ.clean(cx)),
- ast::NoReturn(_) => NoReturn
+ ast::DefaultReturn(..) => DefaultReturn,
+ ast::NoReturn(..) => NoReturn
}
}
}
match *self {
clean::Return(clean::Tuple(ref tys)) if tys.is_empty() => Ok(()),
clean::Return(ref ty) => write!(f, " -> {}", ty),
+ clean::DefaultReturn => Ok(()),
clean::NoReturn => write!(f, " -> !")
}
}
let analysis = ::ANALYSISKEY.with(|a| a.clone());
let analysis = analysis.borrow();
let public_items = analysis.as_ref().map(|a| a.public_items.clone());
- let public_items = public_items.unwrap_or(NodeSet::new());
+ let public_items = public_items.unwrap_or(NodeSet());
let paths: HashMap<ast::DefId, (Vec<String>, ItemType)> =
analysis.as_ref().map(|a| {
let paths = a.external_paths.borrow_mut().take().unwrap();
#![stable]
+pub use core_collections::Bound;
pub use core_collections::{BinaryHeap, Bitv, BitvSet, BTreeMap, BTreeSet};
pub use core_collections::{DList, RingBuf, VecMap};
/// Lazily open a dynamic library. When passed None it gives a
/// handle to the calling process
pub fn open(filename: Option<&Path>) -> Result<DynamicLibrary, String> {
- unsafe {
- let maybe_library = dl::check_for_errors_in(|| {
- match filename {
- Some(name) => dl::open_external(name.as_vec()),
- None => dl::open_internal()
- }
- });
-
- // The dynamic library must not be constructed if there is
- // an error opening the library so the destructor does not
- // run.
- match maybe_library {
- Err(err) => Err(err),
- Ok(handle) => Ok(DynamicLibrary { handle: handle })
- }
+ let maybe_library = dl::open(filename.map(|path| path.as_vec()));
+
+ // The dynamic library must not be constructed if there is
+ // an error opening the library so the destructor does not
+ // run.
+ match maybe_library {
+ Err(err) => Err(err),
+ Ok(handle) => Ok(DynamicLibrary { handle: handle })
}
}
target_os = "ios",
target_os = "freebsd",
target_os = "dragonfly"))]
-pub mod dl {
- pub use self::Rtld::*;
+mod dl {
use prelude::v1::*;
use ffi::{self, CString};
use libc;
use ptr;
- pub unsafe fn open_external(filename: &[u8]) -> *mut u8 {
+ pub fn open(filename: Option<&[u8]>) -> Result<*mut u8, String> {
+ check_for_errors_in(|| {
+ unsafe {
+ match filename {
+ Some(filename) => open_external(filename),
+ None => open_internal(),
+ }
+ }
+ })
+ }
+
+ const LAZY: libc::c_int = 1;
+
+ unsafe fn open_external(filename: &[u8]) -> *mut u8 {
let s = CString::from_slice(filename);
- dlopen(s.as_ptr(), Lazy as libc::c_int) as *mut u8
+ dlopen(s.as_ptr(), LAZY) as *mut u8
}
- pub unsafe fn open_internal() -> *mut u8 {
- dlopen(ptr::null(), Lazy as libc::c_int) as *mut u8
+ unsafe fn open_internal() -> *mut u8 {
+ dlopen(ptr::null(), LAZY) as *mut u8
}
pub fn check_for_errors_in<T, F>(f: F) -> Result<T, String> where
dlclose(handle as *mut libc::c_void); ()
}
- #[derive(Copy)]
- pub enum Rtld {
- Lazy = 1,
- Now = 2,
- Global = 256,
- Local = 0,
- }
-
#[link_name = "dl"]
extern {
fn dlopen(filename: *const libc::c_char,
}
#[cfg(target_os = "windows")]
-pub mod dl {
+mod dl {
use iter::IteratorExt;
use libc;
+ use libc::consts::os::extra::ERROR_CALL_NOT_IMPLEMENTED;
use ops::FnOnce;
use os;
+ use option::Option::{self, Some, None};
use ptr;
use result::Result;
use result::Result::{Ok, Err};
use str;
use string::String;
use vec::Vec;
+ use sys::c::compat::kernel32::SetThreadErrorMode;
+
+ pub fn open(filename: Option<&[u8]>) -> Result<*mut u8, String> {
+        // Disable the "dll load failed" error dialog.
+ let mut use_thread_mode = true;
+ let prev_error_mode = unsafe {
+ // SEM_FAILCRITICALERRORS 0x01
+ let new_error_mode = 1;
+ let mut prev_error_mode = 0;
+ // Windows >= 7 supports thread error mode.
+ let result = SetThreadErrorMode(new_error_mode, &mut prev_error_mode);
+ if result == 0 {
+ let err = os::errno();
+ if err as libc::c_int == ERROR_CALL_NOT_IMPLEMENTED {
+ use_thread_mode = false;
+ // SetThreadErrorMode not found. use fallback solution: SetErrorMode()
+                    // Note that SetErrorMode is process-wide, so this can cause a race condition!
+                    // However, since even Windows APIs do not care about this problem (#20650),
+                    // we just assume the SetErrorMode race is not a big deal.
+ prev_error_mode = SetErrorMode(new_error_mode);
+ }
+ }
+ prev_error_mode
+ };
- pub unsafe fn open_external(filename: &[u8]) -> *mut u8 {
- // Windows expects Unicode data
- let filename_str = str::from_utf8(filename).unwrap();
- let mut filename_str: Vec<u16> = filename_str.utf16_units().collect();
- filename_str.push(0);
- LoadLibraryW(filename_str.as_ptr() as *const libc::c_void) as *mut u8
- }
+ unsafe {
+ SetLastError(0);
+ }
+
+ let result = match filename {
+ Some(filename) => {
+ let filename_str = str::from_utf8(filename).unwrap();
+ let mut filename_str: Vec<u16> = filename_str.utf16_units().collect();
+ filename_str.push(0);
+ let result = unsafe {
+ LoadLibraryW(filename_str.as_ptr() as *const libc::c_void)
+ };
+                // Beware: Vec/String may change errno during drop!
+                // So we read the error here, before anything is dropped.
+ if result == ptr::null_mut() {
+ let errno = os::errno();
+ Err(os::error_string(errno))
+ } else {
+ Ok(result as *mut u8)
+ }
+ }
+ None => {
+ let mut handle = ptr::null_mut();
+ let succeeded = unsafe {
+ GetModuleHandleExW(0 as libc::DWORD, ptr::null(), &mut handle)
+ };
+ if succeeded == libc::FALSE {
+ let errno = os::errno();
+ Err(os::error_string(errno))
+ } else {
+ Ok(handle as *mut u8)
+ }
+ }
+ };
+
+ unsafe {
+ if use_thread_mode {
+ SetThreadErrorMode(prev_error_mode, ptr::null_mut());
+ } else {
+ SetErrorMode(prev_error_mode);
+ }
+ }
- pub unsafe fn open_internal() -> *mut u8 {
- let mut handle = ptr::null_mut();
- GetModuleHandleExW(0 as libc::DWORD, ptr::null(), &mut handle);
- handle as *mut u8
+ result
}
pub fn check_for_errors_in<T, F>(f: F) -> Result<T, String> where
fn SetLastError(error: libc::size_t);
fn LoadLibraryW(name: *const libc::c_void) -> *mut libc::c_void;
fn GetModuleHandleExW(dwFlags: libc::DWORD, name: *const u16,
- handle: *mut *mut libc::c_void)
- -> *mut libc::c_void;
+ handle: *mut *mut libc::c_void) -> libc::BOOL;
fn GetProcAddress(handle: *mut libc::c_void,
name: *const libc::c_char) -> *mut libc::c_void;
fn FreeLibrary(handle: *mut libc::c_void);
+ fn SetErrorMode(uMode: libc::c_uint) -> libc::c_uint;
}
}
/// * `CreateSymbolicLinkW`: Windows XP, Windows Server 2003
/// * `GetFinalPathNameByHandleW`: Windows XP, Windows Server 2003
pub mod kernel32 {
+ use libc::c_uint;
use libc::types::os::arch::extra::{DWORD, LPCWSTR, BOOLEAN, HANDLE};
use libc::consts::os::extra::ERROR_CALL_NOT_IMPLEMENTED;
unsafe { SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); 0 }
}
}
+
+ compat_fn! {
+ kernel32::SetThreadErrorMode(_dwNewMode: DWORD, _lpOldMode: *mut DWORD) -> c_uint {
+ unsafe { SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); 0 }
+ }
+ }
}
}
/// Functions with return type ! that always
/// raise an error or exit (i.e. never return to the caller)
NoReturn(Span),
+ /// Return type is not specified. Functions default to () and
+ /// closures default to inference. Span points to where return
+ /// type would be inserted.
+ DefaultReturn(Span),
/// Everything else
Return(P<Ty>),
}
pub fn span(&self) -> Span {
match *self {
NoReturn(span) => span,
+ DefaultReturn(span) => span,
Return(ref ty) => ty.span
}
}
inputs: inputs.move_map(|x| fld.fold_arg(x)),
output: match output {
Return(ty) => Return(fld.fold_ty(ty)),
+ DefaultReturn(span) => DefaultReturn(span),
NoReturn(span) => NoReturn(span)
},
variadic: variadic
attrs: attrs.move_map(|x| folder.fold_attribute(x)),
node: match node {
ForeignItemFn(fdec, generics) => {
- ForeignItemFn(fdec.map(|FnDecl {inputs, output, variadic}| FnDecl {
- inputs: inputs.move_map(|a| folder.fold_arg(a)),
- output: match output {
- Return(ty) => Return(folder.fold_ty(ty)),
- NoReturn(span) => NoReturn(span)
- },
- variadic: variadic
- }), folder.fold_generics(generics))
+ ForeignItemFn(folder.fold_fn_decl(fdec), folder.fold_generics(generics))
}
ForeignItemStatic(t, m) => {
ForeignItemStatic(folder.fold_ty(t), m)
}),
id: ast::DUMMY_NODE_ID
}),
- output: ast::Return(P(ast::Ty{id: ast::DUMMY_NODE_ID,
- node: ast::TyTup(vec![]),
- span:sp(15,15)})), // not sure
+ output: ast::DefaultReturn(sp(15, 15)),
variadic: false
}),
ast::Unsafety::Normal,
use ast::{BiBitAnd, BiBitOr, BiBitXor, BiRem, BiLt, BiGt, Block};
use ast::{BlockCheckMode, CaptureByRef, CaptureByValue, CaptureClause};
use ast::{Crate, CrateConfig, Decl, DeclItem};
-use ast::{DeclLocal, DefaultBlock, UnDeref, BiDiv, EMPTY_CTXT, EnumDef, ExplicitSelf};
+use ast::{DeclLocal, DefaultBlock, DefaultReturn};
+use ast::{UnDeref, BiDiv, EMPTY_CTXT, EnumDef, ExplicitSelf};
use ast::{Expr, Expr_, ExprAddrOf, ExprMatch, ExprAgain};
use ast::{ExprAssign, ExprAssignOp, ExprBinary, ExprBlock, ExprBox};
use ast::{ExprBreak, ExprCall, ExprCast};
}
} else {
let pos = self.span.lo;
- Return(P(Ty {
- id: ast::DUMMY_NODE_ID,
- node: TyTup(vec![]),
- span: mk_sp(pos, pos),
- }))
+ DefaultReturn(mk_sp(pos, pos))
}
}
break;
}
- let bind_type = if self.eat_keyword(keywords::Mut) {
- BindByValue(MutMutable)
- } else if self.eat_keyword(keywords::Ref) {
- BindByRef(self.parse_mutability())
- } else {
- BindByValue(MutImmutable)
- };
-
- let fieldname = self.parse_ident();
-
- let (subpat, is_shorthand) = if self.check(&token::Colon) {
- match bind_type {
- BindByRef(..) | BindByValue(MutMutable) => {
- let token_str = self.this_token_to_string();
- self.fatal(&format!("unexpected `{}`",
- token_str)[])
- }
- _ => {}
- }
-
+ // Check if a colon exists one ahead. This means we're parsing a fieldname.
+ let (subpat, fieldname, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) {
+ // Parsing a pattern of the form "fieldname: pat"
+ let fieldname = self.parse_ident();
self.bump();
let pat = self.parse_pat();
hi = pat.span.hi;
- (pat, false)
+ (pat, fieldname, false)
} else {
+ // Parsing a pattern of the form "(box) (ref) (mut) fieldname"
+ let is_box = self.eat_keyword(keywords::Box);
+ let boxed_span_lo = self.span.lo;
+ let is_ref = self.eat_keyword(keywords::Ref);
+ let is_mut = self.eat_keyword(keywords::Mut);
+ let fieldname = self.parse_ident();
hi = self.last_span.hi;
- let fieldpath = codemap::Spanned{span:self.last_span, node: fieldname};
- (P(ast::Pat {
+
+ let bind_type = match (is_ref, is_mut) {
+ (true, true) => BindByRef(MutMutable),
+ (true, false) => BindByRef(MutImmutable),
+ (false, true) => BindByValue(MutMutable),
+ (false, false) => BindByValue(MutImmutable),
+ };
+ let fieldpath = codemap::Spanned{span:self.last_span, node:fieldname};
+ let fieldpat = P(ast::Pat{
id: ast::DUMMY_NODE_ID,
node: PatIdent(bind_type, fieldpath, None),
- span: self.last_span
- }), true)
+ span: mk_sp(boxed_span_lo, hi),
+ });
+
+ let subpat = if is_box {
+ P(ast::Pat{
+ id: ast::DUMMY_NODE_ID,
+ node: PatBox(fieldpat),
+ span: mk_sp(lo, hi),
+ })
+ } else {
+ fieldpat
+ };
+ (subpat, fieldname, true)
};
+
fields.push(codemap::Spanned { span: mk_sp(lo, hi),
node: ast::FieldPat { ident: fieldname,
pat: subpat,
(optional_unboxed_closure_kind, args)
}
};
- let output = if self.check(&token::RArrow) {
- self.parse_ret_ty()
- } else {
- Return(P(Ty {
- id: ast::DUMMY_NODE_ID,
- node: TyInfer,
- span: self.span,
- }))
- };
+ let output = self.parse_ret_ty();
(P(FnDecl {
inputs: inputs_captures,
seq_sep_trailing_allowed(token::Comma),
|p| p.parse_fn_block_arg());
- let output = if self.check(&token::RArrow) {
- self.parse_ret_ty()
- } else {
- Return(P(Ty {
- id: ast::DUMMY_NODE_ID,
- node: TyInfer,
- span: self.span,
- }))
- };
+ let output = self.parse_ret_ty();
P(FnDecl {
inputs: inputs,
try!(self.print_fn_args(decl, None));
try!(word(&mut self.s, "|"));
- if let ast::Return(ref ty) = decl.output {
- if ty.node == ast::TyInfer {
- return self.maybe_print_comment(ty.span.lo);
- }
+ if let ast::DefaultReturn(..) = decl.output {
+ return Ok(());
}
try!(self.space_if_not_bol());
try!(self.print_type(&**ty));
self.maybe_print_comment(ty.span.lo)
}
+ ast::DefaultReturn(..) => unreachable!(),
ast::NoReturn(span) => {
try!(self.word_nbsp("!"));
self.maybe_print_comment(span.lo)
try!(self.print_fn_args(decl, None));
try!(word(&mut self.s, ")"));
- if let ast::Return(ref ty) = decl.output {
- if ty.node == ast::TyInfer {
- return self.maybe_print_comment(ty.span.lo);
- }
+ if let ast::DefaultReturn(..) = decl.output {
+ return Ok(());
}
try!(self.space_if_not_bol());
try!(self.print_type(&**ty));
self.maybe_print_comment(ty.span.lo)
}
+ ast::DefaultReturn(..) => unreachable!(),
ast::NoReturn(span) => {
try!(self.word_nbsp("!"));
self.maybe_print_comment(span.lo)
}
pub fn print_fn_output(&mut self, decl: &ast::FnDecl) -> IoResult<()> {
- if let ast::Return(ref ty) = decl.output {
- match ty.node {
- ast::TyTup(ref tys) if tys.is_empty() => {
- return self.maybe_print_comment(ty.span.lo);
- }
- _ => ()
- }
+ if let ast::DefaultReturn(..) = decl.output {
+ return Ok(());
}
try!(self.space_if_not_bol());
match decl.output {
ast::NoReturn(_) =>
try!(self.word_nbsp("!")),
+ ast::DefaultReturn(..) => unreachable!(),
ast::Return(ref ty) =>
try!(self.print_type(&**ty))
}
let decl = ast::FnDecl {
inputs: Vec::new(),
- output: ast::Return(P(ast::Ty {id: 0,
- node: ast::TyTup(vec![]),
- span: codemap::DUMMY_SP})),
+ output: ast::DefaultReturn(codemap::DUMMY_SP),
variadic: false
};
let generics = ast_util::empty_generics();
match &i.node {
&ast::ItemFn(ref decl, _, _, ref generics, _) => {
let no_output = match decl.output {
- ast::Return(ref ret_ty) => match ret_ty.node {
- ast::TyTup(ref tys) if tys.is_empty() => true,
- _ => false,
- },
- ast::NoReturn(_) => false
+ ast::DefaultReturn(..) => true,
+ _ => false
};
if decl.inputs.is_empty()
&& no_output
ast::ItemFn(ref decl, _, _, ref generics, _) => {
let input_cnt = decl.inputs.len();
let no_output = match decl.output {
- ast::Return(ref ret_ty) => match ret_ty.node {
- ast::TyTup(ref tys) if tys.is_empty() => true,
- _ => false,
- },
- ast::NoReturn(_) => false
+ ast::DefaultReturn(..) => true,
+ _ => false
};
let tparm_cnt = generics.ty_params.len();
// NB: inadequate check, but we're running
fn main() {
struct Foo { x: isize }
match (Foo { x: 10 }) {
- Foo { ref x: ref x } => {}, //~ ERROR unexpected `:`
+ Foo { ref x: ref x } => {}, //~ ERROR expected `,`, found `:`
_ => {}
}
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct T;
+
+mod t1 {
+ type Foo = ::T;
+ mod Foo {} //~ ERROR: duplicate definition of type or module `Foo`
+}
+
+mod t2 {
+ type Foo = ::T;
+ struct Foo; //~ ERROR: duplicate definition of type or module `Foo`
+}
+
+mod t3 {
+ type Foo = ::T;
+ enum Foo {} //~ ERROR: duplicate definition of type or module `Foo`
+}
+
+mod t4 {
+ type Foo = ::T;
+ fn Foo() {} // ok
+}
+
+mod t5 {
+ type Bar<T> = T;
+ mod Bar {} //~ ERROR: duplicate definition of type or module `Bar`
+}
+
+mod t6 {
+ type Foo = ::T;
+ impl Foo {} // ok
+}
+
+
+fn main() {}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// pp-exact
+
+// Check that `fn f() -> () { }` does not print as `fn f() { }`.
+
+fn f() -> () { }
+
+fn main() { }
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Makes sure that zero-initializing large types is reasonably fast.
+// Doing it incorrectly causes massive slowdown in LLVM during
+// optimisation.
+
+#![feature(intrinsics)]
+
+extern "rust-intrinsic" {
+ pub fn init<T>() -> T;
+}
+
+const SIZE: usize = 1024 * 1024;
+
+fn main() {
+ let _memory: [u8; SIZE] = unsafe { init() };
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+#![feature(box_syntax)]
+
+enum E {
+ StructVar { boxed: Box<i32> }
+}
+
+fn main() {
+
+ // Test matching each shorthand notation for field patterns.
+ let mut a = E::StructVar { boxed: box 3 };
+ match a {
+ E::StructVar { box boxed } => { }
+ }
+ match a {
+ E::StructVar { box ref boxed } => { }
+ }
+ match a {
+ E::StructVar { box mut boxed } => { }
+ }
+ match a {
+ E::StructVar { box ref mut boxed } => { }
+ }
+ match a {
+ E::StructVar { ref boxed } => { }
+ }
+ match a {
+ E::StructVar { ref mut boxed } => { }
+ }
+ match a {
+ E::StructVar { mut boxed } => { }
+ }
+
+ // Test matching non shorthand notation. Recreate a since last test
+ // moved `boxed`
+ let mut a = E::StructVar { boxed: box 3 };
+ match a {
+ E::StructVar { boxed: box ref mut num } => { }
+ }
+ match a {
+ E::StructVar { boxed: ref mut num } => { }
+ }
+
+}