This is groundwork for #30587 (typestrong constant integrals), but it is also a worthwhile change on its own, since we no longer just juggle raw `u64`s around.
`ty::Disr` will be changed to a `ConstInt` in #30587
* `aapcs`
* `cdecl`
* `fastcall`
+* `vectorcall`
+This is currently hidden behind the `abi_vectorcall` gate and is subject to change.
* `Rust`
* `rust-intrinsic`
* `system`
* - `type_ascription` - Allows type ascription expressions `expr: Type`.
+* - `abi_vectorcall` - Allows the usage of the vectorcall calling convention
+ (e.g. `extern "vectorcall" fn fn_();`)
+
If a feature is promoted to a language feature, then all existing programs will
start to receive compilation warnings about `#![feature]` directives which enabled
the new feature (because the directive is no longer necessary). However, if a
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// This implementation is largely based on the high-level description and analysis of B-Trees
-// found in *Open Data Structures* (ODS). Although our implementation does not use any of
-// the source found in ODS, if one wishes to review the high-level design of this structure, it
-// can be freely downloaded at http://opendatastructures.org/. Its contents are as of this
-// writing (August 2014) freely licensed under the following Creative Commons Attribution
-// License: [CC BY 2.5 CA](http://creativecommons.org/licenses/by/2.5/ca/).
-
-use self::Entry::*;
-
use core::cmp::Ordering;
use core::fmt::Debug;
use core::hash::{Hash, Hasher};
-use core::iter::{Map, FromIterator};
+use core::iter::FromIterator;
use core::ops::Index;
-use core::{fmt, mem, usize};
-use Bound::{self, Included, Excluded, Unbounded};
+use core::{fmt, intrinsics, mem, ptr};
use borrow::Borrow;
-use vec_deque::VecDeque;
+use Bound::{self, Included, Excluded, Unbounded};
+
+use super::node::{self, NodeRef, Handle, marker};
+use super::search;
-use self::Continuation::{Continue, Finished};
-use self::StackOp::*;
-use super::node::ForceResult::{Leaf, Internal};
-use super::node::TraversalItem::{self, Elem, Edge};
-use super::node::{Traversal, MutTraversal, MoveTraversal};
-use super::node::{self, Node, Found, GoDown};
+use super::node::InsertResult::*;
+use super::node::ForceResult::*;
+use super::search::SearchResult::*;
+use self::UnderflowResult::*;
+use self::Entry::*;
/// A map based on a B-Tree.
///
/// It is a logic error for a key to be modified in such a way that the key's ordering relative to
/// any other key, as determined by the `Ord` trait, changes while it is in the map. This is
/// normally only possible through `Cell`, `RefCell`, global state, I/O, or unsafe code.
-#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct BTreeMap<K, V> {
- root: Node<K, V>,
- length: usize,
- depth: usize,
- b: usize,
+ root: node::Root<K, V>,
+ length: usize
+}
+
+impl<K, V> Drop for BTreeMap<K, V> {
+ #[unsafe_destructor_blind_to_params]
+ fn drop(&mut self) {
+ unsafe {
+ for _ in ptr::read(self).into_iter() { }
+ }
+ }
+}
+
+impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
+ fn clone(&self) -> BTreeMap<K, V> {
+ fn clone_subtree<K: Clone, V: Clone>(
+ node: node::NodeRef<marker::Immut, K, V, marker::LeafOrInternal>)
+ -> BTreeMap<K, V> {
+
+ match node.force() {
+ Leaf(leaf) => {
+ let mut out_tree = BTreeMap {
+ root: node::Root::new_leaf(),
+ length: 0
+ };
+
+ {
+ let mut out_node = match out_tree.root.as_mut().force() {
+ Leaf(leaf) => leaf,
+ Internal(_) => unreachable!()
+ };
+
+ let mut in_edge = leaf.first_edge();
+ while let Ok(kv) = in_edge.right_kv() {
+ let (k, v) = kv.into_kv();
+ in_edge = kv.right_edge();
+
+ out_node.push(k.clone(), v.clone());
+ out_tree.length += 1;
+ }
+ }
+
+ out_tree
+ },
+ Internal(internal) => {
+ let mut out_tree = clone_subtree(internal.first_edge().descend());
+
+ {
+ let mut out_node = out_tree.root.push_level();
+ let mut in_edge = internal.first_edge();
+ while let Ok(kv) = in_edge.right_kv() {
+ let (k, v) = kv.into_kv();
+ in_edge = kv.right_edge();
+
+ let k = (*k).clone();
+ let v = (*v).clone();
+ let subtree = clone_subtree(in_edge.descend());
+
+ // We can't destructure subtree directly
+ // because BTreeMap implements Drop
+ let (subroot, sublength) = unsafe {
+ let root = ptr::read(&subtree.root);
+ let length = subtree.length;
+ mem::forget(subtree);
+ (root, length)
+ };
+
+ out_node.push(k, v, subroot);
+ out_tree.length += 1 + sublength;
+ }
+ }
+
+ out_tree
+ }
+ }
+ }
+
+ clone_subtree(self.root.as_ref())
+ }
}
-/// An abstract base over-which all other BTree iterators are built.
-#[derive(Clone)]
-struct AbsIter<T> {
- traversals: VecDeque<T>,
- size: usize,
+impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
+ where K: Borrow<Q> + Ord,
+ Q: Ord
+{
+ type Key = K;
+
+ fn get(&self, key: &Q) -> Option<&K> {
+ match search::search_tree(self.root.as_ref(), key) {
+ Found(handle) => Some(handle.into_kv().0),
+ GoDown(_) => None
+ }
+ }
+
+ fn take(&mut self, key: &Q) -> Option<K> {
+ match search::search_tree(self.root.as_mut(), key) {
+ Found(handle) => {
+ Some(OccupiedEntry {
+ handle: handle,
+ length: &mut self.length
+ }.remove_kv().0)
+ },
+ GoDown(_) => None
+ }
+ }
+
+ fn replace(&mut self, key: K) -> Option<K> {
+ match search::search_tree::<marker::Mut, K, (), K>(self.root.as_mut(), &key) {
+ Found(handle) => Some(mem::replace(handle.into_kv_mut().0, key)),
+ GoDown(handle) => {
+ VacantEntry {
+ key: key,
+ handle: handle,
+ length: &mut self.length
+ }.insert(());
+ None
+ }
+ }
+ }
}
/// An iterator over a BTreeMap's entries.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, K: 'a, V: 'a> {
- inner: AbsIter<Traversal<'a, K, V>>,
+ range: Range<'a, K, V>,
+ length: usize
}
/// A mutable iterator over a BTreeMap's entries.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, K: 'a, V: 'a> {
- inner: AbsIter<MutTraversal<'a, K, V>>,
+ range: RangeMut<'a, K, V>,
+ length: usize
}
/// An owning iterator over a BTreeMap's entries.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K, V> {
- inner: AbsIter<MoveTraversal<K, V>>,
+ front: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
+ back: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
+ length: usize
}
/// An iterator over a BTreeMap's keys.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Keys<'a, K: 'a, V: 'a> {
- inner: Map<Iter<'a, K, V>, fn((&'a K, &'a V)) -> &'a K>,
+ inner: Iter<'a, K, V>,
}
/// An iterator over a BTreeMap's values.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Values<'a, K: 'a, V: 'a> {
- inner: Map<Iter<'a, K, V>, fn((&'a K, &'a V)) -> &'a V>,
+ inner: Iter<'a, K, V>,
}
/// An iterator over a sub-range of BTreeMap's entries.
pub struct Range<'a, K: 'a, V: 'a> {
- inner: AbsIter<Traversal<'a, K, V>>,
+ front: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>,
+ back: Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>
}
/// A mutable iterator over a sub-range of BTreeMap's entries.
pub struct RangeMut<'a, K: 'a, V: 'a> {
- inner: AbsIter<MutTraversal<'a, K, V>>,
+ front: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
+ back: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>
}
/// A view into a single entry in a map, which may either be vacant or occupied.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct VacantEntry<'a, K: 'a, V: 'a> {
key: K,
- stack: stack::SearchStack<'a, K, V, node::handle::Edge, node::handle::Leaf>,
+ handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
+ length: &'a mut usize
}
/// An occupied Entry.
#[stable(feature = "rust1", since = "1.0.0")]
pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
- stack: stack::SearchStack<'a, K, V, node::handle::KV, node::handle::LeafOrInternal>,
+ handle: Handle<NodeRef<
+ marker::Mut<'a>,
+ K, V,
+ marker::LeafOrInternal
+ >, marker::KV>,
+
+ length: &'a mut usize
}
impl<K: Ord, V> BTreeMap<K, V> {
/// Makes a new empty BTreeMap with a reasonable choice for B.
#[stable(feature = "rust1", since = "1.0.0")]
- #[allow(deprecated)]
pub fn new() -> BTreeMap<K, V> {
BTreeMap {
- length: 0,
- depth: 1,
- root: Node::make_leaf_root(6),
- // FIXME(Gankro): Tune this as a function of size_of<K/V>?
- b: 6,
+ root: node::Root::new_leaf(),
+ length: 0
}
-
}
/// Clears the map, removing all values.
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn clear(&mut self) {
- // avoid recursive destructors by manually traversing the tree
- for _ in mem::replace(self, BTreeMap::new()) {}
+ // FIXME(gereeter) .clear() allocates
+ *self = BTreeMap::new();
}
- // Searching in a B-Tree is pretty straightforward.
- //
- // Start at the root. Try to find the key in the current node. If we find it, return it.
- // If it's not in there, follow the edge *before* the smallest key larger than
- // the search key. If no such key exists (they're *all* smaller), then just take the last
- // edge in the node. If we're in a leaf and we don't find our key, then it's not
- // in the tree.
-
/// Returns a reference to the value corresponding to the key.
///
/// The key may be any borrowed form of the map's key type, but the ordering
/// assert_eq!(map.get(&2), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
- where K: Borrow<Q>,
- Q: Ord
- {
- let mut cur_node = &self.root;
- loop {
- match Node::search(cur_node, key) {
- Found(handle) => return Some(handle.into_kv().1),
- GoDown(handle) => {
- match handle.force() {
- Leaf(_) => return None,
- Internal(internal_handle) => {
- cur_node = internal_handle.into_edge();
- continue;
- }
- }
- }
- }
+ pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V> where K: Borrow<Q>, Q: Ord {
+ match search::search_tree(self.root.as_ref(), key) {
+ Found(handle) => Some(handle.into_kv().1),
+ GoDown(_) => None
}
}
/// assert_eq!(map.contains_key(&2), false);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
- where K: Borrow<Q>,
- Q: Ord
- {
+ pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool where K: Borrow<Q>, Q: Ord {
self.get(key).is_some()
}
/// ```
// See `get` for implementation notes, this is basically a copy-paste with mut's added
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut V>
- where K: Borrow<Q>,
- Q: Ord
- {
- // temp_node is a Borrowck hack for having a mutable value outlive a loop iteration
- let mut temp_node = &mut self.root;
- loop {
- let cur_node = temp_node;
- match Node::search(cur_node, key) {
- Found(handle) => return Some(handle.into_kv_mut().1),
- GoDown(handle) => {
- match handle.force() {
- Leaf(_) => return None,
- Internal(internal_handle) => {
- temp_node = internal_handle.into_edge_mut();
- continue;
- }
- }
- }
- }
+ pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut V> where K: Borrow<Q>, Q: Ord {
+ match search::search_tree(self.root.as_mut(), key) {
+ Found(handle) => Some(handle.into_kv_mut().1),
+ GoDown(_) => None
}
}
- // Insertion in a B-Tree is a bit complicated.
- //
- // First we do the same kind of search described in `find`. But we need to maintain a stack of
- // all the nodes/edges in our search path. If we find a match for the key we're trying to
- // insert, just swap the vals and return the old ones. However, when we bottom out in a leaf,
- // we attempt to insert our key-value pair at the same location we would want to follow another
- // edge.
- //
- // If the node has room, then this is done in the obvious way by shifting elements. However,
- // if the node itself is full, we split node into two, and give its median key-value
- // pair to its parent to insert the new node with. Of course, the parent may also be
- // full, and insertion can propagate until we reach the root. If we reach the root, and
- // it is *also* full, then we split the root and place the two nodes under a newly made root.
- //
- // Note that we subtly deviate from Open Data Structures in our implementation of split.
- // ODS describes inserting into the node *regardless* of its capacity, and then
- // splitting *afterwards* if it happens to be overfull. However, this is inefficient.
- // Instead, we split beforehand, and then insert the key-value pair into the appropriate
- // result node. This has two consequences:
- //
- // 1) While ODS produces a left node of size B-1, and a right node of size B,
- // we may potentially reverse this. However, this shouldn't effect the analysis.
- //
- // 2) While ODS may potentially return the pair we *just* inserted after
- // the split, we will never do this. Again, this shouldn't effect the analysis.
-
/// Inserts a key-value pair into the map.
///
/// If the map did not have this key present, `None` is returned.
/// assert_eq!(map[&37], "c");
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn insert(&mut self, mut key: K, mut value: V) -> Option<V> {
- // This is a stack of rawptrs to nodes paired with indices, respectively
- // representing the nodes and edges of our search path. We have to store rawptrs
- // because as far as Rust is concerned, we can mutate aliased data with such a
- // stack. It is of course correct, but what it doesn't know is that we will only
- // be popping and using these ptrs one at a time in child-to-parent order. The alternative
- // to doing this is to take the Nodes from their parents. This actually makes
- // borrowck *really* happy and everything is pretty smooth. However, this creates
- // *tons* of pointless writes, and requires us to always walk all the way back to
- // the root after an insertion, even if we only needed to change a leaf. Therefore,
- // we accept this potential unsafety and complexity in the name of performance.
- //
- // Regardless, the actual dangerous logic is completely abstracted away from BTreeMap
- // by the stack module. All it can do is immutably read nodes, and ask the search stack
- // to proceed down some edge by index. This makes the search logic we'll be reusing in a
- // few different methods much neater, and of course drastically improves safety.
- let mut stack = stack::PartialSearchStack::new(self);
-
- loop {
- let result = stack.with(move |pusher, node| {
- // Same basic logic as found in `find`, but with PartialSearchStack mediating the
- // actual nodes for us
- match Node::search(node, &key) {
- Found(mut handle) => {
- // Perfect match, swap the values and return the old one
- mem::swap(handle.val_mut(), &mut value);
- Finished(Some(value))
- }
- GoDown(handle) => {
- // We need to keep searching, try to get the search stack
- // to go down further
- match handle.force() {
- Leaf(leaf_handle) => {
- // We've reached a leaf, perform the insertion here
- pusher.seal(leaf_handle).insert(key, value);
- Finished(None)
- }
- Internal(internal_handle) => {
- // We've found the subtree to insert this key/value pair in,
- // keep searching
- Continue((pusher.push(internal_handle), key, value))
- }
- }
- }
- }
- });
- match result {
- Finished(ret) => return ret,
- Continue((new_stack, renewed_key, renewed_val)) => {
- stack = new_stack;
- key = renewed_key;
- value = renewed_val;
- }
+ pub fn insert(&mut self, key: K, value: V) -> Option<V> {
+ match self.entry(key) {
+ Occupied(mut entry) => Some(entry.insert(value)),
+ Vacant(entry) => {
+ entry.insert(value);
+ None
}
}
}
- // Deletion is the most complicated operation for a B-Tree.
- //
- // First we do the same kind of search described in
- // `find`. But we need to maintain a stack of all the nodes/edges in our search path.
- // If we don't find the key, then we just return `None` and do nothing. If we do find the
- // key, we perform two operations: remove the item, and then possibly handle underflow.
- //
- // # removing the item
- // If the node is a leaf, we just remove the item, and shift
- // any items after it back to fill the hole.
- //
- // If the node is an internal node, we *swap* the item with the smallest item in
- // in its right subtree (which must reside in a leaf), and then revert to the leaf
- // case
- //
- // # handling underflow
- // After removing an item, there may be too few items in the node. We want nodes
- // to be mostly full for efficiency, although we make an exception for the root, which
- // may have as few as one item. If this is the case, we may first try to steal
- // an item from our left or right neighbour.
- //
- // To steal from the left (right) neighbour,
- // we take the largest (smallest) item and child from it. We then swap the taken item
- // with the item in their mutual parent that separates them, and then insert the
- // parent's item and the taken child into the first (last) index of the underflowed node.
- //
- // However, stealing has the possibility of underflowing our neighbour. If this is the
- // case, we instead *merge* with our neighbour. This of course reduces the number of
- // children in the parent. Therefore, we also steal the item that separates the now
- // merged nodes, and insert it into the merged node.
- //
- // Merging may cause the parent to underflow. If this is the case, then we must repeat
- // the underflow handling process on the parent. If merging merges the last two children
- // of the root, then we replace the root with the merged node.
-
/// Removes a key from the map, returning the value at the key if the key
/// was previously in the map.
///
/// assert_eq!(map.remove(&1), None);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V>
- where K: Borrow<Q>,
- Q: Ord
- {
- // See `swap` for a more thorough description of the stuff going on in here
- let mut stack = stack::PartialSearchStack::new(self);
- loop {
- let result = stack.with(move |pusher, node| {
- match Node::search(node, key) {
- Found(handle) => {
- // Perfect match. Terminate the stack here, and remove the entry
- Finished(Some(pusher.seal(handle).remove()))
- }
- GoDown(handle) => {
- // We need to keep searching, try to go down the next edge
- match handle.force() {
- // We're at a leaf; the key isn't in here
- Leaf(_) => Finished(None),
- Internal(internal_handle) => Continue(pusher.push(internal_handle)),
- }
- }
- }
- });
- match result {
- Finished(ret) => return ret.map(|(_, v)| v),
- Continue(new_stack) => stack = new_stack,
- }
+ pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V> where K: Borrow<Q>, Q: Ord {
+ match search::search_tree(self.root.as_mut(), key) {
+ Found(handle) => {
+ Some(OccupiedEntry {
+ handle: handle,
+ length: &mut self.length
+ }.remove())
+ },
+ GoDown(_) => None
}
}
-}
-
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K, V> IntoIterator for BTreeMap<K, V> {
- type Item = (K, V);
- type IntoIter = IntoIter<K, V>;
- /// Gets an owning iterator over the entries of the map.
+ /// Constructs a double-ended iterator over a sub-range of elements in the map, starting
+ /// at min, and ending at max. If min is `Unbounded`, then it will be treated as "negative
+ /// infinity", and if max is `Unbounded`, then it will be treated as "positive infinity".
+ /// Thus range(Unbounded, Unbounded) will yield the whole collection.
///
/// # Examples
///
/// ```
+ /// #![feature(btree_range, collections_bound)]
+ ///
/// use std::collections::BTreeMap;
+ /// use std::collections::Bound::{Included, Unbounded};
///
/// let mut map = BTreeMap::new();
- /// map.insert(1, "a");
- /// map.insert(2, "b");
- /// map.insert(3, "c");
- ///
- /// for (key, value) in map.into_iter() {
+ /// map.insert(3, "a");
+ /// map.insert(5, "b");
+ /// map.insert(8, "c");
+ /// for (&key, &value) in map.range(Included(&4), Included(&8)) {
/// println!("{}: {}", key, value);
/// }
+ /// assert_eq!(Some((&5, &"b")), map.range(Included(&4), Unbounded).next());
/// ```
- fn into_iter(self) -> IntoIter<K, V> {
- let len = self.len();
- let mut lca = VecDeque::new();
- lca.push_back(Traverse::traverse(self.root));
- IntoIter {
- inner: AbsIter {
- traversals: lca,
- size: len,
+ #[unstable(feature = "btree_range",
+ reason = "matches collection reform specification, waiting for dust to settle",
+ issue = "27787")]
+ pub fn range<Min: ?Sized + Ord, Max: ?Sized + Ord>(&self,
+ min: Bound<&Min>,
+ max: Bound<&Max>)
+ -> Range<K, V>
+ where K: Borrow<Min> + Borrow<Max>,
+ {
+ let front = match min {
+ Included(key) => match search::search_tree(self.root.as_ref(), key) {
+ Found(kv_handle) => match kv_handle.left_edge().force() {
+ Leaf(bottom) => bottom,
+ Internal(internal) => last_leaf_edge(internal.descend())
+ },
+ GoDown(bottom) => bottom
+ },
+ Excluded(key) => match search::search_tree(self.root.as_ref(), key) {
+ Found(kv_handle) => match kv_handle.right_edge().force() {
+ Leaf(bottom) => bottom,
+ Internal(internal) => first_leaf_edge(internal.descend())
+ },
+ GoDown(bottom) => bottom
+ },
+ Unbounded => first_leaf_edge(self.root.as_ref())
+ };
+
+ let back = match max {
+ Included(key) => match search::search_tree(self.root.as_ref(), key) {
+ Found(kv_handle) => match kv_handle.right_edge().force() {
+ Leaf(bottom) => bottom,
+ Internal(internal) => first_leaf_edge(internal.descend())
+ },
+ GoDown(bottom) => bottom
+ },
+ Excluded(key) => match search::search_tree(self.root.as_ref(), key) {
+ Found(kv_handle) => match kv_handle.left_edge().force() {
+ Leaf(bottom) => bottom,
+ Internal(internal) => last_leaf_edge(internal.descend())
+ },
+ GoDown(bottom) => bottom
+ },
+ Unbounded => last_leaf_edge(self.root.as_ref())
+ };
+
+ Range {
+ front: front,
+ back: back
+ }
+ }
+
+ /// Constructs a mutable double-ended iterator over a sub-range of elements in the map, starting
+ /// at min, and ending at max. If min is `Unbounded`, then it will be treated as "negative
+ /// infinity", and if max is `Unbounded`, then it will be treated as "positive infinity".
+ /// Thus range(Unbounded, Unbounded) will yield the whole collection.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(btree_range, collections_bound)]
+ ///
+ /// use std::collections::BTreeMap;
+ /// use std::collections::Bound::{Included, Excluded};
+ ///
+ /// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"].iter()
+ /// .map(|&s| (s, 0))
+ /// .collect();
+ /// for (_, balance) in map.range_mut(Included("B"), Excluded("Cheryl")) {
+ /// *balance += 100;
+ /// }
+ /// for (name, balance) in &map {
+ /// println!("{} => {}", name, balance);
+ /// }
+ /// ```
+ #[unstable(feature = "btree_range",
+ reason = "matches collection reform specification, waiting for dust to settle",
+ issue = "27787")]
+ pub fn range_mut<Min: ?Sized + Ord, Max: ?Sized + Ord>(&mut self,
+ min: Bound<&Min>,
+ max: Bound<&Max>)
+ -> RangeMut<K, V>
+ where K: Borrow<Min> + Borrow<Max>,
+ {
+ let root1 = self.root.as_mut();
+ let root2 = unsafe { ptr::read(&root1) };
+
+ let front = match min {
+ Included(key) => match search::search_tree(root1, key) {
+ Found(kv_handle) => match kv_handle.left_edge().force() {
+ Leaf(bottom) => bottom,
+ Internal(internal) => last_leaf_edge(internal.descend())
+ },
+ GoDown(bottom) => bottom
+ },
+ Excluded(key) => match search::search_tree(root1, key) {
+ Found(kv_handle) => match kv_handle.right_edge().force() {
+ Leaf(bottom) => bottom,
+ Internal(internal) => first_leaf_edge(internal.descend())
+ },
+ GoDown(bottom) => bottom
+ },
+ Unbounded => first_leaf_edge(root1)
+ };
+
+ let back = match max {
+ Included(key) => match search::search_tree(root2, key) {
+ Found(kv_handle) => match kv_handle.right_edge().force() {
+ Leaf(bottom) => bottom,
+ Internal(internal) => first_leaf_edge(internal.descend())
+ },
+ GoDown(bottom) => bottom
+ },
+ Excluded(key) => match search::search_tree(root2, key) {
+ Found(kv_handle) => match kv_handle.left_edge().force() {
+ Leaf(bottom) => bottom,
+ Internal(internal) => last_leaf_edge(internal.descend())
+ },
+ GoDown(bottom) => bottom
},
+ Unbounded => last_leaf_edge(root2)
+ };
+
+ RangeMut {
+ front: front,
+ back: back
+ }
+ }
+
+ /// Gets the given key's corresponding entry in the map for in-place manipulation.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// // count the number of occurrences of letters in the vec
+ /// for x in vec!["a","b","a","c","a","b"] {
+ /// *count.entry(x).or_insert(0) += 1;
+ /// }
+ ///
+ /// assert_eq!(count["a"], 3);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn entry(&mut self, key: K) -> Entry<K, V> {
+ match search::search_tree(self.root.as_mut(), &key) {
+ Found(handle) => Occupied(OccupiedEntry {
+ handle: handle,
+ length: &mut self.length
+ }),
+ GoDown(handle) => Vacant(VacantEntry {
+ key: key,
+ handle: handle,
+ length: &mut self.length
+ })
}
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> IntoIterator for &'a BTreeMap<K, V> {
+impl<'a, K: 'a, V: 'a> IntoIterator for &'a BTreeMap<K, V> {
type Item = (&'a K, &'a V);
type IntoIter = Iter<'a, K, V>;
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> IntoIterator for &'a mut BTreeMap<K, V> {
- type Item = (&'a K, &'a mut V);
- type IntoIter = IterMut<'a, K, V>;
+impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
+ type Item = (&'a K, &'a V);
- fn into_iter(mut self) -> IterMut<'a, K, V> {
- self.iter_mut()
+ fn next(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ unsafe { Some(self.range.next_unchecked()) }
+ }
}
-}
-/// A helper enum useful for deciding whether to continue a loop since we can't
-/// return from a closure
-enum Continuation<A, B> {
- Continue(A),
- Finished(B),
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
+ }
}
-/// The stack module provides a safe interface for constructing and manipulating a stack of ptrs
-/// to nodes. By using this module much better safety guarantees can be made, and more search
-/// boilerplate gets cut out.
-mod stack {
- use core::marker;
- use core::mem;
- use core::ops::{Deref, DerefMut};
- use super::BTreeMap;
- use super::super::node::{self, Node, Fit, Split, Internal, Leaf};
- use super::super::node::handle;
- use vec::Vec;
-
- struct InvariantLifetime<'id>(marker::PhantomData<::core::cell::Cell<&'id ()>>);
-
- impl<'id> InvariantLifetime<'id> {
- fn new() -> InvariantLifetime<'id> {
- InvariantLifetime(marker::PhantomData)
+impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ unsafe { Some(self.range.next_back_unchecked()) }
}
}
+}
- /// A generic mutable reference, identical to `&mut` except for the fact that its lifetime
- /// parameter is invariant. This means that wherever an `IdRef` is expected, only an `IdRef`
- /// with the exact requested lifetime can be used. This is in contrast to normal references,
- /// where `&'static` can be used in any function expecting any lifetime reference.
- pub struct IdRef<'id, T: 'id> {
- inner: &'id mut T,
- _marker: InvariantLifetime<'id>,
- }
-
- impl<'id, T> Deref for IdRef<'id, T> {
- type Target = T;
+impl<'a, K: 'a, V: 'a> ExactSizeIterator for Iter<'a, K, V> {
+ fn len(&self) -> usize { self.length }
+}
- fn deref(&self) -> &T {
- &*self.inner
+impl<'a, K, V> Clone for Iter<'a, K, V> {
+ fn clone(&self) -> Iter<'a, K, V> {
+ Iter {
+ range: self.range.clone(),
+ length: self.length
}
}
+}
- impl<'id, T> DerefMut for IdRef<'id, T> {
- fn deref_mut(&mut self) -> &mut T {
- &mut *self.inner
- }
+impl<'a, K: 'a, V: 'a> IntoIterator for &'a mut BTreeMap<K, V> {
+ type Item = (&'a K, &'a mut V);
+ type IntoIter = IterMut<'a, K, V>;
+
+ fn into_iter(self) -> IterMut<'a, K, V> {
+ self.iter_mut()
}
+}
- type StackItem<K, V> = node::Handle<*mut Node<K, V>, handle::Edge, handle::Internal>;
- type Stack<K, V> = Vec<StackItem<K, V>>;
+impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> {
+ type Item = (&'a K, &'a mut V);
- /// A `PartialSearchStack` handles the construction of a search stack.
- pub struct PartialSearchStack<'a, K: 'a, V: 'a> {
- map: &'a mut BTreeMap<K, V>,
- stack: Stack<K, V>,
- next: *mut Node<K, V>,
+ fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ unsafe { Some(self.range.next_unchecked()) }
+ }
}
- /// A `SearchStack` represents a full path to an element or an edge of interest. It provides
- /// methods depending on the type of what the path points to for removing an element, inserting
- /// a new element, and manipulating to element at the top of the stack.
- pub struct SearchStack<'a, K: 'a, V: 'a, Type, NodeType> {
- map: &'a mut BTreeMap<K, V>,
- stack: Stack<K, V>,
- top: node::Handle<*mut Node<K, V>, Type, NodeType>,
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
}
+}
- /// A `PartialSearchStack` that doesn't hold a reference to the next node, and is just
- /// just waiting for a `Handle` to that next node to be pushed. See `PartialSearchStack::with`
- /// for more details.
- pub struct Pusher<'id, 'a, K: 'a, V: 'a> {
- map: &'a mut BTreeMap<K, V>,
- stack: Stack<K, V>,
- _marker: InvariantLifetime<'id>,
+impl<'a, K: 'a, V: 'a> DoubleEndedIterator for IterMut<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ unsafe { Some(self.range.next_back_unchecked()) }
+ }
}
+}
- impl<'a, K, V> PartialSearchStack<'a, K, V> {
- /// Creates a new PartialSearchStack from a BTreeMap by initializing the stack with the
- /// root of the tree.
- pub fn new(map: &'a mut BTreeMap<K, V>) -> PartialSearchStack<'a, K, V> {
- let depth = map.depth;
+impl<'a, K: 'a, V: 'a> ExactSizeIterator for IterMut<'a, K, V> {
+ fn len(&self) -> usize { self.length }
+}
- PartialSearchStack {
- next: &mut map.root as *mut _,
- map: map,
- stack: Vec::with_capacity(depth),
- }
- }
+impl<K, V> IntoIterator for BTreeMap<K, V> {
+ type Item = (K, V);
+ type IntoIter = IntoIter<K, V>;
- /// Breaks up the stack into a `Pusher` and the next `Node`, allowing the given closure
- /// to interact with, search, and finally push the `Node` onto the stack. The passed in
- /// closure must be polymorphic on the `'id` lifetime parameter, as this statically
- /// ensures that only `Handle`s from the correct `Node` can be pushed.
- ///
- /// The reason this works is that the `Pusher` has an `'id` parameter, and will only accept
- /// handles with the same `'id`. The closure could only get references with that lifetime
- /// through its arguments or through some other `IdRef` that it has lying around. However,
- /// no other `IdRef` could possibly work - because the `'id` is held in an invariant
- /// parameter, it would need to have precisely the correct lifetime, which would mean that
- /// at least one of the calls to `with` wouldn't be properly polymorphic, wanting a
- /// specific lifetime instead of the one that `with` chooses to give it.
- ///
- /// See also Haskell's `ST` monad, which uses a similar trick.
- pub fn with<T, F: for<'id> FnOnce(Pusher<'id, 'a, K, V>,
- IdRef<'id, Node<K, V>>) -> T>(self, closure: F) -> T {
- let pusher = Pusher {
- map: self.map,
- stack: self.stack,
- _marker: InvariantLifetime::new(),
- };
- let node = IdRef {
- inner: unsafe { &mut *self.next },
- _marker: InvariantLifetime::new(),
- };
+ fn into_iter(self) -> IntoIter<K, V> {
+ let root1 = unsafe { ptr::read(&self.root).into_ref() };
+ let root2 = unsafe { ptr::read(&self.root).into_ref() };
+ let len = self.length;
+ mem::forget(self);
- closure(pusher, node)
+ IntoIter {
+ front: first_leaf_edge(root1),
+ back: last_leaf_edge(root2),
+ length: len
}
}
+}
- impl<'id, 'a, K, V> Pusher<'id, 'a, K, V> {
- /// Pushes the requested child of the stack's current top on top of the stack. If the child
- /// exists, then a new PartialSearchStack is yielded. Otherwise, a VacantSearchStack is
- /// yielded.
- pub fn push(mut self,
- mut edge: node::Handle<IdRef<'id, Node<K, V>>, handle::Edge, handle::Internal>)
- -> PartialSearchStack<'a, K, V> {
- self.stack.push(edge.as_raw());
- PartialSearchStack {
- map: self.map,
- stack: self.stack,
- next: edge.edge_mut() as *mut _,
- }
- }
-
- /// Converts the PartialSearchStack into a SearchStack.
- pub fn seal<Type, NodeType>(self,
- mut handle: node::Handle<IdRef<'id, Node<K, V>>,
- Type,
- NodeType>)
- -> SearchStack<'a, K, V, Type, NodeType> {
- SearchStack {
- map: self.map,
- stack: self.stack,
- top: handle.as_raw(),
+impl<K, V> Drop for IntoIter<K, V> {
+ fn drop(&mut self) {
+ for _ in &mut *self { }
+ unsafe {
+ let leaf_node = ptr::read(&self.front).into_node();
+ if let Some(first_parent) = leaf_node.deallocate_and_ascend() {
+ let mut cur_node = first_parent.into_node();
+ while let Some(parent) = cur_node.deallocate_and_ascend() {
+ cur_node = parent.into_node()
+ }
}
}
}
+}
- impl<'a, K, V, NodeType> SearchStack<'a, K, V, handle::KV, NodeType> {
- /// Gets a reference to the value the stack points to.
- pub fn peek(&self) -> &V {
- unsafe { self.top.from_raw().into_kv().1 }
- }
-
- /// Gets a mutable reference to the value the stack points to.
- pub fn peek_mut(&mut self) -> &mut V {
- unsafe { self.top.from_raw_mut().into_kv_mut().1 }
- }
+impl<K, V> Iterator for IntoIter<K, V> {
+ type Item = (K, V);
- /// Converts the stack into a mutable reference to the value it points to, with a lifetime
- /// tied to the original tree.
- pub fn into_top(mut self) -> &'a mut V {
- unsafe { &mut *(self.top.from_raw_mut().val_mut() as *mut V) }
+ fn next(&mut self) -> Option<(K, V)> {
+ if self.length == 0 {
+ return None;
+ } else {
+ self.length -= 1;
}
- }
- impl<'a, K, V> SearchStack<'a, K, V, handle::KV, handle::Leaf> {
- /// Removes the key and value in the top element of the stack, then handles underflows as
- /// described in BTree's pop function.
- fn remove_leaf(mut self) -> (K, V) {
- self.map.length -= 1;
+ let handle = unsafe { ptr::read(&self.front) };
- // Remove the key-value pair from the leaf that this search stack points to.
- // Then, note if the leaf is underfull, and promptly forget the leaf and its ptr
- // to avoid ownership issues.
- let (key_val, mut underflow) = unsafe {
- let key_val = self.top.from_raw_mut().remove_as_leaf();
- let underflow = self.top.from_raw().node().is_underfull();
- (key_val, underflow)
- };
+ let mut cur_handle = match handle.right_kv() {
+ Ok(kv) => {
+ let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+ let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+ self.front = kv.right_edge();
+ return Some((k, v));
+ },
+ Err(last_edge) => unsafe {
+ unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
+ }
+ };
- loop {
- match self.stack.pop() {
- None => {
- // We've reached the root, so no matter what, we're done. We manually
- // access the root via the tree itself to avoid creating any dangling
- // pointers.
- if self.map.root.is_empty() && !self.map.root.is_leaf() {
- // We've emptied out the root, so make its only child the new root.
- // If it's a leaf, we just let it become empty.
- self.map.depth -= 1;
- self.map.root.hoist_lone_child();
- }
- return key_val;
- }
- Some(mut handle) => {
- if underflow {
- // Underflow! Handle it!
- unsafe {
- handle.from_raw_mut().handle_underflow();
- underflow = handle.from_raw().node().is_underfull();
- }
- } else {
- // All done!
- return key_val;
- }
- }
+ loop {
+ match cur_handle.right_kv() {
+ Ok(kv) => {
+ let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+ let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+ self.front = first_leaf_edge(kv.right_edge().descend());
+ return Some((k, v));
+ },
+ Err(last_edge) => unsafe {
+ cur_handle = unwrap_unchecked(last_edge.into_node().deallocate_and_ascend());
}
}
}
}
- impl<'a, K, V> SearchStack<'a, K, V, handle::KV, handle::LeafOrInternal> {
- /// Removes the key and value in the top element of the stack, then handles underflows as
- /// described in BTree's pop function.
- pub fn remove(self) -> (K, V) {
- // Ensure that the search stack goes to a leaf. This is necessary to perform deletion
- // in a BTree. Note that this may put the tree in an inconsistent state (further
- // described in into_leaf's comments), but this is immediately fixed by the
- // removing the value we want to remove
- self.into_leaf().remove_leaf()
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
+ }
+}
+
+impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
+ fn next_back(&mut self) -> Option<(K, V)> {
+ if self.length == 0 {
+ return None;
+ } else {
+ self.length -= 1;
}
- /// Subroutine for removal. Takes a search stack for a key that might terminate at an
- /// internal node, and mutates the tree and search stack to *make* it a search stack
- /// for that same key that *does* terminates at a leaf. If the mutation occurs, then this
- /// leaves the tree in an inconsistent state that must be repaired by the caller by
- /// removing the entry in question. Specifically the key-value pair and its successor will
- /// become swapped.
- fn into_leaf(mut self) -> SearchStack<'a, K, V, handle::KV, handle::Leaf> {
- unsafe {
- let mut top_raw = self.top;
- let mut top = top_raw.from_raw_mut();
-
- let key_ptr = top.key_mut() as *mut _;
- let val_ptr = top.val_mut() as *mut _;
-
- // Try to go into the right subtree of the found key to find its successor
- match top.force() {
- Leaf(mut leaf_handle) => {
- // We're a proper leaf stack, nothing to do
- return SearchStack {
- map: self.map,
- stack: self.stack,
- top: leaf_handle.as_raw(),
- };
- }
- Internal(mut internal_handle) => {
- let mut right_handle = internal_handle.right_edge();
-
- // We're not a proper leaf stack, let's get to work.
- self.stack.push(right_handle.as_raw());
-
- let mut temp_node = right_handle.edge_mut();
- loop {
- // Walk into the smallest subtree of this node
- let node = temp_node;
-
- match node.kv_handle(0).force() {
- Leaf(mut handle) => {
- // This node is a leaf, do the swap and return
- mem::swap(handle.key_mut(), &mut *key_ptr);
- mem::swap(handle.val_mut(), &mut *val_ptr);
- return SearchStack {
- map: self.map,
- stack: self.stack,
- top: handle.as_raw(),
- };
- }
- Internal(kv_handle) => {
- // This node is internal, go deeper
- let mut handle = kv_handle.into_left_edge();
- self.stack.push(handle.as_raw());
- temp_node = handle.into_edge_mut();
- }
- }
- }
- }
+ let handle = unsafe { ptr::read(&self.back) };
+
+ let mut cur_handle = match handle.left_kv() {
+ Ok(kv) => {
+ let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+ let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+ self.back = kv.left_edge();
+ return Some((k, v));
+ },
+ Err(last_edge) => unsafe {
+ unwrap_unchecked(last_edge.into_node().deallocate_and_ascend())
+ }
+ };
+
+ loop {
+ match cur_handle.left_kv() {
+ Ok(kv) => {
+ let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
+ let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
+ self.back = last_leaf_edge(kv.left_edge().descend());
+ return Some((k, v));
+ },
+ Err(last_edge) => unsafe {
+ cur_handle = unwrap_unchecked(last_edge.into_node().deallocate_and_ascend());
}
}
}
}
+}
- impl<'a, K, V> SearchStack<'a, K, V, handle::Edge, handle::Leaf> {
- /// Inserts the key and value into the top element in the stack, and if that node has to
- /// split recursively inserts the split contents into the next element stack until
- /// splits stop.
- ///
- /// Assumes that the stack represents a search path from the root to a leaf.
- ///
- /// An &mut V is returned to the inserted value, for callers that want a reference to this.
- pub fn insert(mut self, key: K, val: V) -> &'a mut V {
- unsafe {
- self.map.length += 1;
-
- // Insert the key and value into the leaf at the top of the stack
- let (mut insertion, inserted_ptr) = self.top
- .from_raw_mut()
- .insert_as_leaf(key, val);
-
- loop {
- match insertion {
- Fit => {
- // The last insertion went off without a hitch, no splits! We can stop
- // inserting now.
- return &mut *inserted_ptr;
- }
- Split(key, val, right) => {
- match self.stack.pop() {
- // The last insertion triggered a split, so get the next element on
- // the stack to recursively insert the split node into.
- None => {
- // The stack was empty; we've split the root, and need to make a
- // a new one. This is done in-place because we can't move the
- // root out of a reference to the tree.
- Node::make_internal_root(&mut self.map.root,
- self.map.b,
- key,
- val,
- right);
-
- self.map.depth += 1;
- return &mut *inserted_ptr;
- }
- Some(mut handle) => {
- // The stack wasn't empty, do the insertion and recurse
- insertion = handle.from_raw_mut()
- .insert_as_internal(key, val, right);
- continue;
- }
- }
- }
- }
- }
- }
- }
- }
+impl<K, V> ExactSizeIterator for IntoIter<K, V> {
+ fn len(&self) -> usize { self.length }
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Ord, V> FromIterator<(K, V)> for BTreeMap<K, V> {
- fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> BTreeMap<K, V> {
- let mut map = BTreeMap::new();
- map.extend(iter);
- map
- }
-}
+impl<'a, K, V> Iterator for Keys<'a, K, V> {
+ type Item = &'a K;
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> {
- #[inline]
- fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
- for (k, v) in iter {
- self.insert(k, v);
- }
+ fn next(&mut self) -> Option<&'a K> {
+ self.inner.next().map(|(k, _)| k)
}
-}
-#[stable(feature = "extend_ref", since = "1.2.0")]
-impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap<K, V> {
- fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
- self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Hash, V: Hash> Hash for BTreeMap<K, V> {
- fn hash<H: Hasher>(&self, state: &mut H) {
- for elt in self {
- elt.hash(state);
- }
+impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
+ fn next_back(&mut self) -> Option<&'a K> {
+ self.inner.next_back().map(|(k, _)| k)
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Ord, V> Default for BTreeMap<K, V> {
- fn default() -> BTreeMap<K, V> {
- BTreeMap::new()
+impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> {
+ fn len(&self) -> usize {
+ self.inner.len()
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: PartialEq, V: PartialEq> PartialEq for BTreeMap<K, V> {
- fn eq(&self, other: &BTreeMap<K, V>) -> bool {
- self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b)
+impl<'a, K, V> Clone for Keys<'a, K, V> {
+ fn clone(&self) -> Keys<'a, K, V> {
+ Keys {
+ inner: self.inner.clone()
+ }
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Eq, V: Eq> Eq for BTreeMap<K, V> {}
+impl<'a, K, V> Iterator for Values<'a, K, V> {
+ type Item = &'a V;
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: PartialOrd, V: PartialOrd> PartialOrd for BTreeMap<K, V> {
- #[inline]
- fn partial_cmp(&self, other: &BTreeMap<K, V>) -> Option<Ordering> {
- self.iter().partial_cmp(other.iter())
+ fn next(&mut self) -> Option<&'a V> {
+ self.inner.next().map(|(_, v)| v)
}
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Ord, V: Ord> Ord for BTreeMap<K, V> {
- #[inline]
- fn cmp(&self, other: &BTreeMap<K, V>) -> Ordering {
- self.iter().cmp(other.iter())
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K: Debug, V: Debug> Debug for BTreeMap<K, V> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_map().entries(self.iter()).finish()
+impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
+ fn next_back(&mut self) -> Option<&'a V> {
+ self.inner.next_back().map(|(_, v)| v)
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K: Ord, Q: ?Sized, V> Index<&'a Q> for BTreeMap<K, V>
- where K: Borrow<Q>,
- Q: Ord
-{
- type Output = V;
-
- #[inline]
- fn index(&self, key: &Q) -> &V {
- self.get(key).expect("no entry found for key")
+impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {
+ fn len(&self) -> usize {
+ self.inner.len()
}
}
-/// Genericizes over how to get the correct type of iterator from the correct type
-/// of Node ownership.
-trait Traverse<N> {
- fn traverse(node: N) -> Self;
-}
-
-impl<'a, K, V> Traverse<&'a Node<K, V>> for Traversal<'a, K, V> {
- fn traverse(node: &'a Node<K, V>) -> Traversal<'a, K, V> {
- node.iter()
+impl<'a, K, V> Clone for Values<'a, K, V> {
+ fn clone(&self) -> Values<'a, K, V> {
+ Values {
+ inner: self.inner.clone()
+ }
}
}
-impl<'a, K, V> Traverse<&'a mut Node<K, V>> for MutTraversal<'a, K, V> {
- fn traverse(node: &'a mut Node<K, V>) -> MutTraversal<'a, K, V> {
- node.iter_mut()
- }
-}
+impl<'a, K, V> Iterator for Range<'a, K, V> {
+ type Item = (&'a K, &'a V);
-impl<K, V> Traverse<Node<K, V>> for MoveTraversal<K, V> {
- fn traverse(node: Node<K, V>) -> MoveTraversal<K, V> {
- node.into_iter()
+ fn next(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.front == self.back {
+ None
+ } else {
+ unsafe { Some(self.next_unchecked()) }
+ }
}
}
-/// Represents an operation to perform inside the following iterator methods.
-/// This is necessary to use in `next` because we want to modify `self.traversals` inside
-/// a match that borrows it. Similarly in `next_back`. Instead, we use this enum to note
-/// what we want to do, and do it after the match.
-enum StackOp<T> {
- Push(T),
- Pop,
-}
-impl<K, V, E, T> Iterator for AbsIter<T>
- where T: DoubleEndedIterator<Item = TraversalItem<K, V, E>> + Traverse<E>
-{
- type Item = (K, V);
+impl<'a, K, V> Range<'a, K, V> {
+ unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
+ let handle = self.front;
- // Our iterator represents a queue of all ancestors of elements we have
- // yet to yield, from smallest to largest. Note that the design of these
- // iterators permits an *arbitrary* initial pair of min and max, making
- // these arbitrary sub-range iterators.
- fn next(&mut self) -> Option<(K, V)> {
- loop {
- // We want the smallest element, so try to get the back of the queue
- let op = match self.traversals.back_mut() {
- None => return None,
- // The queue wasn't empty, so continue along the node in its head
- Some(iter) => {
- match iter.next() {
- // The head is empty, so Pop it off and continue the process
- None => Pop,
- // The head yielded an edge, so make that the new head
- Some(Edge(next)) => Push(Traverse::traverse(next)),
- // The head yielded an entry, so yield that
- Some(Elem(kv)) => {
- self.size -= 1;
- return Some(kv);
- }
- }
- }
- };
+ let mut cur_handle = match handle.right_kv() {
+ Ok(kv) => {
+ let ret = kv.into_kv();
+ self.front = kv.right_edge();
+ return ret;
+ },
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ unwrap_unchecked(next_level)
+ }
+ };
- // Handle any operation as necessary, without a conflicting borrow of the queue
- match op {
- Push(item) => {
- self.traversals.push_back(item);
- }
- Pop => {
- self.traversals.pop_back();
+ loop {
+ match cur_handle.right_kv() {
+ Ok(kv) => {
+ let ret = kv.into_kv();
+ self.front = first_leaf_edge(kv.right_edge().descend());
+ return ret;
+ },
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ cur_handle = unwrap_unchecked(next_level);
}
}
}
}
+}
- fn size_hint(&self) -> (usize, Option<usize>) {
- (self.size, Some(self.size))
+impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.front == self.back {
+ None
+ } else {
+ unsafe { Some(self.next_back_unchecked()) }
+ }
}
}
-impl<K, V, E, T> DoubleEndedIterator for AbsIter<T>
- where T: DoubleEndedIterator<Item = TraversalItem<K, V, E>> + Traverse<E>
-{
- // next_back is totally symmetric to next
- #[inline]
- fn next_back(&mut self) -> Option<(K, V)> {
- loop {
- let op = match self.traversals.front_mut() {
- None => return None,
- Some(iter) => {
- match iter.next_back() {
- None => Pop,
- Some(Edge(next)) => Push(Traverse::traverse(next)),
- Some(Elem(kv)) => {
- self.size -= 1;
- return Some(kv);
- }
- }
- }
- };
+impl<'a, K, V> Range<'a, K, V> {
+ unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
+ let handle = self.back;
- match op {
- Push(item) => {
- self.traversals.push_front(item);
- }
- Pop => {
- self.traversals.pop_front();
+ let mut cur_handle = match handle.left_kv() {
+ Ok(kv) => {
+ let ret = kv.into_kv();
+ self.back = kv.left_edge();
+ return ret;
+ },
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ unwrap_unchecked(next_level)
+ }
+ };
+
+ loop {
+ match cur_handle.left_kv() {
+ Ok(kv) => {
+ let ret = kv.into_kv();
+ self.back = last_leaf_edge(kv.left_edge().descend());
+ return ret;
+ },
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ cur_handle = unwrap_unchecked(next_level);
}
}
}
}
}
-impl<'a, K, V> Clone for Iter<'a, K, V> {
- fn clone(&self) -> Iter<'a, K, V> {
- Iter { inner: self.inner.clone() }
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> Iterator for Iter<'a, K, V> {
- type Item = (&'a K, &'a V);
-
- fn next(&mut self) -> Option<(&'a K, &'a V)> {
- self.inner.next()
- }
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> DoubleEndedIterator for Iter<'a, K, V> {
- fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
- self.inner.next_back()
+impl<'a, K, V> Clone for Range<'a, K, V> {
+ fn clone(&self) -> Range<'a, K, V> {
+ Range {
+ front: self.front,
+ back: self.back
+ }
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> Iterator for IterMut<'a, K, V> {
+impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
type Item = (&'a K, &'a mut V);
fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
- self.inner.next()
- }
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> DoubleEndedIterator for IterMut<'a, K, V> {
- fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
- self.inner.next_back()
+ if self.front == self.back {
+ None
+ } else {
+            unsafe { Some(self.next_unchecked()) }
+ }
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> {}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K, V> Iterator for IntoIter<K, V> {
- type Item = (K, V);
+impl<'a, K, V> RangeMut<'a, K, V> {
+ unsafe fn next_unchecked(&mut self) -> (&'a K, &'a mut V) {
+ let handle = ptr::read(&self.front);
- fn next(&mut self) -> Option<(K, V)> {
- self.inner.next()
- }
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
- fn next_back(&mut self) -> Option<(K, V)> {
- self.inner.next_back()
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<K, V> ExactSizeIterator for IntoIter<K, V> {}
+ let mut cur_handle = match handle.right_kv() {
+ Ok(kv) => {
+ let (k, v) = ptr::read(&kv).into_kv_mut();
+ self.front = kv.right_edge();
+ return (k, v);
+ },
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ unwrap_unchecked(next_level)
+ }
+ };
-impl<'a, K, V> Clone for Keys<'a, K, V> {
- fn clone(&self) -> Keys<'a, K, V> {
- Keys { inner: self.inner.clone() }
+ loop {
+ match cur_handle.right_kv() {
+ Ok(kv) => {
+ let (k, v) = ptr::read(&kv).into_kv_mut();
+ self.front = first_leaf_edge(kv.right_edge().descend());
+ return (k, v);
+ },
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ cur_handle = unwrap_unchecked(next_level);
+ }
+ }
+ }
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> Iterator for Keys<'a, K, V> {
- type Item = &'a K;
- fn next(&mut self) -> Option<(&'a K)> {
- self.inner.next()
- }
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
- fn next_back(&mut self) -> Option<(&'a K)> {
- self.inner.next_back()
+impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.front == self.back {
+ None
+ } else {
+ unsafe { Some(self.next_back_unchecked()) }
+ }
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> {}
+impl<'a, K, V> RangeMut<'a, K, V> {
+ unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a mut V) {
+ let handle = ptr::read(&self.back);
-impl<'a, K, V> Clone for Values<'a, K, V> {
- fn clone(&self) -> Values<'a, K, V> {
- Values { inner: self.inner.clone() }
- }
-}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> Iterator for Values<'a, K, V> {
- type Item = &'a V;
+ let mut cur_handle = match handle.left_kv() {
+ Ok(kv) => {
+ let (k, v) = ptr::read(&kv).into_kv_mut();
+ self.back = kv.left_edge();
+ return (k, v);
+ },
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ unwrap_unchecked(next_level)
+ }
+ };
- fn next(&mut self) -> Option<(&'a V)> {
- self.inner.next()
- }
- fn size_hint(&self) -> (usize, Option<usize>) {
- self.inner.size_hint()
+ loop {
+ match cur_handle.left_kv() {
+ Ok(kv) => {
+ let (k, v) = ptr::read(&kv).into_kv_mut();
+ self.back = last_leaf_edge(kv.left_edge().descend());
+ return (k, v);
+ },
+ Err(last_edge) => {
+ let next_level = last_edge.into_node().ascend().ok();
+ cur_handle = unwrap_unchecked(next_level);
+ }
+ }
+ }
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
- fn next_back(&mut self) -> Option<(&'a V)> {
- self.inner.next_back()
+
+impl<K: Ord, V> FromIterator<(K, V)> for BTreeMap<K, V> {
+ fn from_iter<T: IntoIterator<Item=(K, V)>>(iter: T) -> BTreeMap<K, V> {
+ let mut map = BTreeMap::new();
+ map.extend(iter);
+ map
}
}
-#[stable(feature = "rust1", since = "1.0.0")]
-impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {}
-impl<'a, K, V> Clone for Range<'a, K, V> {
- fn clone(&self) -> Range<'a, K, V> {
- Range { inner: self.inner.clone() }
+impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> {
+ #[inline]
+ fn extend<T: IntoIterator<Item=(K, V)>>(&mut self, iter: T) {
+ for (k, v) in iter {
+ self.insert(k, v);
+ }
}
}
-impl<'a, K, V> Iterator for Range<'a, K, V> {
- type Item = (&'a K, &'a V);
- fn next(&mut self) -> Option<(&'a K, &'a V)> {
- self.inner.next()
+impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap<K, V> {
+ fn extend<I: IntoIterator<Item=(&'a K, &'a V)>>(&mut self, iter: I) {
+ self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
}
}
-impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
- fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
- self.inner.next_back()
+
+impl<K: Hash, V: Hash> Hash for BTreeMap<K, V> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ for elt in self {
+ elt.hash(state);
+ }
}
}
-impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
- type Item = (&'a K, &'a mut V);
-
- fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
- self.inner.next()
+impl<K: Ord, V> Default for BTreeMap<K, V> {
+ fn default() -> BTreeMap<K, V> {
+ BTreeMap::new()
}
}
-impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> {
- fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
- self.inner.next_back()
+
+impl<K: PartialEq, V: PartialEq> PartialEq for BTreeMap<K, V> {
+ fn eq(&self, other: &BTreeMap<K, V>) -> bool {
+ self.len() == other.len() &&
+ self.iter().zip(other).all(|(a, b)| a == b)
}
}
-impl<'a, K: Ord, V> Entry<'a, K, V> {
- #[stable(feature = "rust1", since = "1.0.0")]
- /// Ensures a value is in the entry by inserting the default if empty, and returns
- /// a mutable reference to the value in the entry.
- pub fn or_insert(self, default: V) -> &'a mut V {
- match self {
- Occupied(entry) => entry.into_mut(),
- Vacant(entry) => entry.insert(default),
- }
- }
+impl<K: Eq, V: Eq> Eq for BTreeMap<K, V> {}
- #[stable(feature = "rust1", since = "1.0.0")]
- /// Ensures a value is in the entry by inserting the result of the default function if empty,
- /// and returns a mutable reference to the value in the entry.
- pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
- match self {
- Occupied(entry) => entry.into_mut(),
- Vacant(entry) => entry.insert(default()),
- }
+impl<K: PartialOrd, V: PartialOrd> PartialOrd for BTreeMap<K, V> {
+ #[inline]
+ fn partial_cmp(&self, other: &BTreeMap<K, V>) -> Option<Ordering> {
+ self.iter().partial_cmp(other.iter())
}
}
-impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
- /// Sets the value of the entry with the VacantEntry's key,
- /// and returns a mutable reference to it.
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn insert(self, value: V) -> &'a mut V {
- self.stack.insert(self.key, value)
+impl<K: Ord, V: Ord> Ord for BTreeMap<K, V> {
+ #[inline]
+ fn cmp(&self, other: &BTreeMap<K, V>) -> Ordering {
+ self.iter().cmp(other.iter())
}
}
-impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
- /// Gets a reference to the value in the entry.
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn get(&self) -> &V {
- self.stack.peek()
+impl<K: Debug, V: Debug> Debug for BTreeMap<K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_map().entries(self.iter()).finish()
}
+}
- /// Gets a mutable reference to the value in the entry.
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn get_mut(&mut self) -> &mut V {
- self.stack.peek_mut()
- }
+impl<'a, K: Ord, Q: ?Sized, V> Index<&'a Q> for BTreeMap<K, V>
+ where K: Borrow<Q>, Q: Ord
+{
+ type Output = V;
- /// Converts the entry into a mutable reference to its value.
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn into_mut(self) -> &'a mut V {
- self.stack.into_top()
+ #[inline]
+ fn index(&self, key: &Q) -> &V {
+ self.get(key).expect("no entry found for key")
}
+}
- /// Sets the value of the entry with the OccupiedEntry's key,
- /// and returns the entry's old value.
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn insert(&mut self, mut value: V) -> V {
- mem::swap(self.stack.peek_mut(), &mut value);
- value
+fn first_leaf_edge<BorrowType, K, V>(
+ mut node: NodeRef<BorrowType,
+ K, V,
+ marker::LeafOrInternal>
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ loop {
+ match node.force() {
+ Leaf(leaf) => return leaf.first_edge(),
+ Internal(internal) => {
+ node = internal.first_edge().descend();
+ }
+ }
}
+}
- /// Takes the value of the entry out of the map, and returns it.
- #[stable(feature = "rust1", since = "1.0.0")]
- pub fn remove(self) -> V {
- self.stack.remove().1
+fn last_leaf_edge<BorrowType, K, V>(
+ mut node: NodeRef<BorrowType,
+ K, V,
+ marker::LeafOrInternal>
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ loop {
+ match node.force() {
+ Leaf(leaf) => return leaf.last_edge(),
+ Internal(internal) => {
+ node = internal.last_edge().descend();
+ }
+ }
}
}
+#[inline(always)]
+unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
+ val.unwrap_or_else(|| {
+ if cfg!(debug_assertions) {
+ panic!("'unchecked' unwrap on None in BTreeMap");
+ } else {
+ intrinsics::unreachable();
+ }
+ })
+}
+
impl<K, V> BTreeMap<K, V> {
/// Gets an iterator over the entries of the map.
///
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<K, V> {
- let len = self.len();
- // NB. The initial capacity for ringbuf is large enough to avoid reallocs in many cases.
- let mut lca = VecDeque::new();
- lca.push_back(Traverse::traverse(&self.root));
Iter {
- inner: AbsIter {
- traversals: lca,
- size: len,
+ range: Range {
+ front: first_leaf_edge(self.root.as_ref()),
+ back: last_leaf_edge(self.root.as_ref())
},
+ length: self.length
}
}
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter_mut(&mut self) -> IterMut<K, V> {
- let len = self.len();
- let mut lca = VecDeque::new();
- lca.push_back(Traverse::traverse(&mut self.root));
+ let root1 = self.root.as_mut();
+ let root2 = unsafe { ptr::read(&root1) };
IterMut {
- inner: AbsIter {
- traversals: lca,
- size: len,
+ range: RangeMut {
+ front: first_leaf_edge(root1),
+ back: last_leaf_edge(root2),
},
+ length: self.length
}
}
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn keys<'a>(&'a self) -> Keys<'a, K, V> {
- fn first<A, B>((a, _): (A, B)) -> A {
- a
- }
- let first: fn((&'a K, &'a V)) -> &'a K = first; // coerce to fn pointer
-
- Keys { inner: self.iter().map(first) }
+ Keys { inner: self.iter() }
}
/// Gets an iterator over the values of the map.
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn values<'a>(&'a self) -> Values<'a, K, V> {
- fn second<A, B>((_, b): (A, B)) -> B {
- b
- }
- let second: fn((&'a K, &'a V)) -> &'a V = second; // coerce to fn pointer
-
- Values { inner: self.iter().map(second) }
+ Values { inner: self.iter() }
}
/// Returns the number of elements in the map.
}
}
-macro_rules! range_impl {
- ($root:expr, $min:expr, $max:expr, $as_slices_internal:ident, $iter:ident, $Range:ident,
- $edges:ident, [$($mutability:ident)*]) => (
- {
- // A deque that encodes two search paths containing (left-to-right):
- // a series of truncated-from-the-left iterators, the LCA's doubly-truncated iterator,
- // and a series of truncated-from-the-right iterators.
- let mut traversals = VecDeque::new();
- let (root, min, max) = ($root, $min, $max);
-
- let mut leftmost = None;
- let mut rightmost = None;
+impl<'a, K: Ord, V> Entry<'a, K, V> {
+ /// Ensures a value is in the entry by inserting the default if empty, and returns
+ /// a mutable reference to the value in the entry.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn or_insert(self, default: V) -> &'a mut V {
+ match self {
+ Occupied(entry) => entry.into_mut(),
+ Vacant(entry) => entry.insert(default),
+ }
+ }
- match (&min, &max) {
- (&Unbounded, &Unbounded) => {
- traversals.push_back(Traverse::traverse(root))
- }
- (&Unbounded, &Included(_)) | (&Unbounded, &Excluded(_)) => {
- rightmost = Some(root);
- }
- (&Included(_), &Unbounded) | (&Excluded(_), &Unbounded) => {
- leftmost = Some(root);
- }
- (&Included(min_key), &Included(max_key))
- | (&Included(min_key), &Excluded(max_key))
- | (&Excluded(min_key), &Included(max_key))
- | (&Excluded(min_key), &Excluded(max_key)) => {
- // lca represents the Lowest Common Ancestor, above which we never
- // walk, since everything else is outside the range to iterate.
- // ___________________
- // |__0_|_80_|_85_|_90_| (root)
- // | | | | |
- // |
- // v
- // ___________________
- // |__5_|_15_|_30_|_73_|
- // | | | | |
- // |
- // v
- // ___________________
- // |_33_|_58_|_63_|_68_| lca for the range [41, 65]
- // | |\___|___/| | iterator at traversals[2]
- // | |
- // | v
- // v rightmost
- // leftmost
- let mut is_leaf = root.is_leaf();
- let mut lca = root.$as_slices_internal();
- loop {
- let slice = lca.slice_from(min_key).slice_to(max_key);
- if let [ref $($mutability)* edge] = slice.edges {
- // Follow the only edge that leads the node that covers the range.
- is_leaf = edge.is_leaf();
- lca = edge.$as_slices_internal();
- } else {
- let mut iter = slice.$iter();
- if is_leaf {
- leftmost = None;
- rightmost = None;
- } else {
- // Only change the state of nodes with edges.
- leftmost = iter.next_edge_item();
- rightmost = iter.next_edge_item_back();
- }
- traversals.push_back(iter);
- break;
- }
- }
- }
- }
- // Keep narrowing the range by going down.
- // ___________________
- // |_38_|_43_|_48_|_53_|
- // | |____|____|____/ iterator at traversals[1]
- // |
- // v
- // ___________________
- // |_39_|_40_|_41_|_42_| (leaf, the last leftmost)
- // \_________| iterator at traversals[0]
- match min {
- Included(key) | Excluded(key) =>
- while let Some(left) = leftmost {
- let is_leaf = left.is_leaf();
- let mut iter = left.$as_slices_internal().slice_from(key).$iter();
- leftmost = if is_leaf {
- None
- } else {
- // Only change the state of nodes with edges.
- iter.next_edge_item()
- };
- traversals.push_back(iter);
- },
- _ => {}
- }
- // If the leftmost iterator starts with an element, then it was an exact match.
- if let (Excluded(_), Some(leftmost_iter)) = (min, traversals.back_mut()) {
- // Drop this excluded element. `next_kv_item` has no effect when
- // the next item is an edge.
- leftmost_iter.next_kv_item();
- }
+ /// Ensures a value is in the entry by inserting the result of the default function if empty,
+ /// and returns a mutable reference to the value in the entry.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
+ match self {
+ Occupied(entry) => entry.into_mut(),
+ Vacant(entry) => entry.insert(default()),
+ }
+ }
+}
- // The code for the right side is similar.
- match max {
- Included(key) | Excluded(key) =>
- while let Some(right) = rightmost {
- let is_leaf = right.is_leaf();
- let mut iter = right.$as_slices_internal().slice_to(key).$iter();
- rightmost = if is_leaf {
- None
- } else {
- iter.next_edge_item_back()
- };
- traversals.push_front(iter);
- },
- _ => {}
- }
- if let (Excluded(_), Some(rightmost_iter)) = (max, traversals.front_mut()) {
- rightmost_iter.next_kv_item_back();
+impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
+ /// Sets the value of the entry with the VacantEntry's key,
+ /// and returns a mutable reference to it.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn insert(self, value: V) -> &'a mut V {
+ *self.length += 1;
+
+ let out_ptr;
+
+ let mut ins_k;
+ let mut ins_v;
+ let mut ins_edge;
+
+ let mut cur_parent = match self.handle.insert(self.key, value) {
+ (Fit(handle), _) => return handle.into_kv_mut().1,
+ (Split(left, k, v, right), ptr) => {
+ ins_k = k;
+ ins_v = v;
+ ins_edge = right;
+ out_ptr = ptr;
+ left.ascend().map_err(|n| n.into_root_mut())
}
+ };
- $Range {
- inner: AbsIter {
- traversals: traversals,
- size: usize::MAX, // unused
+ loop {
+ match cur_parent {
+ Ok(parent) => match parent.insert(ins_k, ins_v, ins_edge) {
+ Fit(_) => return unsafe { &mut *out_ptr },
+ Split(left, k, v, right) => {
+ ins_k = k;
+ ins_v = v;
+ ins_edge = right;
+ cur_parent = left.ascend().map_err(|n| n.into_root_mut());
+ }
+ },
+ Err(root) => {
+ root.push_level().push(ins_k, ins_v, ins_edge);
+ return unsafe { &mut *out_ptr };
}
}
}
- )
+ }
}
-impl<K: Ord, V> BTreeMap<K, V> {
- /// Constructs a double-ended iterator over a sub-range of elements in the map, starting
- /// at min, and ending at max. If min is `Unbounded`, then it will be treated as "negative
- /// infinity", and if max is `Unbounded`, then it will be treated as "positive infinity".
- /// Thus range(Unbounded, Unbounded) will yield the whole collection.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(btree_range, collections_bound)]
- ///
- /// use std::collections::BTreeMap;
- /// use std::collections::Bound::{Included, Unbounded};
- ///
- /// let mut map = BTreeMap::new();
- /// map.insert(3, "a");
- /// map.insert(5, "b");
- /// map.insert(8, "c");
- /// for (&key, &value) in map.range(Included(&4), Included(&8)) {
- /// println!("{}: {}", key, value);
- /// }
- /// assert_eq!(Some((&5, &"b")), map.range(Included(&4), Unbounded).next());
- /// ```
- #[unstable(feature = "btree_range",
- reason = "matches collection reform specification, waiting for dust to settle",
- issue = "27787")]
- pub fn range<Min: ?Sized + Ord, Max: ?Sized + Ord>(&self,
- min: Bound<&Min>,
- max: Bound<&Max>)
- -> Range<K, V>
- where K: Borrow<Min> + Borrow<Max>
- {
- range_impl!(&self.root,
- min,
- max,
- as_slices_internal,
- iter,
- Range,
- edges,
- [])
+impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
+ /// Gets a reference to the value in the entry.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get(&self) -> &V {
+ self.handle.reborrow().into_kv().1
}
- /// Constructs a mutable double-ended iterator over a sub-range of elements in the map, starting
- /// at min, and ending at max. If min is `Unbounded`, then it will be treated as "negative
- /// infinity", and if max is `Unbounded`, then it will be treated as "positive infinity".
- /// Thus range(Unbounded, Unbounded) will yield the whole collection.
- ///
- /// # Examples
- ///
- /// ```
- /// #![feature(btree_range, collections_bound)]
- ///
- /// use std::collections::BTreeMap;
- /// use std::collections::Bound::{Included, Excluded};
- ///
- /// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"].iter()
- /// .map(|&s| (s, 0))
- /// .collect();
- /// for (_, balance) in map.range_mut(Included("B"), Excluded("Cheryl")) {
- /// *balance += 100;
- /// }
- /// for (name, balance) in &map {
- /// println!("{} => {}", name, balance);
- /// }
- /// ```
- #[unstable(feature = "btree_range",
- reason = "matches collection reform specification, waiting for dust to settle",
- issue = "27787")]
- pub fn range_mut<Min: ?Sized + Ord, Max: ?Sized + Ord>(&mut self,
- min: Bound<&Min>,
- max: Bound<&Max>)
- -> RangeMut<K, V>
- where K: Borrow<Min> + Borrow<Max>
- {
- range_impl!(&mut self.root,
- min,
- max,
- as_slices_internal_mut,
- iter_mut,
- RangeMut,
- edges_mut,
- [mut])
+ /// Gets a mutable reference to the value in the entry.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get_mut(&mut self) -> &mut V {
+ self.handle.kv_mut().1
}
- /// Gets the given key's corresponding entry in the map for in-place manipulation.
- ///
- /// # Examples
- ///
- /// ```
- /// use std::collections::BTreeMap;
- ///
- /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
- ///
- /// // count the number of occurrences of letters in the vec
- /// for x in vec!["a","b","a","c","a","b"] {
- /// *count.entry(x).or_insert(0) += 1;
- /// }
- ///
- /// assert_eq!(count["a"], 3);
- /// ```
+ /// Converts the entry into a mutable reference to its value.
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn entry(&mut self, mut key: K) -> Entry<K, V> {
- // same basic logic of `swap` and `pop`, blended together
- let mut stack = stack::PartialSearchStack::new(self);
- loop {
- let result = stack.with(move |pusher, node| {
- match Node::search(node, &key) {
- Found(handle) => {
- // Perfect match
- Finished(Occupied(OccupiedEntry { stack: pusher.seal(handle) }))
- }
- GoDown(handle) => {
- match handle.force() {
- Leaf(leaf_handle) => {
- Finished(Vacant(VacantEntry {
- stack: pusher.seal(leaf_handle),
- key: key,
- }))
- }
- Internal(internal_handle) => {
- Continue((pusher.push(internal_handle), key))
- }
- }
- }
- }
- });
- match result {
- Finished(finished) => return finished,
- Continue((new_stack, renewed_key)) => {
- stack = new_stack;
- key = renewed_key;
- }
- }
- }
+ pub fn into_mut(self) -> &'a mut V {
+ self.handle.into_kv_mut().1
}
-}
-impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
- where K: Borrow<Q> + Ord,
- Q: Ord
-{
- type Key = K;
+ /// Sets the value of the entry with the OccupiedEntry's key,
+ /// and returns the entry's old value.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn insert(&mut self, value: V) -> V {
+ mem::replace(self.get_mut(), value)
+ }
- fn get(&self, key: &Q) -> Option<&K> {
- let mut cur_node = &self.root;
- loop {
- match Node::search(cur_node, key) {
- Found(handle) => return Some(handle.into_kv().0),
- GoDown(handle) => {
- match handle.force() {
- Leaf(_) => return None,
- Internal(internal_handle) => {
- cur_node = internal_handle.into_edge();
- continue;
- }
- }
- }
- }
- }
+ /// Takes the value of the entry out of the map, and returns it.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn remove(self) -> V {
+ self.remove_kv().1
}
- fn take(&mut self, key: &Q) -> Option<K> {
- // See `remove` for an explanation of this.
+ fn remove_kv(self) -> (K, V) {
+ *self.length -= 1;
- let mut stack = stack::PartialSearchStack::new(self);
- loop {
- let result = stack.with(move |pusher, node| {
- match Node::search(node, key) {
- Found(handle) => {
- // Perfect match. Terminate the stack here, and remove the entry
- Finished(Some(pusher.seal(handle).remove()))
- }
- GoDown(handle) => {
- // We need to keep searching, try to go down the next edge
- match handle.force() {
- // We're at a leaf; the key isn't in here
- Leaf(_) => Finished(None),
- Internal(internal_handle) => Continue(pusher.push(internal_handle)),
- }
- }
- }
- });
- match result {
- Finished(ret) => return ret.map(|(k, _)| k),
- Continue(new_stack) => stack = new_stack,
+ let (small_leaf, old_key, old_val) = match self.handle.force() {
+ Leaf(leaf) => {
+ let (hole, old_key, old_val) = leaf.remove();
+ (hole.into_node(), old_key, old_val)
+ },
+ Internal(mut internal) => {
+ let key_loc = internal.kv_mut().0 as *mut K;
+ let val_loc = internal.kv_mut().1 as *mut V;
+
+ let to_remove = first_leaf_edge(internal.right_edge().descend()).right_kv().ok();
+ let to_remove = unsafe { unwrap_unchecked(to_remove) };
+
+ let (hole, key, val) = to_remove.remove();
+
+ let old_key = unsafe {
+ mem::replace(&mut *key_loc, key)
+ };
+ let old_val = unsafe {
+ mem::replace(&mut *val_loc, val)
+ };
+
+ (hole.into_node(), old_key, old_val)
+ }
+ };
+
+ // Handle underflow
+ let mut cur_node = small_leaf.forget_type();
+ while cur_node.len() < node::CAPACITY / 2 {
+ match handle_underfull_node(cur_node) {
+ AtRoot => break,
+ EmptyParent(_) => unreachable!(),
+ Merged(parent) => if parent.len() == 0 {
+ // We must be at the root
+ parent.into_root_mut().pop_level();
+ break;
+ } else {
+ cur_node = parent.forget_type();
+ },
+ Stole(_) => break
}
}
+
+ (old_key, old_val)
}
+}
- fn replace(&mut self, mut key: K) -> Option<K> {
- // See `insert` for an explanation of this.
+enum UnderflowResult<'a, K, V> {
+ AtRoot,
+ EmptyParent(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
+ Merged(NodeRef<marker::Mut<'a>, K, V, marker::Internal>),
+ Stole(NodeRef<marker::Mut<'a>, K, V, marker::Internal>)
+}
- let mut stack = stack::PartialSearchStack::new(self);
+fn handle_underfull_node<'a, K, V>(node: NodeRef<marker::Mut<'a>,
+ K, V,
+ marker::LeafOrInternal>)
+ -> UnderflowResult<'a, K, V> {
+ let parent = if let Ok(parent) = node.ascend() {
+ parent
+ } else {
+ return AtRoot;
+ };
+
+ let (is_left, mut handle) = match parent.left_kv() {
+ Ok(left) => (true, left),
+ Err(parent) => match parent.right_kv() {
+ Ok(right) => (false, right),
+ Err(parent) => {
+ return EmptyParent(parent.into_node());
+ }
+ }
+ };
+
+ if handle.can_merge() {
+ return Merged(handle.merge().into_node());
+ } else {
+ unsafe {
+ let (k, v, edge) = if is_left {
+ handle.reborrow_mut().left_edge().descend().pop()
+ } else {
+ handle.reborrow_mut().right_edge().descend().pop_front()
+ };
- loop {
- let result = stack.with(move |pusher, node| {
- match Node::search::<K, _>(node, &key) {
- Found(mut handle) => {
- mem::swap(handle.key_mut(), &mut key);
- Finished(Some(key))
- }
- GoDown(handle) => {
- match handle.force() {
- Leaf(leaf_handle) => {
- pusher.seal(leaf_handle).insert(key, ());
- Finished(None)
- }
- Internal(internal_handle) => {
- Continue((pusher.push(internal_handle), key, ()))
- }
- }
- }
+ let k = mem::replace(handle.reborrow_mut().into_kv_mut().0, k);
+ let v = mem::replace(handle.reborrow_mut().into_kv_mut().1, v);
+
+ // FIXME: reuse cur_node?
+ if is_left {
+ match handle.reborrow_mut().right_edge().descend().force() {
+ Leaf(mut leaf) => leaf.push_front(k, v),
+ Internal(mut internal) => internal.push_front(k, v, edge.unwrap())
}
- });
- match result {
- Finished(ret) => return ret,
- Continue((new_stack, renewed_key, _)) => {
- stack = new_stack;
- key = renewed_key;
+ } else {
+ match handle.reborrow_mut().left_edge().descend().force() {
+ Leaf(mut leaf) => leaf.push(k, v),
+ Internal(mut internal) => internal.push(k, v, edge.unwrap())
}
}
}
+
+ return Stole(handle.into_node());
}
}
// except according to those terms.
mod node;
+mod search;
pub mod map;
pub mod set;
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// This module represents all the internal representation and logic for a B-Tree's node
-// with a safe interface, so that BTreeMap itself does not depend on any of these details.
-
-pub use self::InsertionResult::*;
-pub use self::SearchResult::*;
-pub use self::ForceResult::*;
-pub use self::TraversalItem::*;
+// This is an attempt at an implementation following the ideal
+//
+// ```
+// struct BTreeMap<K, V> {
+// height: usize,
+// root: Option<Box<Node<K, V, height>>>
+// }
+//
+// struct Node<K, V, height: usize> {
+// keys: [K; 2 * B - 1],
+// vals: [V; 2 * B - 1],
+// edges: if height > 0 {
+// [Box<Node<K, V, height - 1>>; 2 * B]
+// } else { () },
+// parent: *const Node<K, V, height + 1>,
+// parent_idx: u16,
+// len: u16,
+// }
+// ```
+//
+// Since Rust doesn't actually have dependent types and polymorphic recursion,
+// we make do with lots of unsafety.
-use core::cmp::Ordering::{Greater, Less, Equal};
-use core::intrinsics::arith_offset;
-use core::iter::Zip;
-use core::marker::PhantomData;
-use core::ops::{Deref, DerefMut, Index, IndexMut};
-use core::ptr::Unique;
-use core::{slice, mem, ptr, cmp};
use alloc::heap;
-
-use borrow::Borrow;
-
-/// Represents the result of an Insertion: either the item fit, or the node had to split
-pub enum InsertionResult<K, V> {
- /// The inserted element fit
- Fit,
- /// The inserted element did not fit, so the node was split
- Split(K, V, Node<K, V>),
-}
-
-/// Represents the result of a search for a key in a single node
-pub enum SearchResult<NodeRef> {
- /// The element was found at the given index
- Found(Handle<NodeRef, handle::KV, handle::LeafOrInternal>),
- /// The element wasn't found, but if it's anywhere, it must be beyond this edge
- GoDown(Handle<NodeRef, handle::Edge, handle::LeafOrInternal>),
-}
-
-/// A B-Tree Node. We keep keys/edges/values separate to optimize searching for keys.
-#[unsafe_no_drop_flag]
-pub struct Node<K, V> {
- // To avoid the need for multiple allocations, we allocate a single buffer with enough space
- // for `capacity` keys, `capacity` values, and (in internal nodes) `capacity + 1` edges.
- // Despite this, we store three separate pointers to the three "chunks" of the buffer because
- // the performance drops significantly if the locations of the vals and edges need to be
- // recalculated upon access.
- //
- // These will never be null during normal usage of a `Node`. However, to avoid the need for a
- // drop flag, `Node::drop` zeroes `keys`, signaling that the `Node` has already been cleaned
- // up.
- keys: Unique<K>,
- vals: Unique<V>,
-
- // In leaf nodes, this will be None, and no space will be allocated for edges.
- edges: Option<Unique<Node<K, V>>>,
-
- // At any given time, there will be `_len` keys, `_len` values, and (in an internal node)
- // `_len + 1` edges. In a leaf node, there will never be any edges.
- //
- // Note: instead of accessing this field directly, please call the `len()` method, which should
- // be more stable in the face of representation changes.
- _len: usize,
-
- // FIXME(gereeter) It shouldn't be necessary to store the capacity in every node, as it should
- // be constant throughout the tree. Once a solution to this is found, it might be possible to
- // also pass down the offsets into the buffer that vals and edges are stored at, removing the
- // need for those two pointers.
- //
- // Note: instead of accessing this field directly, please call the `capacity()` method, which
- // should be more stable in the face of representation changes.
- _capacity: usize,
-}
-
-pub struct NodeSlice<'a, K: 'a, V: 'a> {
- keys: &'a [K],
- vals: &'a [V],
- pub edges: &'a [Node<K, V>],
- head_is_edge: bool,
- tail_is_edge: bool,
- has_edges: bool,
-}
-
-pub struct MutNodeSlice<'a, K: 'a, V: 'a> {
- keys: &'a [K],
- vals: &'a mut [V],
- pub edges: &'a mut [Node<K, V>],
- head_is_edge: bool,
- tail_is_edge: bool,
- has_edges: bool,
-}
-
-/// Rounds up to a multiple of a power of two. Returns the closest multiple
-/// of `target_alignment` that is higher or equal to `unrounded`.
-///
-/// # Panics
-///
-/// Fails if `target_alignment` is not a power of two.
-#[inline]
-fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
- assert!(target_alignment.is_power_of_two());
- (unrounded + target_alignment - 1) & !(target_alignment - 1)
-}
-
-#[test]
-fn test_rounding() {
- assert_eq!(round_up_to_next(0, 4), 0);
- assert_eq!(round_up_to_next(1, 4), 4);
- assert_eq!(round_up_to_next(2, 4), 4);
- assert_eq!(round_up_to_next(3, 4), 4);
- assert_eq!(round_up_to_next(4, 4), 4);
- assert_eq!(round_up_to_next(5, 4), 8);
-}
-
-// Returns a tuple of (val_offset, edge_offset),
-// from the start of a mallocated array.
-#[inline]
-fn calculate_offsets(keys_size: usize,
- vals_size: usize,
- vals_align: usize,
- edges_align: usize)
- -> (usize, usize) {
- let vals_offset = round_up_to_next(keys_size, vals_align);
- let end_of_vals = vals_offset + vals_size;
-
- let edges_offset = round_up_to_next(end_of_vals, edges_align);
-
- (vals_offset, edges_offset)
-}
-
-// Returns a tuple of (minimum required alignment, array_size),
-// from the start of a mallocated array.
-#[inline]
-fn calculate_allocation(keys_size: usize,
- keys_align: usize,
- vals_size: usize,
- vals_align: usize,
- edges_size: usize,
- edges_align: usize)
- -> (usize, usize) {
- let (_, edges_offset) = calculate_offsets(keys_size, vals_size, vals_align, edges_align);
- let end_of_edges = edges_offset + edges_size;
-
- let min_align = cmp::max(keys_align, cmp::max(vals_align, edges_align));
-
- (min_align, end_of_edges)
-}
-
-#[test]
-fn test_offset_calculation() {
- assert_eq!(calculate_allocation(128, 8, 15, 1, 4, 4), (8, 148));
- assert_eq!(calculate_allocation(3, 1, 2, 1, 1, 1), (1, 6));
- assert_eq!(calculate_allocation(6, 2, 12, 4, 24, 8), (8, 48));
- assert_eq!(calculate_offsets(128, 15, 1, 4), (128, 144));
- assert_eq!(calculate_offsets(3, 2, 1, 1), (3, 5));
- assert_eq!(calculate_offsets(6, 12, 4, 8), (8, 24));
+use core::marker::PhantomData;
+use core::mem;
+use core::nonzero::NonZero;
+use core::ptr::{self, Unique};
+use core::slice;
+
+use boxed::Box;
+
+const B: usize = 6;
+pub const CAPACITY: usize = 2 * B - 1;
+
+struct LeafNode<K, V> {
+ keys: [K; CAPACITY],
+ vals: [V; CAPACITY],
+ parent: *const InternalNode<K, V>,
+ parent_idx: u16,
+ len: u16,
}
-fn calculate_allocation_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
- let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::align_of::<K>());
- let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::align_of::<V>());
- let (edges_size, edges_align) = if is_leaf {
- // allocate one edge to ensure that we don't pass size 0 to `heap::allocate`
- if mem::size_of::<K>() == 0 && mem::size_of::<V>() == 0 {
- (1, mem::align_of::<Node<K, V>>())
- } else {
- (0, 1)
+impl<K, V> LeafNode<K, V> {
+ unsafe fn new() -> Self {
+ LeafNode {
+ keys: mem::uninitialized(),
+ vals: mem::uninitialized(),
+ parent: ptr::null(),
+ parent_idx: mem::uninitialized(),
+ len: 0
}
- } else {
- ((capacity + 1) * mem::size_of::<Node<K, V>>(),
- mem::align_of::<Node<K, V>>())
- };
-
- calculate_allocation(keys_size,
- keys_align,
- vals_size,
- vals_align,
- edges_size,
- edges_align)
-}
-
-fn calculate_offsets_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
- let keys_size = capacity * mem::size_of::<K>();
- let vals_size = capacity * mem::size_of::<V>();
- let vals_align = mem::align_of::<V>();
- let edges_align = if is_leaf {
- 1
- } else {
- mem::align_of::<Node<K, V>>()
- };
-
- calculate_offsets(keys_size, vals_size, vals_align, edges_align)
+ }
}
-/// An iterator over a slice that owns the elements of the slice but not the allocation.
-struct RawItems<T> {
- head: *const T,
- tail: *const T,
+// We use repr(C) so that a pointer to an internal node can be
+// directly used as a pointer to a leaf node
+#[repr(C)]
+struct InternalNode<K, V> {
+ data: LeafNode<K, V>,
+ edges: [BoxedNode<K, V>; 2 * B],
}
-impl<T> RawItems<T> {
- unsafe fn from_slice(slice: &[T]) -> RawItems<T> {
- RawItems::from_parts(slice.as_ptr(), slice.len())
- }
-
- unsafe fn from_parts(ptr: *const T, len: usize) -> RawItems<T> {
- if mem::size_of::<T>() == 0 {
- RawItems {
- head: ptr,
- tail: arith_offset(ptr as *const i8, len as isize) as *const T,
- }
- } else {
- RawItems {
- head: ptr,
- tail: ptr.offset(len as isize),
- }
- }
- }
-
- unsafe fn push(&mut self, val: T) {
- ptr::write(self.tail as *mut T, val);
-
- if mem::size_of::<T>() == 0 {
- self.tail = arith_offset(self.tail as *const i8, 1) as *const T;
- } else {
- self.tail = self.tail.offset(1);
+impl<K, V> InternalNode<K, V> {
+ unsafe fn new() -> Self {
+ InternalNode {
+ data: LeafNode::new(),
+ edges: mem::uninitialized()
}
}
}
-impl<T> Iterator for RawItems<T> {
- type Item = T;
-
- fn next(&mut self) -> Option<T> {
- if self.head == self.tail {
- None
- } else {
- unsafe {
- let ret = Some(ptr::read(self.head));
-
- if mem::size_of::<T>() == 0 {
- self.head = arith_offset(self.head as *const i8, 1) as *const T;
- } else {
- self.head = self.head.offset(1);
- }
+struct BoxedNode<K, V> {
+ ptr: Unique<LeafNode<K, V>> // we don't know if this points to a leaf node or an internal node
+}
- ret
- }
+impl<K, V> BoxedNode<K, V> {
+ fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
+ unsafe {
+ BoxedNode { ptr: Unique::new(Box::into_raw(node)) }
}
}
-}
-
-impl<T> DoubleEndedIterator for RawItems<T> {
- fn next_back(&mut self) -> Option<T> {
- if self.head == self.tail {
- None
- } else {
- unsafe {
- if mem::size_of::<T>() == 0 {
- self.tail = arith_offset(self.tail as *const i8, -1) as *const T;
- } else {
- self.tail = self.tail.offset(-1);
- }
- Some(ptr::read(self.tail))
- }
+ fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
+ unsafe {
+ BoxedNode { ptr: Unique::new(Box::into_raw(node) as *mut LeafNode<K, V>) }
}
}
-}
-impl<T> Drop for RawItems<T> {
- #[unsafe_destructor_blind_to_params]
- fn drop(&mut self) {
- for _ in self {}
+ unsafe fn from_ptr(ptr: NonZero<*mut LeafNode<K, V>>) -> Self {
+ BoxedNode { ptr: Unique::new(*ptr) }
}
-}
-impl<K, V> Drop for Node<K, V> {
- #[unsafe_destructor_blind_to_params]
- fn drop(&mut self) {
- if self.keys.is_null() ||
- (unsafe { self.keys.get() as *const K as usize == mem::POST_DROP_USIZE }) {
- // Since we have #[unsafe_no_drop_flag], we have to watch
- // out for the sentinel value being stored in self.keys. (Using
- // null is technically a violation of the `Unique`
- // requirements, though.)
- return;
- }
-
- // Do the actual cleanup.
+ fn as_ptr(&self) -> NonZero<*mut LeafNode<K, V>> {
unsafe {
- drop(RawItems::from_slice(self.keys()));
- drop(RawItems::from_slice(self.vals()));
- drop(RawItems::from_slice(self.edges()));
-
- self.destroy();
+ NonZero::new(*self.ptr)
}
-
- self.keys = unsafe { Unique::new(ptr::null_mut()) };
}
}
-impl<K, V> Node<K, V> {
- /// Make a new internal node. The caller must initialize the result to fix the invariant that
- /// there are `len() + 1` edges.
- unsafe fn new_internal(capacity: usize) -> Node<K, V> {
- let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, false);
-
- let buffer = heap::allocate(size, alignment);
- if buffer.is_null() {
- ::alloc::oom();
- }
+/// An owned tree. Note that despite being owned, this does not have a destructor,
+/// and must be cleaned up manually.
+pub struct Root<K, V> {
+ node: BoxedNode<K, V>,
+ height: usize
+}
- let (vals_offset, edges_offset) = calculate_offsets_generic::<K, V>(capacity, false);
+unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> { }
+unsafe impl<K: Send, V: Send> Send for Root<K, V> { }
- Node {
- keys: Unique::new(buffer as *mut K),
- vals: Unique::new(buffer.offset(vals_offset as isize) as *mut V),
- edges: Some(Unique::new(buffer.offset(edges_offset as isize) as *mut Node<K, V>)),
- _len: 0,
- _capacity: capacity,
+impl<K, V> Root<K, V> {
+ pub fn new_leaf() -> Self {
+ Root {
+ node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })),
+ height: 0
}
}
- /// Make a new leaf node
- fn new_leaf(capacity: usize) -> Node<K, V> {
- let (alignment, size) = calculate_allocation_generic::<K, V>(capacity, true);
-
- let buffer = unsafe { heap::allocate(size, alignment) };
- if buffer.is_null() {
- ::alloc::oom();
- }
-
- let (vals_offset, _) = calculate_offsets_generic::<K, V>(capacity, true);
-
- Node {
- keys: unsafe { Unique::new(buffer as *mut K) },
- vals: unsafe { Unique::new(buffer.offset(vals_offset as isize) as *mut V) },
- edges: None,
- _len: 0,
- _capacity: capacity,
+ pub fn as_ref(&self)
+ -> NodeRef<marker::Immut, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: self as *const _ as *mut _,
+ _marker: PhantomData,
}
}
- unsafe fn destroy(&mut self) {
- let (alignment, size) = calculate_allocation_generic::<K, V>(self.capacity(),
- self.is_leaf());
- heap::deallocate(*self.keys as *mut u8, size, alignment);
- }
-
- #[inline]
- pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
- unsafe {
- (slice::from_raw_parts(*self.keys, self.len()),
- slice::from_raw_parts(*self.vals, self.len()))
+ pub fn as_mut(&mut self)
+ -> NodeRef<marker::Mut, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: self as *mut _,
+ _marker: PhantomData,
}
}
- #[inline]
- pub fn as_slices_mut<'a>(&'a mut self) -> (&'a mut [K], &'a mut [V]) {
- unsafe {
- (slice::from_raw_parts_mut(*self.keys, self.len()),
- slice::from_raw_parts_mut(*self.vals, self.len()))
+ pub fn into_ref(self)
+ -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: ptr::null_mut(), // FIXME: Is there anything better to do here?
+ _marker: PhantomData,
}
}
- #[inline]
- pub fn as_slices_internal<'b>(&'b self) -> NodeSlice<'b, K, V> {
- let is_leaf = self.is_leaf();
- let (keys, vals) = self.as_slices();
- let edges: &[_] = if self.is_leaf() {
- &[]
- } else {
- unsafe {
- let data = match self.edges {
- None => heap::EMPTY as *const Node<K, V>,
- Some(ref p) => **p as *const Node<K, V>,
- };
- slice::from_raw_parts(data, self.len() + 1)
- }
- };
- NodeSlice {
- keys: keys,
- vals: vals,
- edges: edges,
- head_is_edge: true,
- tail_is_edge: true,
- has_edges: !is_leaf,
- }
- }
+ /// Add a new internal node with a single edge, pointing to the previous root, and make that
+ /// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
+ pub fn push_level(&mut self)
+ -> NodeRef<marker::Mut, K, V, marker::Internal> {
+ let mut new_node = Box::new(unsafe { InternalNode::new() });
+ new_node.edges[0] = unsafe { BoxedNode::from_ptr(self.node.as_ptr()) };
- #[inline]
- pub fn as_slices_internal_mut<'b>(&'b mut self) -> MutNodeSlice<'b, K, V> {
- let len = self.len();
- let is_leaf = self.is_leaf();
- let keys = unsafe { slice::from_raw_parts_mut(*self.keys, len) };
- let vals = unsafe { slice::from_raw_parts_mut(*self.vals, len) };
- let edges: &mut [_] = if is_leaf {
- &mut []
- } else {
- unsafe {
- let data = match self.edges {
- None => heap::EMPTY as *mut Node<K, V>,
- Some(ref mut p) => **p as *mut Node<K, V>,
- };
- slice::from_raw_parts_mut(data, len + 1)
- }
- };
- MutNodeSlice {
- keys: keys,
- vals: vals,
- edges: edges,
- head_is_edge: true,
- tail_is_edge: true,
- has_edges: !is_leaf,
- }
- }
+ self.node = BoxedNode::from_internal(new_node);
+ self.height += 1;
- #[inline]
- pub fn keys<'a>(&'a self) -> &'a [K] {
- self.as_slices().0
- }
+ let mut ret = NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: self as *mut _,
+ _marker: PhantomData
+ };
- #[inline]
- pub fn keys_mut<'a>(&'a mut self) -> &'a mut [K] {
- self.as_slices_mut().0
- }
+ unsafe {
+ ret.reborrow_mut().first_edge().correct_parent_link();
+ }
- #[inline]
- pub fn vals<'a>(&'a self) -> &'a [V] {
- self.as_slices().1
+ ret
}
- #[inline]
- pub fn vals_mut<'a>(&'a mut self) -> &'a mut [V] {
- self.as_slices_mut().1
- }
+/// Remove the root node, using its first child as the new root. This cannot be called when
+ /// the tree consists only of a leaf node. As it is intended only to be called when the root
+/// has only one edge, no cleanup is done on any of the other children or elements of the root.
+ /// This decreases the height by 1 and is the opposite of `push_level`.
+ pub fn pop_level(&mut self) {
+ debug_assert!(self.height > 0);
- #[inline]
- pub fn edges<'a>(&'a self) -> &'a [Node<K, V>] {
- self.as_slices_internal().edges
- }
+ let top = *self.node.ptr as *mut u8;
- #[inline]
- pub fn edges_mut<'a>(&'a mut self) -> &'a mut [Node<K, V>] {
- self.as_slices_internal_mut().edges
- }
-}
-
-// FIXME(gereeter) Write an efficient clone_from
-impl<K: Clone, V: Clone> Clone for Node<K, V> {
- fn clone(&self) -> Node<K, V> {
- let mut ret = if self.is_leaf() {
- Node::new_leaf(self.capacity())
- } else {
- unsafe { Node::new_internal(self.capacity()) }
+ self.node = unsafe {
+ BoxedNode::from_ptr(self.as_mut()
+ .cast_unchecked::<marker::Internal>()
+ .first_edge()
+ .descend()
+ .node)
};
+ self.height -= 1;
+ self.as_mut().as_leaf_mut().parent = ptr::null();
unsafe {
- // For failure safety
- let mut keys = RawItems::from_parts(ret.keys().as_ptr(), 0);
- let mut vals = RawItems::from_parts(ret.vals().as_ptr(), 0);
- let mut edges = RawItems::from_parts(ret.edges().as_ptr(), 0);
-
- for key in self.keys() {
- keys.push(key.clone())
- }
- for val in self.vals() {
- vals.push(val.clone())
- }
- for edge in self.edges() {
- edges.push(edge.clone())
- }
-
- mem::forget(keys);
- mem::forget(vals);
- mem::forget(edges);
-
- ret._len = self.len();
+ heap::deallocate(
+ top,
+ mem::size_of::<InternalNode<K, V>>(),
+ mem::align_of::<InternalNode<K, V>>()
+ );
}
-
- ret
}
}
-/// A reference to something in the middle of a `Node`. There are two `Type`s of `Handle`s,
-/// namely `KV` handles, which point to key/value pairs, and `Edge` handles, which point to edges
-/// before or after key/value pairs. Methods are provided for removing pairs, inserting into edges,
-/// accessing the stored values, and moving around the `Node`.
-///
-/// This handle is generic, and can take any sort of reference to a `Node`. The reason for this is
-/// two-fold. First of all, it reduces the amount of repetitive code, implementing functions that
-/// don't need mutability on both mutable and immutable references. Secondly and more importantly,
-/// this allows users of the `Handle` API to associate metadata with the reference. This is used in
-/// `BTreeMap` to give `Node`s temporary "IDs" that persist to when the `Node` is used in a
-/// `Handle`.
+/// A reference to a node.
///
-/// # A note on safety
-///
-/// Unfortunately, the extra power afforded by being generic also means that safety can technically
-/// be broken. For sensible implementations of `Deref` and `DerefMut`, these handles are perfectly
-/// safe. As long as repeatedly calling `.deref()` results in the same Node being returned each
-/// time, everything should work fine. However, if the `Deref` implementation swaps in multiple
-/// different nodes, then the indices that are assumed to be in bounds suddenly stop being so. For
-/// example:
-///
-/// ```rust,ignore
-/// struct Nasty<'a> {
-/// first: &'a Node<usize, usize>,
-/// second: &'a Node<usize, usize>,
-/// flag: &'a Cell<bool>,
-/// }
-///
-/// impl<'a> Deref for Nasty<'a> {
-/// type Target = Node<usize, usize>;
-///
-/// fn deref(&self) -> &Node<usize, usize> {
-/// if self.flag.get() {
-/// &*self.second
-/// } else {
-/// &*self.first
-/// }
-/// }
-/// }
-///
-/// fn main() {
-/// let flag = Cell::new(false);
-/// let mut small_node = Node::make_leaf_root(3);
-/// let mut large_node = Node::make_leaf_root(100);
-///
-/// for i in 0..100 {
-/// // Insert to the end
-/// large_node.edge_handle(i).insert_as_leaf(i, i);
-/// }
-///
-/// let nasty = Nasty {
-/// first: &large_node,
-/// second: &small_node,
-/// flag: &flag
-/// }
-///
-/// // The handle points at index 75.
-/// let handle = Node::search(nasty, 75);
-///
-/// // Now the handle still points at index 75, but on the small node, which has no index 75.
-/// flag.set(true);
-///
-/// println!("Uninitialized memory: {:?}", handle.into_kv());
-/// }
-/// ```
-#[derive(Copy, Clone)]
-pub struct Handle<NodeRef, Type, NodeType> {
- node: NodeRef,
- index: usize,
- marker: PhantomData<(Type, NodeType)>,
+/// This type has a number of parameters that control how it acts:
+/// - `BorrowType`: This can be `Immut<'a>` or `Mut<'a>` for some `'a` or `Owned`.
+/// When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
+/// when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
+/// and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
+/// - `K` and `V`: These control what types of things are stored in the nodes.
+/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
+/// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
+/// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
+/// `NodeRef` could be pointing to either type of node.
+pub struct NodeRef<BorrowType, K, V, Type> {
+ height: usize,
+ node: NonZero<*mut LeafNode<K, V>>,
+ root: *mut Root<K, V>,
+ _marker: PhantomData<(BorrowType, Type)>
}
-pub mod handle {
- // Handle types.
- pub enum KV {}
- pub enum Edge {}
-
- // Handle node types.
- pub enum LeafOrInternal {}
- pub enum Leaf {}
- pub enum Internal {}
+impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> { }
+impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
+ fn clone(&self) -> Self {
+ *self
+ }
}
-impl<K: Ord, V> Node<K, V> {
- /// Searches for the given key in the node. If it finds an exact match,
- /// `Found` will be yielded with the matching index. If it doesn't find an exact match,
- /// `GoDown` will be yielded with the index of the subtree the key must lie in.
- pub fn search<Q: ?Sized, NodeRef: Deref<Target = Node<K, V>>>(node: NodeRef,
- key: &Q)
- -> SearchResult<NodeRef>
- where K: Borrow<Q>,
- Q: Ord
- {
- // FIXME(Gankro): Tune when to search linear or binary based on B (and maybe K/V).
- // For the B configured as of this writing (B = 6), binary search was *significantly*
- // worse for usizes.
- match node.as_slices_internal().search_linear(key) {
- (index, true) => {
- Found(Handle {
- node: node,
- index: index,
- marker: PhantomData,
- })
- }
- (index, false) => {
- GoDown(Handle {
- node: node,
- index: index,
- marker: PhantomData,
- })
- }
+unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync
+ for NodeRef<BorrowType, K, V, Type> { }
+
+unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send
+ for NodeRef<marker::Immut<'a>, K, V, Type> { }
+unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send
+ for NodeRef<marker::Mut<'a>, K, V, Type> { }
+unsafe impl<K: Send, V: Send, Type> Send
+ for NodeRef<marker::Owned, K, V, Type> { }
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
+ fn as_internal(&self) -> &InternalNode<K, V> {
+ unsafe {
+ &*(*self.node as *const InternalNode<K, V>)
}
}
}
-// Public interface
-impl<K, V> Node<K, V> {
- /// Make a leaf root from scratch
- pub fn make_leaf_root(b: usize) -> Node<K, V> {
- Node::new_leaf(capacity_from_b(b))
- }
-
- /// Make an internal root and swap it with an old root
- pub fn make_internal_root(left_and_out: &mut Node<K, V>,
- b: usize,
- key: K,
- value: V,
- right: Node<K, V>) {
- let node = mem::replace(left_and_out,
- unsafe { Node::new_internal(capacity_from_b(b)) });
- left_and_out._len = 1;
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
unsafe {
- ptr::write(left_and_out.keys_mut().get_unchecked_mut(0), key);
- ptr::write(left_and_out.vals_mut().get_unchecked_mut(0), value);
- ptr::write(left_and_out.edges_mut().get_unchecked_mut(0), node);
- ptr::write(left_and_out.edges_mut().get_unchecked_mut(1), right);
+ &mut *(*self.node as *mut InternalNode<K, V>)
}
}
+}
- /// How many key-value pairs the node contains
- pub fn len(&self) -> usize {
- self._len
- }
- /// Does the node not contain any key-value pairs
- pub fn is_empty(&self) -> bool {
- self.len() == 0
+impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
+ pub fn len(&self) -> usize {
+ self.as_leaf().len as usize
}
- /// How many key-value pairs the node can fit
- pub fn capacity(&self) -> usize {
- self._capacity
+ pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
+ }
}
- /// If the node has any children
- pub fn is_leaf(&self) -> bool {
- self.edges.is_none()
+ fn reborrow<'a>(&'a self) -> NodeRef<marker::Immut<'a>, K, V, Type> {
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
+ }
}
- /// if the node has too few elements
- pub fn is_underfull(&self) -> bool {
- self.len() < min_load_from_capacity(self.capacity())
+ fn as_leaf(&self) -> &LeafNode<K, V> {
+ unsafe {
+ &**self.node
+ }
}
- /// if the node cannot fit any more elements
- pub fn is_full(&self) -> bool {
- self.len() == self.capacity()
+ pub fn keys(&self) -> &[K] {
+ self.reborrow().into_slices().0
}
-}
-impl<K, V, NodeRef: Deref<Target = Node<K, V>>, Type, NodeType> Handle<NodeRef, Type, NodeType> {
- /// Returns a reference to the node that contains the pointed-to edge or key/value pair. This
- /// is very different from `edge` and `edge_mut` because those return children of the node
- /// returned by `node`.
- pub fn node(&self) -> &Node<K, V> {
- &*self.node
+ pub fn vals(&self) -> &[V] {
+ self.reborrow().into_slices().1
}
-}
-impl<K, V, NodeRef, Type, NodeType> Handle<NodeRef, Type, NodeType>
- where NodeRef: Deref<Target = Node<K, V>> + DerefMut
-{
- /// Converts a handle into one that stores the same information using a raw pointer. This can
- /// be useful in conjunction with `from_raw` when the type system is insufficient for
- /// determining the lifetimes of the nodes.
- pub fn as_raw(&mut self) -> Handle<*mut Node<K, V>, Type, NodeType> {
- Handle {
- node: &mut *self.node as *mut _,
- index: self.index,
- marker: PhantomData,
+ pub fn ascend(self) -> Result<
+ Handle<
+ NodeRef<
+ BorrowType,
+ K, V,
+ marker::Internal
+ >,
+ marker::Edge
+ >,
+ Self
+ > {
+ if self.as_leaf().parent.is_null() {
+ Err(self)
+ } else {
+ Ok(Handle {
+ node: NodeRef {
+ height: self.height + 1,
+ node: unsafe {
+ NonZero::new(self.as_leaf().parent as *mut LeafNode<K, V>)
+ },
+ root: self.root,
+ _marker: PhantomData
+ },
+ idx: self.as_leaf().parent_idx as usize,
+ _marker: PhantomData
+ })
}
}
-}
-impl<K, V, Type, NodeType> Handle<*mut Node<K, V>, Type, NodeType> {
- /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
- /// stored with a reference. This is an unsafe inverse of `as_raw`, and together they allow
- /// unsafely extending the lifetime of the reference to the `Node`.
- pub unsafe fn from_raw<'a>(&'a self) -> Handle<&'a Node<K, V>, Type, NodeType> {
- Handle {
- node: &*self.node,
- index: self.index,
- marker: PhantomData,
- }
+ pub fn first_edge(self) -> Handle<Self, marker::Edge> {
+ Handle::new_edge(self, 0)
}
- /// Converts from a handle stored with a raw pointer, which isn't directly usable, to a handle
- /// stored with a mutable reference. This is an unsafe inverse of `as_raw`, and together they
- /// allow unsafely extending the lifetime of the reference to the `Node`.
- pub unsafe fn from_raw_mut<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, Type, NodeType> {
- Handle {
- node: &mut *self.node,
- index: self.index,
- marker: PhantomData,
- }
+ pub fn last_edge(self) -> Handle<Self, marker::Edge> {
+ let len = self.len();
+ Handle::new_edge(self, len)
}
}
-impl<'a, K: 'a, V: 'a> Handle<&'a Node<K, V>, handle::Edge, handle::Internal> {
- /// Turns the handle into a reference to the edge it points at. This is necessary because the
- /// returned pointer has a larger lifetime than what would be returned by `edge` or `edge_mut`,
- /// making it more suitable for moving down a chain of nodes.
- pub fn into_edge(self) -> &'a Node<K, V> {
- unsafe { self.node.edges().get_unchecked(self.index) }
+impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
+ pub unsafe fn deallocate_and_ascend(self) -> Option<
+ Handle<
+ NodeRef<
+ marker::Owned,
+ K, V,
+ marker::Internal
+ >,
+ marker::Edge
+ >
+ > {
+ let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
+ let ret = self.ascend().ok();
+ heap::deallocate(ptr, mem::size_of::<LeafNode<K, V>>(), mem::align_of::<LeafNode<K, V>>());
+ ret
}
}
-impl<'a, K: 'a, V: 'a> Handle<&'a mut Node<K, V>, handle::Edge, handle::Internal> {
- /// Turns the handle into a mutable reference to the edge it points at. This is necessary
- /// because the returned pointer has a larger lifetime than what would be returned by
- /// `edge_mut`, making it more suitable for moving down a chain of nodes.
- pub fn into_edge_mut(self) -> &'a mut Node<K, V> {
- unsafe { self.node.edges_mut().get_unchecked_mut(self.index) }
+impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
+ pub unsafe fn deallocate_and_ascend(self) -> Option<
+ Handle<
+ NodeRef<
+ marker::Owned,
+ K, V,
+ marker::Internal
+ >,
+ marker::Edge
+ >
+ > {
+ let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
+ let ret = self.ascend().ok();
+ heap::deallocate(
+ ptr,
+ mem::size_of::<InternalNode<K, V>>(),
+ mem::align_of::<InternalNode<K, V>>()
+ );
+ ret
}
}
-impl<K, V, NodeRef: Deref<Target = Node<K, V>>> Handle<NodeRef, handle::Edge, handle::Internal> {
- // This doesn't exist because there are no uses for it,
- // but is fine to add, analogous to edge_mut.
- //
- // /// Returns a reference to the edge pointed-to by this handle. This should not be
- // /// confused with `node`, which references the parent node of what is returned here.
- // pub fn edge(&self) -> &Node<K, V>
-}
-
-pub enum ForceResult<NodeRef, Type> {
- Leaf(Handle<NodeRef, Type, handle::Leaf>),
- Internal(Handle<NodeRef, Type, handle::Internal>),
-}
+impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ unsafe fn cast_unchecked<NewType>(&mut self)
+ -> NodeRef<marker::Mut, K, V, NewType> {
-impl<K, V, NodeRef: Deref<Target = Node<K, V>>, Type>
- Handle<NodeRef, Type, handle::LeafOrInternal>
-{
- /// Figure out whether this handle is pointing to something in a leaf node or to something in
- /// an internal node, clarifying the type according to the result.
- pub fn force(self) -> ForceResult<NodeRef, Type> {
- if self.node.is_leaf() {
- Leaf(Handle {
- node: self.node,
- index: self.index,
- marker: PhantomData,
- })
- } else {
- Internal(Handle {
- node: self.node,
- index: self.index,
- marker: PhantomData,
- })
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
}
}
-}
-impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Leaf>
- where NodeRef: Deref<Target = Node<K, V>> + DerefMut
-{
- /// Tries to insert this key-value pair at the given index in this leaf node
- /// If the node is full, we have to split it.
- ///
- /// Returns a *mut V to the inserted value, because the caller may want this when
- /// they're done mutating the tree, but we don't want to borrow anything for now.
- pub fn insert_as_leaf(mut self, key: K, value: V) -> (InsertionResult<K, V>, *mut V) {
- if !self.node.is_full() {
- // The element can fit, just insert it
- (Fit, unsafe { self.node.insert_kv(self.index, key, value) as *mut _ })
- } else {
- // The element can't fit, this node is full. Split it into two nodes.
- let (new_key, new_val, mut new_right) = self.node.split();
- let left_len = self.node.len();
-
- let ptr = unsafe {
- if self.index <= left_len {
- self.node.insert_kv(self.index, key, value)
- } else {
- // We need to subtract 1 because in splitting we took out new_key and new_val.
- // Just being in the right node means we are past left_len k/v pairs in the
- // left node and 1 k/v pair in the parent node.
- new_right.insert_kv(self.index - left_len - 1, key, value)
- }
- } as *mut _;
-
- (Split(new_key, new_val, new_right), ptr)
- }
- }
-}
-
-impl<K, V, NodeRef> Handle<NodeRef, handle::Edge, handle::Internal>
- where NodeRef: Deref<Target = Node<K, V>> + DerefMut
-{
- /// Returns a mutable reference to the edge pointed-to by this handle. This should not be
- /// confused with `node`, which references the parent node of what is returned here.
- pub fn edge_mut(&mut self) -> &mut Node<K, V> {
- unsafe { self.node.edges_mut().get_unchecked_mut(self.index) }
- }
-
- /// Tries to insert this key-value pair at the given index in this internal node
- /// If the node is full, we have to split it.
- pub fn insert_as_internal(mut self,
- key: K,
- value: V,
- right: Node<K, V>)
- -> InsertionResult<K, V> {
- if !self.node.is_full() {
- // The element can fit, just insert it
- unsafe {
- self.node.insert_kv(self.index, key, value);
- self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
- }
- Fit
- } else {
- // The element can't fit, this node is full. Split it into two nodes.
- let (new_key, new_val, mut new_right) = self.node.split();
- let left_len = self.node.len();
- if self.index <= left_len {
- unsafe {
- self.node.insert_kv(self.index, key, value);
- self.node.insert_edge(self.index + 1, right); // +1 to insert to the right
- }
- } else {
- unsafe {
- // The -1 here is for the same reason as in insert_as_internal - because we
- // split, there are actually left_len + 1 k/v pairs before the right node, with
- // the extra 1 being put in the parent.
- new_right.insert_kv(self.index - left_len - 1, key, value);
- new_right.insert_edge(self.index - left_len, right);
- }
- }
-
- Split(new_key, new_val, new_right)
+ unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut, K, V, Type> {
+ NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
}
}
- /// Handle an underflow in this node's child. We favor handling "to the left" because we know
- /// we're empty, but our neighbour can be full. Handling to the left means when we choose to
- /// steal, we pop off the end of our neighbour (always fast) and "unshift" ourselves
- /// (always slow, but at least faster since we know we're half-empty).
- /// Handling "to the right" reverses these roles. Of course, we merge whenever possible
- /// because we want dense nodes, and merging is about equal work regardless of direction.
- pub fn handle_underflow(mut self) {
+ fn as_leaf_mut(&mut self) -> &mut LeafNode<K, V> {
unsafe {
- if self.index > 0 {
- self.handle_underflow_to_left();
- } else {
- self.handle_underflow_to_right();
- }
+ &mut **self.node
}
}
- /// Right is underflowed. Tries to steal from left,
- /// but merges left and right if left is low too.
- unsafe fn handle_underflow_to_left(&mut self) {
- let left_len = self.node.edges()[self.index - 1].len();
- if left_len > min_load_from_capacity(self.node.capacity()) {
- self.left_kv().steal_rightward();
- } else {
- self.left_kv().merge_children();
- }
+ pub fn keys_mut(&mut self) -> &mut [K] {
+ unsafe { self.reborrow_mut().into_slices_mut().0 }
}
- /// Left is underflowed. Tries to steal from the right,
- /// but merges left and right if right is low too.
- unsafe fn handle_underflow_to_right(&mut self) {
- let right_len = self.node.edges()[self.index + 1].len();
- if right_len > min_load_from_capacity(self.node.capacity()) {
- self.right_kv().steal_leftward();
- } else {
- self.right_kv().merge_children();
- }
+ pub fn vals_mut(&mut self) -> &mut [V] {
+ unsafe { self.reborrow_mut().into_slices_mut().1 }
}
}
-impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::Edge, NodeType>
- where NodeRef: Deref<Target = Node<K, V>> + DerefMut
-{
- /// Gets the handle pointing to the key/value pair just to the left of the pointed-to edge.
- /// This is unsafe because the handle might point to the first edge in the node, which has no
- /// pair to its left.
- unsafe fn left_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
- Handle {
- node: &mut *self.node,
- index: self.index - 1,
- marker: PhantomData,
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
+ pub fn into_slices(self) -> (&'a [K], &'a [V]) {
+ unsafe {
+ (
+ slice::from_raw_parts(
+ self.as_leaf().keys.as_ptr(),
+ self.len()
+ ),
+ slice::from_raw_parts(
+ self.as_leaf().vals.as_ptr(),
+ self.len()
+ )
+ )
}
}
+}
- /// Gets the handle pointing to the key/value pair just to the right of the pointed-to edge.
- /// This is unsafe because the handle might point to the last edge in the node, which has no
- /// pair to its right.
- unsafe fn right_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
- Handle {
- node: &mut *self.node,
- index: self.index,
- marker: PhantomData,
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ pub fn into_root_mut(self) -> &'a mut Root<K, V> {
+ unsafe {
+ &mut *self.root
}
}
-}
-impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a Node<K, V>, handle::KV, NodeType> {
- /// Turns the handle into references to the key and value it points at. This is necessary
- /// because the returned pointers have larger lifetimes than what would be returned by `key`
- /// or `val`.
- pub fn into_kv(self) -> (&'a K, &'a V) {
- let (keys, vals) = self.node.as_slices();
+ pub fn into_slices_mut(mut self) -> (&'a mut [K], &'a mut [V]) {
unsafe {
- (keys.get_unchecked(self.index),
- vals.get_unchecked(self.index))
+ (
+ slice::from_raw_parts_mut(
+ &mut self.as_leaf_mut().keys as *mut [K] as *mut K,
+ self.len()
+ ),
+ slice::from_raw_parts_mut(
+ &mut self.as_leaf_mut().vals as *mut [V] as *mut V,
+ self.len()
+ )
+ )
}
}
}
-impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
- /// Turns the handle into mutable references to the key and value it points at. This is
- /// necessary because the returned pointers have larger lifetimes than what would be returned
- /// by `key_mut` or `val_mut`.
- pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
- let (keys, vals) = self.node.as_slices_mut();
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
+ pub fn push(&mut self, key: K, val: V) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() < CAPACITY);
+
+ let idx = self.len();
+
unsafe {
- (keys.get_unchecked_mut(self.index),
- vals.get_unchecked_mut(self.index))
+ ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
+ ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
}
+
+ self.as_leaf_mut().len += 1;
}
- /// Convert this handle into one pointing at the edge immediately to the left of the key/value
- /// pair pointed-to by this handle. This is useful because it returns a reference with larger
- /// lifetime than `left_edge`.
- pub fn into_left_edge(self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
- Handle {
- node: &mut *self.node,
- index: self.index,
- marker: PhantomData,
+ pub fn push_front(&mut self, key: K, val: V) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() < CAPACITY);
+
+ unsafe {
+ slice_insert(self.keys_mut(), 0, key);
+ slice_insert(self.vals_mut(), 0, val);
}
+
+ self.as_leaf_mut().len += 1;
}
}
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(edge.height == self.height - 1);
+ debug_assert!(self.len() < CAPACITY);
-impl<'a, K: 'a, V: 'a, NodeRef: Deref<Target = Node<K, V>> + 'a, NodeType> Handle<NodeRef,
- handle::KV,
- NodeType> {
- // These are fine to include, but are currently unneeded.
- //
- // /// Returns a reference to the key pointed-to by this handle. This doesn't return a
- // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
- // /// handle.
- // pub fn key(&'a self) -> &'a K {
- // unsafe { self.node.keys().get_unchecked(self.index) }
- // }
- //
- // /// Returns a reference to the value pointed-to by this handle. This doesn't return a
- // /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
- // /// handle.
- // pub fn val(&'a self) -> &'a V {
- // unsafe { self.node.vals().get_unchecked(self.index) }
- // }
-}
+ let idx = self.len();
-impl<'a, K: 'a, V: 'a, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType>
- where NodeRef: 'a + Deref<Target = Node<K, V>> + DerefMut
-{
- /// Returns a mutable reference to the key pointed-to by this handle. This doesn't return a
- /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
- /// handle.
- pub fn key_mut(&'a mut self) -> &'a mut K {
- unsafe { self.node.keys_mut().get_unchecked_mut(self.index) }
- }
+ unsafe {
+ ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
+ ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
+ ptr::write(self.as_internal_mut().edges.get_unchecked_mut(idx + 1), edge.node);
- /// Returns a mutable reference to the value pointed-to by this handle. This doesn't return a
- /// reference with a lifetime as large as `into_kv_mut`, but it also does not consume the
- /// handle.
- pub fn val_mut(&'a mut self) -> &'a mut V {
- unsafe { self.node.vals_mut().get_unchecked_mut(self.index) }
- }
-}
+ self.as_leaf_mut().len += 1;
-impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType>
- where NodeRef: Deref<Target = Node<K, V>> + DerefMut
-{
- /// Gets the handle pointing to the edge immediately to the left of the key/value pair pointed
- /// to by this handle.
- pub fn left_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
- Handle {
- node: &mut *self.node,
- index: self.index,
- marker: PhantomData,
+ Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
}
}
- /// Gets the handle pointing to the edge immediately to the right of the key/value pair pointed
- /// to by this handle.
- pub fn right_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
- Handle {
- node: &mut *self.node,
- index: self.index + 1,
- marker: PhantomData,
+ pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(edge.height == self.height - 1);
+ debug_assert!(self.len() < CAPACITY);
+
+ unsafe {
+ slice_insert(self.keys_mut(), 0, key);
+ slice_insert(self.vals_mut(), 0, val);
+ slice_insert(
+ slice::from_raw_parts_mut(
+ self.as_internal_mut().edges.as_mut_ptr(),
+ self.len()+1
+ ),
+ 0,
+ edge.node
+ );
+
+ self.as_leaf_mut().len += 1;
+
+ for i in 0..self.len()+1 {
+ Handle::new_edge(self.reborrow_mut(), i).correct_parent_link();
+ }
}
- }
-}
-impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Leaf>
- where NodeRef: Deref<Target = Node<K, V>> + DerefMut
-{
- /// Removes the key/value pair at the handle's location.
- ///
- /// # Panics (in debug build)
- ///
- /// Panics if the node containing the pair is not a leaf node.
- pub fn remove_as_leaf(mut self) -> (K, V) {
- unsafe { self.node.remove_kv(self.index) }
}
}
-impl<K, V, NodeRef> Handle<NodeRef, handle::KV, handle::Internal>
- where NodeRef: Deref<Target = Node<K, V>> + DerefMut
-{
- /// Steal! Stealing is roughly analogous to a binary tree rotation.
- /// In this case, we're "rotating" right.
- unsafe fn steal_rightward(&mut self) {
- // Take the biggest stuff off left
- let (mut key, mut val, edge) = {
- let mut left_handle = self.left_edge();
- let left = left_handle.edge_mut();
- let (key, val) = left.pop_kv();
- let edge = if left.is_leaf() {
- None
- } else {
- Some(left.pop_edge())
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() > 0);
+
+ let idx = self.len() - 1;
+
+ unsafe {
+ let key = ptr::read(self.keys().get_unchecked(idx));
+ let val = ptr::read(self.vals().get_unchecked(idx));
+ let edge = match self.reborrow_mut().force() {
+ ForceResult::Leaf(_) => None,
+ ForceResult::Internal(internal) => {
+ let edge = ptr::read(internal.as_internal().edges.get_unchecked(idx + 1));
+ let mut new_root = Root { node: edge, height: internal.height - 1 };
+ new_root.as_mut().as_leaf_mut().parent = ptr::null();
+ Some(new_root)
+ }
};
+ self.as_leaf_mut().len -= 1;
(key, val, edge)
- };
-
- // Swap the parent's separating key-value pair with left's
- mem::swap(&mut key, self.key_mut());
- mem::swap(&mut val, self.val_mut());
-
- // Put them at the start of right
- let mut right_handle = self.right_edge();
- let right = right_handle.edge_mut();
- right.insert_kv(0, key, val);
- match edge {
- Some(edge) => right.insert_edge(0, edge),
- None => {}
}
}
- /// Steal! Stealing is roughly analogous to a binary tree rotation.
- /// In this case, we're "rotating" left.
- unsafe fn steal_leftward(&mut self) {
- // Take the smallest stuff off right
- let (mut key, mut val, edge) = {
- let mut right_handle = self.right_edge();
- let right = right_handle.edge_mut();
- let (key, val) = right.remove_kv(0);
- let edge = if right.is_leaf() {
- None
- } else {
- Some(right.remove_edge(0))
- };
+ pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
+ // Necessary for correctness, but this is an internal module
+ debug_assert!(self.len() > 0);
- (key, val, edge)
- };
+ let old_len = self.len();
- // Swap the parent's separating key-value pair with right's
- mem::swap(&mut key, self.key_mut());
- mem::swap(&mut val, self.val_mut());
-
- // Put them at the end of left
- let mut left_handle = self.left_edge();
- let left = left_handle.edge_mut();
- left.push_kv(key, val);
- match edge {
- Some(edge) => left.push_edge(edge),
- None => {}
- }
- }
+ unsafe {
+ let key = slice_remove(self.keys_mut(), 0);
+ let val = slice_remove(self.vals_mut(), 0);
+ let edge = match self.reborrow_mut().force() {
+ ForceResult::Leaf(_) => None,
+ ForceResult::Internal(mut internal) => {
+ let edge = slice_remove(
+ slice::from_raw_parts_mut(
+ internal.as_internal_mut().edges.as_mut_ptr(),
+ old_len+1
+ ),
+ 0
+ );
+
+ let mut new_root = Root { node: edge, height: internal.height - 1 };
+ new_root.as_mut().as_leaf_mut().parent = ptr::null();
+
+ for i in 0..old_len {
+ Handle::new_edge(internal.reborrow_mut(), i).correct_parent_link();
+ }
+
+ Some(new_root)
+ }
+ };
- /// Merge! Smooshes left and right into one node, along with the key-value
- /// pair that separated them in their parent.
- unsafe fn merge_children(mut self) {
- // Permanently remove right's index, and the key-value pair that separates
- // left and right
- let (key, val) = self.node.remove_kv(self.index);
- let right = self.node.remove_edge(self.index + 1);
+ self.as_leaf_mut().len -= 1;
- // Give left right's stuff.
- self.left_edge()
- .edge_mut()
- .absorb(key, val, right);
+ (key, val, edge)
+ }
}
}
-impl<K, V> Node<K, V> {
- /// Returns the mutable handle pointing to the key/value pair at a given index.
- ///
- /// # Panics (in debug build)
- ///
- /// Panics if the given index is out of bounds.
- pub fn kv_handle(&mut self,
- index: usize)
- -> Handle<&mut Node<K, V>, handle::KV, handle::LeafOrInternal> {
- // Necessary for correctness, but in a private module
- debug_assert!(index < self.len(), "kv_handle index out of bounds");
- Handle {
- node: self,
- index: index,
- marker: PhantomData,
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ pub fn force(self) -> ForceResult<
+ NodeRef<BorrowType, K, V, marker::Leaf>,
+ NodeRef<BorrowType, K, V, marker::Internal>
+ > {
+ if self.height == 0 {
+ ForceResult::Leaf(NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
+ })
+ } else {
+ ForceResult::Internal(NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData
+ })
}
}
+}
- pub fn iter<'a>(&'a self) -> Traversal<'a, K, V> {
- self.as_slices_internal().iter()
- }
+pub struct Handle<Node, Type> {
+ node: Node,
+ idx: usize,
+ _marker: PhantomData<Type>
+}
- pub fn iter_mut<'a>(&'a mut self) -> MutTraversal<'a, K, V> {
- self.as_slices_internal_mut().iter_mut()
+impl<Node: Copy, Type> Copy for Handle<Node, Type> { }
+impl<Node: Copy, Type> Clone for Handle<Node, Type> {
+ fn clone(&self) -> Self {
+ *self
}
+}
- pub fn into_iter(self) -> MoveTraversal<K, V> {
- unsafe {
- let ret = MoveTraversal {
- inner: MoveTraversalImpl {
- keys: RawItems::from_slice(self.keys()),
- vals: RawItems::from_slice(self.vals()),
- edges: RawItems::from_slice(self.edges()),
-
- ptr: Unique::new(*self.keys as *mut u8),
- capacity: self.capacity(),
- is_leaf: self.is_leaf(),
- },
- head_is_edge: true,
- tail_is_edge: true,
- has_edges: !self.is_leaf(),
- };
- mem::forget(self);
- ret
- }
+impl<Node, Type> Handle<Node, Type> {
+ pub fn into_node(self) -> Node {
+ self.node
}
+}
- /// When a node has no keys or values and only a single edge, extract that edge.
- pub fn hoist_lone_child(&mut self) {
+impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
+ pub fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
// Necessary for correctness, but in a private module
- debug_assert!(self.is_empty());
- debug_assert!(!self.is_leaf());
+ debug_assert!(idx < node.len());
- unsafe {
- let ret = ptr::read(self.edges().get_unchecked(0));
- self.destroy();
- ptr::write(self, ret);
+ Handle {
+ node: node,
+ idx: idx,
+ _marker: PhantomData
}
}
-}
-
-// Vector functions (all unchecked)
-impl<K, V> Node<K, V> {
- // This must be followed by push_edge on an internal node.
- #[inline]
- unsafe fn push_kv(&mut self, key: K, val: V) {
- let len = self.len();
-
- ptr::write(self.keys_mut().get_unchecked_mut(len), key);
- ptr::write(self.vals_mut().get_unchecked_mut(len), val);
- self._len += 1;
+ pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ Handle::new_edge(self.node, self.idx)
}
- // This can only be called immediately after a call to push_kv.
- #[inline]
- unsafe fn push_edge(&mut self, edge: Node<K, V>) {
- let len = self.len();
-
- ptr::write(self.edges_mut().get_unchecked_mut(len), edge);
+ pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ Handle::new_edge(self.node, self.idx + 1)
}
+}
- // This must be followed by insert_edge on an internal node.
- #[inline]
- unsafe fn insert_kv(&mut self, index: usize, key: K, val: V) -> &mut V {
- ptr::copy(self.keys().as_ptr().offset(index as isize),
- self.keys_mut().as_mut_ptr().offset(index as isize + 1),
- self.len() - index);
- ptr::copy(self.vals().as_ptr().offset(index as isize),
- self.vals_mut().as_mut_ptr().offset(index as isize + 1),
- self.len() - index);
+impl<BorrowType, K, V, NodeType, HandleType> PartialEq
+ for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
- ptr::write(self.keys_mut().get_unchecked_mut(index), key);
- ptr::write(self.vals_mut().get_unchecked_mut(index), val);
+ fn eq(&self, other: &Self) -> bool {
+ self.node.node == other.node.node && self.idx == other.idx
+ }
+}
- self._len += 1;
+impl<BorrowType, K, V, NodeType, HandleType>
+ Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType> {
- self.vals_mut().get_unchecked_mut(index)
- }
+ pub fn reborrow(&self)
+ -> Handle<NodeRef<marker::Immut, K, V, NodeType>, HandleType> {
- // This can only be called immediately after a call to insert_kv.
- #[inline]
- unsafe fn insert_edge(&mut self, index: usize, edge: Node<K, V>) {
- ptr::copy(self.edges().as_ptr().offset(index as isize),
- self.edges_mut().as_mut_ptr().offset(index as isize + 1),
- self.len() - index);
- ptr::write(self.edges_mut().get_unchecked_mut(index), edge);
+ // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
+ Handle {
+ node: self.node.reborrow(),
+ idx: self.idx,
+ _marker: PhantomData
+ }
}
+}
- // This must be followed by pop_edge on an internal node.
- #[inline]
- unsafe fn pop_kv(&mut self) -> (K, V) {
- let key = ptr::read(self.keys().get_unchecked(self.len() - 1));
- let val = ptr::read(self.vals().get_unchecked(self.len() - 1));
+impl<'a, K, V, NodeType, HandleType>
+ Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
- self._len -= 1;
+ pub unsafe fn reborrow_mut(&mut self)
+ -> Handle<NodeRef<marker::Mut, K, V, NodeType>, HandleType> {
- (key, val)
+ // We can't use Handle::new_kv or Handle::new_edge because we don't know our type
+ Handle {
+ node: self.node.reborrow_mut(),
+ idx: self.idx,
+ _marker: PhantomData
+ }
}
+}
- // This can only be called immediately after a call to pop_kv.
- #[inline]
- unsafe fn pop_edge(&mut self) -> Node<K, V> {
- let edge = ptr::read(self.edges().get_unchecked(self.len() + 1));
-
- edge
- }
+impl<BorrowType, K, V, NodeType>
+ Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
- // This must be followed by remove_edge on an internal node.
- #[inline]
- unsafe fn remove_kv(&mut self, index: usize) -> (K, V) {
- let key = ptr::read(self.keys().get_unchecked(index));
- let val = ptr::read(self.vals().get_unchecked(index));
+ pub fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
+ // Necessary for correctness, but in a private module
+ debug_assert!(idx <= node.len());
- ptr::copy(self.keys().as_ptr().offset(index as isize + 1),
- self.keys_mut().as_mut_ptr().offset(index as isize),
- self.len() - index - 1);
- ptr::copy(self.vals().as_ptr().offset(index as isize + 1),
- self.vals_mut().as_mut_ptr().offset(index as isize),
- self.len() - index - 1);
+ Handle {
+ node: node,
+ idx: idx,
+ _marker: PhantomData
+ }
+ }
- self._len -= 1;
+ pub fn left_kv(self)
+ -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
- (key, val)
+ if self.idx > 0 {
+ Ok(Handle::new_kv(self.node, self.idx - 1))
+ } else {
+ Err(self)
+ }
}
- // This can only be called immediately after a call to remove_kv.
- #[inline]
- unsafe fn remove_edge(&mut self, index: usize) -> Node<K, V> {
- let edge = ptr::read(self.edges().get_unchecked(index));
+ pub fn right_kv(self)
+ -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
- ptr::copy(self.edges().as_ptr().offset(index as isize + 1),
- self.edges_mut().as_mut_ptr().offset(index as isize),
- // index can be == len+1, so do the +1 first to avoid underflow.
- (self.len() + 1) - index);
-
- edge
+ if self.idx < self.node.len() {
+ Ok(Handle::new_kv(self.node, self.idx))
+ } else {
+ Err(self)
+ }
}
}
-// Private implementation details
-impl<K, V> Node<K, V> {
- /// Node is full, so split it into two nodes, and yield the middle-most key-value pair
- /// because we have one too many, and our parent now has one too few
- fn split(&mut self) -> (K, V, Node<K, V>) {
- // Necessary for correctness, but in a private function
- debug_assert!(!self.is_empty());
-
- let mut right = if self.is_leaf() {
- Node::new_leaf(self.capacity())
- } else {
- unsafe { Node::new_internal(self.capacity()) }
- };
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
+ fn insert_fit(&mut self, key: K, val: V) -> *mut V {
+ // Necessary for correctness, but in a private module
+ debug_assert!(self.node.len() < CAPACITY);
unsafe {
- right._len = self.len() / 2;
- let right_offset = self.len() - right.len();
- ptr::copy_nonoverlapping(self.keys().as_ptr().offset(right_offset as isize),
- right.keys_mut().as_mut_ptr(),
- right.len());
- ptr::copy_nonoverlapping(self.vals().as_ptr().offset(right_offset as isize),
- right.vals_mut().as_mut_ptr(),
- right.len());
- if !self.is_leaf() {
- ptr::copy_nonoverlapping(self.edges().as_ptr().offset(right_offset as isize),
- right.edges_mut().as_mut_ptr(),
- right.len() + 1);
- }
-
- let key = ptr::read(self.keys().get_unchecked(right_offset - 1));
- let val = ptr::read(self.vals().get_unchecked(right_offset - 1));
+ slice_insert(self.node.keys_mut(), self.idx, key);
+ slice_insert(self.node.vals_mut(), self.idx, val);
- self._len = right_offset - 1;
+ self.node.as_leaf_mut().len += 1;
- (key, val, right)
+ self.node.vals_mut().get_unchecked_mut(self.idx)
}
}
- /// Take all the values from right, separated by the given key and value
- fn absorb(&mut self, key: K, val: V, mut right: Node<K, V>) {
- // Necessary for correctness, but in a private function
- // Just as a sanity check, make sure we can fit this guy in
- debug_assert!(self.len() + right.len() <= self.capacity());
- debug_assert!(self.is_leaf() == right.is_leaf());
+ pub fn insert(mut self, key: K, val: V)
+ -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
- unsafe {
- let old_len = self.len();
- self._len += right.len() + 1;
-
- ptr::write(self.keys_mut().get_unchecked_mut(old_len), key);
- ptr::write(self.vals_mut().get_unchecked_mut(old_len), val);
-
- ptr::copy_nonoverlapping(right.keys().as_ptr(),
- self.keys_mut().as_mut_ptr().offset(old_len as isize + 1),
- right.len());
- ptr::copy_nonoverlapping(right.vals().as_ptr(),
- self.vals_mut().as_mut_ptr().offset(old_len as isize + 1),
- right.len());
- if !self.is_leaf() {
- ptr::copy_nonoverlapping(right.edges().as_ptr(),
- self.edges_mut()
- .as_mut_ptr()
- .offset(old_len as isize + 1),
- right.len() + 1);
- }
-
- right.destroy();
- mem::forget(right);
+ if self.node.len() < CAPACITY {
+ let ptr = self.insert_fit(key, val);
+ (InsertResult::Fit(Handle::new_kv(self.node, self.idx)), ptr)
+ } else {
+ let middle = Handle::new_kv(self.node, B);
+ let (mut left, k, v, mut right) = middle.split();
+ let ptr = if self.idx <= B {
+ unsafe {
+ Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val)
+ }
+ } else {
+ unsafe {
+ Handle::new_edge(
+ right.as_mut().cast_unchecked::<marker::Leaf>(),
+ self.idx - (B + 1)
+ ).insert_fit(key, val)
+ }
+ };
+ (InsertResult::Split(left, k, v, right), ptr)
}
}
}
-/// Get the capacity of a node from the order of the parent B-Tree
-fn capacity_from_b(b: usize) -> usize {
- 2 * b - 1
-}
-
-/// Get the minimum load of a node from its capacity
-fn min_load_from_capacity(cap: usize) -> usize {
- // B - 1
- cap / 2
-}
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ fn correct_parent_link(mut self) {
+ let idx = self.idx as u16;
+ let ptr = self.node.as_internal_mut() as *mut _;
+ let mut child = self.descend();
+ child.as_leaf_mut().parent = ptr;
+ child.as_leaf_mut().parent_idx = idx;
+ }
-/// A trait for pairs of `Iterator`s, one over edges and the other over key/value pairs. This is
-/// necessary, as the `MoveTraversalImpl` needs to have a destructor that deallocates the `Node`,
-/// and a pair of `Iterator`s would require two independent destructors.
-pub trait TraversalImpl {
- type Item;
- type Edge;
+ unsafe fn cast_unchecked<NewType>(&mut self)
+ -> Handle<NodeRef<marker::Mut, K, V, NewType>, marker::Edge> {
- fn next_kv(&mut self) -> Option<Self::Item>;
- fn next_kv_back(&mut self) -> Option<Self::Item>;
+ Handle::new_edge(self.node.cast_unchecked(), self.idx)
+ }
- fn next_edge(&mut self) -> Option<Self::Edge>;
- fn next_edge_back(&mut self) -> Option<Self::Edge>;
-}
+ fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
+        // Necessary for correctness, but in a private module
+ debug_assert!(self.node.len() < CAPACITY);
+ debug_assert!(edge.height == self.node.height - 1);
-/// A `TraversalImpl` that actually is backed by two iterators. This works in the non-moving case,
-/// as no deallocation needs to be done.
-#[derive(Clone)]
-pub struct ElemsAndEdges<Elems, Edges>(Elems, Edges);
+ unsafe {
+ self.cast_unchecked::<marker::Leaf>().insert_fit(key, val);
+
+ slice_insert(
+ slice::from_raw_parts_mut(
+ self.node.as_internal_mut().edges.as_mut_ptr(),
+ self.node.len()
+ ),
+ self.idx + 1,
+ edge.node
+ );
+
+ for i in (self.idx+1)..(self.node.len()+1) {
+ Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
+ }
+ }
+ }
-impl<K, V, E, Elems: DoubleEndedIterator, Edges: DoubleEndedIterator>
- TraversalImpl for ElemsAndEdges<Elems, Edges>
- where Elems : Iterator<Item=(K, V)>, Edges : Iterator<Item=E>
-{
- type Item = (K, V);
- type Edge = E;
+ pub fn insert(mut self, key: K, val: V, edge: Root<K, V>)
+ -> InsertResult<'a, K, V, marker::Internal> {
- fn next_kv(&mut self) -> Option<(K, V)> { self.0.next() }
- fn next_kv_back(&mut self) -> Option<(K, V)> { self.0.next_back() }
+        // Necessary for correctness, but in a private module
+ debug_assert!(edge.height == self.node.height - 1);
- fn next_edge(&mut self) -> Option<E> { self.1.next() }
- fn next_edge_back(&mut self) -> Option<E> { self.1.next_back() }
+ if self.node.len() < CAPACITY {
+ self.insert_fit(key, val, edge);
+ InsertResult::Fit(Handle::new_kv(self.node, self.idx))
+ } else {
+ let middle = Handle::new_kv(self.node, B);
+ let (mut left, k, v, mut right) = middle.split();
+ if self.idx <= B {
+ unsafe {
+ Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val, edge);
+ }
+ } else {
+ unsafe {
+ Handle::new_edge(
+ right.as_mut().cast_unchecked::<marker::Internal>(),
+ self.idx - (B + 1)
+ ).insert_fit(key, val, edge);
+ }
+ }
+ InsertResult::Split(left, k, v, right)
+ }
+ }
}
-/// A `TraversalImpl` taking a `Node` by value.
-pub struct MoveTraversalImpl<K, V> {
- keys: RawItems<K>,
- vals: RawItems<V>,
- edges: RawItems<Node<K, V>>,
+impl<BorrowType, K, V>
+ Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
- // For deallocation when we are done iterating.
- ptr: Unique<u8>,
- capacity: usize,
- is_leaf: bool,
+ pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.node.height - 1,
+ node: unsafe { self.node.as_internal().edges.get_unchecked(self.idx).as_ptr() },
+ root: self.node.root,
+ _marker: PhantomData
+ }
+ }
}
-unsafe impl<K: Sync, V: Sync> Sync for MoveTraversalImpl<K, V> {}
-unsafe impl<K: Send, V: Send> Send for MoveTraversalImpl<K, V> {}
-
-impl<K, V> TraversalImpl for MoveTraversalImpl<K, V> {
- type Item = (K, V);
- type Edge = Node<K, V>;
+impl<'a, K: 'a, V: 'a, NodeType>
+ Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
- fn next_kv(&mut self) -> Option<(K, V)> {
- match (self.keys.next(), self.vals.next()) {
- (Some(k), Some(v)) => Some((k, v)),
- _ => None,
+ pub fn into_kv(self) -> (&'a K, &'a V) {
+ let (keys, vals) = self.node.into_slices();
+ unsafe {
+ (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx))
}
}
+}
- fn next_kv_back(&mut self) -> Option<(K, V)> {
- match (self.keys.next_back(), self.vals.next_back()) {
- (Some(k), Some(v)) => Some((k, v)),
- _ => None,
- }
- }
+impl<'a, K: 'a, V: 'a, NodeType>
+ Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
- fn next_edge(&mut self) -> Option<Node<K, V>> {
- // Necessary for correctness, but in a private module
- debug_assert!(!self.is_leaf);
- self.edges.next()
+ pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
+ let (mut keys, mut vals) = self.node.into_slices_mut();
+ unsafe {
+ (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
+ }
}
+}
- fn next_edge_back(&mut self) -> Option<Node<K, V>> {
- // Necessary for correctness, but in a private module
- debug_assert!(!self.is_leaf);
- self.edges.next_back()
+impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+ pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
+ unsafe {
+ let (mut keys, mut vals) = self.node.reborrow_mut().into_slices_mut();
+ (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
+ }
}
}
-impl<K, V> Drop for MoveTraversalImpl<K, V> {
- #[unsafe_destructor_blind_to_params]
- fn drop(&mut self) {
- // We need to cleanup the stored values manually, as the RawItems destructor would run
- // after our deallocation.
- for _ in self.keys.by_ref() {}
- for _ in self.vals.by_ref() {}
- for _ in self.edges.by_ref() {}
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
+ pub fn split(mut self)
+ -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
+ unsafe {
+ let mut new_node = Box::new(LeafNode::new());
+
+ let k = ptr::read(self.node.keys().get_unchecked(self.idx));
+ let v = ptr::read(self.node.vals().get_unchecked(self.idx));
+
+ let new_len = self.node.len() - self.idx - 1;
+
+ ptr::copy_nonoverlapping(
+ self.node.keys().as_ptr().offset(self.idx as isize + 1),
+ new_node.keys.as_mut_ptr(),
+ new_len
+ );
+ ptr::copy_nonoverlapping(
+ self.node.vals().as_ptr().offset(self.idx as isize + 1),
+ new_node.vals.as_mut_ptr(),
+ new_len
+ );
+
+ self.node.as_leaf_mut().len = self.idx as u16;
+ new_node.len = new_len as u16;
+
+ (
+ self.node,
+ k, v,
+ Root {
+ node: BoxedNode::from_leaf(new_node),
+ height: 0
+ }
+ )
+ }
+ }
- let (alignment, size) = calculate_allocation_generic::<K, V>(self.capacity, self.is_leaf);
- unsafe { heap::deallocate(*self.ptr, size, alignment) };
+ pub fn remove(mut self)
+ -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
+ unsafe {
+ let k = slice_remove(self.node.keys_mut(), self.idx);
+ let v = slice_remove(self.node.vals_mut(), self.idx);
+ self.node.as_leaf_mut().len -= 1;
+ (self.left_edge(), k, v)
+ }
}
}
-/// An abstraction over all the different kinds of traversals a node supports
-#[derive(Clone)]
-pub struct AbsTraversal<Impl> {
- inner: Impl,
- head_is_edge: bool,
- tail_is_edge: bool,
- has_edges: bool,
-}
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
+ pub fn split(mut self)
+ -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
+ unsafe {
+ let mut new_node = Box::new(InternalNode::new());
+
+ let k = ptr::read(self.node.keys().get_unchecked(self.idx));
+ let v = ptr::read(self.node.vals().get_unchecked(self.idx));
+
+ let height = self.node.height;
+ let new_len = self.node.len() - self.idx - 1;
+
+ ptr::copy_nonoverlapping(
+ self.node.keys().as_ptr().offset(self.idx as isize + 1),
+ new_node.data.keys.as_mut_ptr(),
+ new_len
+ );
+ ptr::copy_nonoverlapping(
+ self.node.vals().as_ptr().offset(self.idx as isize + 1),
+ new_node.data.vals.as_mut_ptr(),
+ new_len
+ );
+ ptr::copy_nonoverlapping(
+ self.node.as_internal().edges.as_ptr().offset(self.idx as isize + 1),
+ new_node.edges.as_mut_ptr(),
+ new_len + 1
+ );
+
+ self.node.as_leaf_mut().len = self.idx as u16;
+ new_node.data.len = new_len as u16;
+
+ let mut new_root = Root {
+ node: BoxedNode::from_internal(new_node),
+ height: height
+ };
-/// A single atomic step in a traversal.
-pub enum TraversalItem<K, V, E> {
- /// An element is visited. This isn't written as `Elem(K, V)` just because `opt.map(Elem)`
- /// requires the function to take a single argument. (Enum constructors are functions.)
- Elem((K, V)),
- /// An edge is followed.
- Edge(E),
-}
+ for i in 0..(new_len+1) {
+ Handle::new_edge(new_root.as_mut().cast_unchecked(), i).correct_parent_link();
+ }
-/// A traversal over a node's entries and edges
-pub type Traversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
- slice::Iter<'a, V>>,
- slice::Iter<'a, Node<K, V>>>>;
+ (
+ self.node,
+ k, v,
+ new_root
+ )
+ }
+ }
-/// A mutable traversal over a node's entries and edges
-pub type MutTraversal<'a, K, V> = AbsTraversal<ElemsAndEdges<Zip<slice::Iter<'a, K>,
- slice::IterMut<'a, V>>,
- slice::IterMut<'a, Node<K, V>>>>;
+ pub fn can_merge(&self) -> bool {
+ (
+ self.reborrow()
+ .left_edge()
+ .descend()
+ .len()
+ + self.reborrow()
+ .right_edge()
+ .descend()
+ .len()
+ + 1
+ ) <= CAPACITY
+ }
+
+ pub fn merge(mut self)
+ -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ let self1 = unsafe { ptr::read(&self) };
+ let self2 = unsafe { ptr::read(&self) };
+ let mut left_node = self1.left_edge().descend();
+ let left_len = left_node.len();
+ let mut right_node = self2.right_edge().descend();
+ let right_len = right_node.len();
+
+        // Necessary for correctness, but in a private module
+ debug_assert!(left_len + right_len + 1 <= CAPACITY);
-/// An owning traversal over a node's entries and edges
-pub type MoveTraversal<K, V> = AbsTraversal<MoveTraversalImpl<K, V>>;
+ unsafe {
+ ptr::write(left_node.keys_mut().get_unchecked_mut(left_len),
+ slice_remove(self.node.keys_mut(), self.idx));
+ ptr::copy_nonoverlapping(
+ right_node.keys().as_ptr(),
+ left_node.keys_mut().as_mut_ptr().offset(left_len as isize + 1),
+ right_len
+ );
+ ptr::write(left_node.vals_mut().get_unchecked_mut(left_len),
+ slice_remove(self.node.vals_mut(), self.idx));
+ ptr::copy_nonoverlapping(
+ right_node.vals().as_ptr(),
+ left_node.vals_mut().as_mut_ptr().offset(left_len as isize + 1),
+ right_len
+ );
+
+ slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
+ for i in self.idx+1..self.node.len() {
+ Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
+ }
+ self.node.as_leaf_mut().len -= 1;
+
+ if self.node.height > 1 {
+ ptr::copy_nonoverlapping(
+ right_node.cast_unchecked().as_internal().edges.as_ptr(),
+ left_node.cast_unchecked()
+ .as_internal_mut()
+ .edges
+ .as_mut_ptr()
+ .offset(left_len as isize + 1),
+ right_len + 1
+ );
+
+ for i in left_len+1..left_len+right_len+2 {
+ Handle::new_edge(
+ left_node.cast_unchecked().reborrow_mut(),
+ i
+ ).correct_parent_link();
+ }
+ heap::deallocate(
+ *right_node.node as *mut u8,
+ mem::size_of::<InternalNode<K, V>>(),
+ mem::align_of::<InternalNode<K, V>>()
+ );
+ } else {
+ heap::deallocate(
+ *right_node.node as *mut u8,
+ mem::size_of::<LeafNode<K, V>>(),
+ mem::align_of::<LeafNode<K, V>>()
+ );
+ }
-impl<K, V, E, Impl> Iterator for AbsTraversal<Impl>
- where Impl: TraversalImpl<Item = (K, V), Edge = E>
-{
- type Item = TraversalItem<K, V, E>;
+ left_node.as_leaf_mut().len += right_len as u16 + 1;
- fn next(&mut self) -> Option<TraversalItem<K, V, E>> {
- self.next_edge_item().map(Edge).or_else(|| self.next_kv_item().map(Elem))
+ Handle::new_edge(self.node, self.idx)
+ }
}
}
-impl<K, V, E, Impl> DoubleEndedIterator for AbsTraversal<Impl>
- where Impl: TraversalImpl<Item = (K, V), Edge = E>
-{
- fn next_back(&mut self) -> Option<TraversalItem<K, V, E>> {
- self.next_edge_item_back().map(Edge).or_else(|| self.next_kv_item_back().map(Elem))
+impl<BorrowType, K, V, HandleType>
+ Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType> {
+
+ pub fn force(self) -> ForceResult<
+ Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
+ Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>
+ > {
+ match self.node.force() {
+ ForceResult::Leaf(node) => ForceResult::Leaf(Handle {
+ node: node,
+ idx: self.idx,
+ _marker: PhantomData
+ }),
+ ForceResult::Internal(node) => ForceResult::Internal(Handle {
+ node: node,
+ idx: self.idx,
+ _marker: PhantomData
+ })
+ }
}
}
-impl<K, V, E, Impl> AbsTraversal<Impl> where Impl: TraversalImpl<Item = (K, V), Edge = E> {
- /// Advances the iterator and returns the item if it's an edge. Returns None
- /// and does nothing if the first item is not an edge.
- pub fn next_edge_item(&mut self) -> Option<E> {
- // NB. `&& self.has_edges` might be redundant in this condition.
- let edge = if self.head_is_edge && self.has_edges {
- self.inner.next_edge()
- } else {
- None
- };
- self.head_is_edge = false;
- edge
- }
-
- /// Advances the iterator and returns the item if it's an edge. Returns None
- /// and does nothing if the last item is not an edge.
- pub fn next_edge_item_back(&mut self) -> Option<E> {
- let edge = if self.tail_is_edge && self.has_edges {
- self.inner.next_edge_back()
- } else {
- None
- };
- self.tail_is_edge = false;
- edge
- }
-
- /// Advances the iterator and returns the item if it's a key-value pair. Returns None
- /// and does nothing if the first item is not a key-value pair.
- pub fn next_kv_item(&mut self) -> Option<(K, V)> {
- if !self.head_is_edge {
- self.head_is_edge = true;
- self.inner.next_kv()
- } else {
- None
- }
- }
+pub enum ForceResult<Leaf, Internal> {
+ Leaf(Leaf),
+ Internal(Internal)
+}
- /// Advances the iterator and returns the item if it's a key-value pair. Returns None
- /// and does nothing if the last item is not a key-value pair.
- pub fn next_kv_item_back(&mut self) -> Option<(K, V)> {
- if !self.tail_is_edge {
- self.tail_is_edge = true;
- self.inner.next_kv_back()
- } else {
- None
- }
- }
+pub enum InsertResult<'a, K, V, Type> {
+ Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
+ Split(NodeRef<marker::Mut<'a>, K, V, Type>, K, V, Root<K, V>)
}
-macro_rules! node_slice_impl {
- ($NodeSlice:ident, $Traversal:ident,
- $as_slices_internal:ident, $index:ident, $iter:ident) => {
- impl<'a, K: Ord + 'a, V: 'a> $NodeSlice<'a, K, V> {
- /// Performs linear search in a slice. Returns a tuple of (index, is_exact_match).
- fn search_linear<Q: ?Sized>(&self, key: &Q) -> (usize, bool)
- where K: Borrow<Q>, Q: Ord {
- for (i, k) in self.keys.iter().enumerate() {
- match key.cmp(k.borrow()) {
- Greater => {},
- Equal => return (i, true),
- Less => return (i, false),
- }
- }
- (self.keys.len(), false)
- }
+pub mod marker {
+ use core::marker::PhantomData;
- /// Returns a sub-slice with elements starting with `min_key`.
- pub fn slice_from<Q: ?Sized + Ord>(self, min_key: &Q) -> $NodeSlice<'a, K, V> where
- K: Borrow<Q>,
- {
- // _______________
- // |_1_|_3_|_5_|_7_|
- // | | | | |
- // 0 0 1 1 2 2 3 3 4 index
- // | | | | |
- // \___|___|___|___/ slice_from(&0); pos = 0
- // \___|___|___/ slice_from(&2); pos = 1
- // |___|___|___/ slice_from(&3); pos = 1; result.head_is_edge = false
- // \___|___/ slice_from(&4); pos = 2
- // \___/ slice_from(&6); pos = 3
- // \|/ slice_from(&999); pos = 4
- let (pos, pos_is_kv) = self.search_linear(min_key);
- $NodeSlice {
- has_edges: self.has_edges,
- edges: if !self.has_edges {
- self.edges
- } else {
- self.edges.$index(pos ..)
- },
- keys: &self.keys[pos ..],
- vals: self.vals.$index(pos ..),
- head_is_edge: !pos_is_kv,
- tail_is_edge: self.tail_is_edge,
- }
- }
+ pub enum Leaf { }
+ pub enum Internal { }
+ pub enum LeafOrInternal { }
- /// Returns a sub-slice with elements up to and including `max_key`.
- pub fn slice_to<Q: ?Sized + Ord>(self, max_key: &Q) -> $NodeSlice<'a, K, V> where
- K: Borrow<Q>,
- {
- // _______________
- // |_1_|_3_|_5_|_7_|
- // | | | | |
- // 0 0 1 1 2 2 3 3 4 index
- // | | | | |
- //\|/ | | | | slice_to(&0); pos = 0
- // \___/ | | | slice_to(&2); pos = 1
- // \___|___| | | slice_to(&3); pos = 1; result.tail_is_edge = false
- // \___|___/ | | slice_to(&4); pos = 2
- // \___|___|___/ | slice_to(&6); pos = 3
- // \___|___|___|___/ slice_to(&999); pos = 4
- let (pos, pos_is_kv) = self.search_linear(max_key);
- let pos = pos + if pos_is_kv { 1 } else { 0 };
- $NodeSlice {
- has_edges: self.has_edges,
- edges: if !self.has_edges {
- self.edges
- } else {
- self.edges.$index(.. (pos + 1))
- },
- keys: &self.keys[..pos],
- vals: self.vals.$index(.. pos),
- head_is_edge: self.head_is_edge,
- tail_is_edge: !pos_is_kv,
- }
- }
- }
+ pub enum Owned { }
+ pub struct Immut<'a>(PhantomData<&'a ()>);
+ pub struct Mut<'a>(PhantomData<&'a mut ()>);
- impl<'a, K: 'a, V: 'a> $NodeSlice<'a, K, V> {
- /// Returns an iterator over key/value pairs and edges in a slice.
- #[inline]
- pub fn $iter(self) -> $Traversal<'a, K, V> {
- let mut edges = self.edges.$iter();
- // Skip edges at both ends, if excluded.
- if !self.head_is_edge { edges.next(); }
- if !self.tail_is_edge { edges.next_back(); }
- // The key iterator is always immutable.
- $Traversal {
- inner: ElemsAndEdges(
- self.keys.iter().zip(self.vals.$iter()),
- edges
- ),
- head_is_edge: self.head_is_edge,
- tail_is_edge: self.tail_is_edge,
- has_edges: self.has_edges,
- }
- }
- }
- }
+ pub enum KV { }
+ pub enum Edge { }
}
-node_slice_impl!(NodeSlice, Traversal, as_slices_internal, index, iter);
-node_slice_impl!(MutNodeSlice, MutTraversal, as_slices_internal_mut, index_mut, iter_mut);
+unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
+ ptr::copy(
+ slice.as_ptr().offset(idx as isize),
+ slice.as_mut_ptr().offset(idx as isize + 1),
+ slice.len() - idx
+ );
+ ptr::write(slice.get_unchecked_mut(idx), val);
+}
+
+unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
+ let ret = ptr::read(slice.get_unchecked(idx));
+ ptr::copy(
+ slice.as_ptr().offset(idx as isize + 1),
+ slice.as_mut_ptr().offset(idx as isize),
+ slice.len() - idx - 1
+ );
+ ret
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use core::cmp::Ordering;
+
+use borrow::Borrow;
+
+use super::node::{Handle, NodeRef, marker};
+
+use super::node::ForceResult::*;
+use self::SearchResult::*;
+
+pub enum SearchResult<BorrowType, K, V, FoundType, GoDownType> {
+ Found(Handle<NodeRef<BorrowType, K, V, FoundType>, marker::KV>),
+ GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>)
+}
+
+pub fn search_tree<BorrowType, K, V, Q: ?Sized>(
+ mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+ key: &Q
+) -> SearchResult<BorrowType, K, V, marker::LeafOrInternal, marker::Leaf>
+ where Q: Ord, K: Borrow<Q> {
+
+ loop {
+ match search_node(node, key) {
+ Found(handle) => return Found(handle),
+ GoDown(handle) => match handle.force() {
+ Leaf(leaf) => return GoDown(leaf),
+ Internal(internal) => {
+ node = internal.descend();
+ continue;
+ }
+ }
+ }
+ }
+}
+
+pub fn search_node<BorrowType, K, V, Type, Q: ?Sized>(
+ node: NodeRef<BorrowType, K, V, Type>,
+ key: &Q
+) -> SearchResult<BorrowType, K, V, Type, Type>
+ where Q: Ord, K: Borrow<Q> {
+
+ match search_linear(&node, key) {
+ (idx, true) => Found(
+ Handle::new_kv(node, idx)
+ ),
+ (idx, false) => SearchResult::GoDown(
+ Handle::new_edge(node, idx)
+ )
+ }
+}
+
+fn search_linear<BorrowType, K, V, Type, Q: ?Sized>(
+ node: &NodeRef<BorrowType, K, V, Type>,
+ key: &Q
+) -> (usize, bool)
+ where Q: Ord, K: Borrow<Q> {
+
+ for (i, k) in node.keys().iter().enumerate() {
+ match key.cmp(k.borrow()) {
+ Ordering::Greater => {},
+ Ordering::Equal => return (i, true),
+ Ordering::Less => return (i, false)
+ }
+ }
+ (node.keys().len(), false)
+}
+
///
/// If the set did not have a value present, `true` is returned.
///
- /// If the set did have this key present, that value is returned, and the
+ /// If the set did have this key present, `false` is returned, and the
/// entry is not updated. See the [module-level documentation] for more.
///
/// [module-level documentation]: index.html#insert-and-complex-keys
#![feature(iter_arith)]
#![feature(iter_arith)]
#![feature(lang_items)]
+#![feature(nonzero)]
#![feature(num_bits_bytes)]
-#![feature(oom)]
#![feature(pattern)]
#![feature(shared)]
#![feature(slice_bytes)]
#![feature(unboxed_closures)]
#![feature(unicode)]
#![feature(unique)]
-#![feature(unsafe_no_drop_flag, filling_drop)]
+#![feature(unsafe_no_drop_flag)]
#![cfg_attr(test, feature(clone_from_slice, rand, test))]
#![no_std]
}
}
+#[test]
+fn test_clone() {
+ let mut map = BTreeMap::new();
+ let size = 100;
+ assert_eq!(map.len(), 0);
+
+ for i in 0..size {
+ assert_eq!(map.insert(i, 10*i), None);
+ assert_eq!(map.len(), i + 1);
+ assert_eq!(map, map.clone());
+ }
+
+ for i in 0..size {
+ assert_eq!(map.insert(i, 100*i), Some(10*i));
+ assert_eq!(map.len(), size);
+ assert_eq!(map, map.clone());
+ }
+
+ for i in 0..size/2 {
+ assert_eq!(map.remove(&(i*2)), Some(i*200));
+ assert_eq!(map.len(), size - i - 1);
+ assert_eq!(map, map.clone());
+ }
+
+ for i in 0..size/2 {
+ assert_eq!(map.remove(&(2*i)), None);
+ assert_eq!(map.remove(&(2*i+1)), Some(i*200 + 100));
+ assert_eq!(map.len(), size/2 - i - 1);
+ assert_eq!(map, map.clone());
+ }
+}
+
mod bench {
use std::collections::BTreeMap;
use std::__rand::{Rng, thread_rng};
}
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Reflect + 'static> Any for T {
+impl<T: Reflect + 'static + ?Sized > Any for T {
fn get_type_id(&self) -> TypeId { TypeId::of::<T>() }
}
EscapeDefaultState::Done => (0, Some(0)),
}
}
+
+ fn count(self) -> usize {
+ match self.state {
+ EscapeDefaultState::Char(_) => 1,
+ EscapeDefaultState::Unicode(iter) => iter.count(),
+ EscapeDefaultState::Done => 0,
+ EscapeDefaultState::Backslash(_) => 2,
+ }
+ }
+
+ fn nth(&mut self, n: usize) -> Option<char> {
+ match self.state {
+ EscapeDefaultState::Backslash(c) if n == 0 => {
+ self.state = EscapeDefaultState::Char(c);
+ Some('\\')
+ },
+ EscapeDefaultState::Backslash(c) if n == 1 => {
+ self.state = EscapeDefaultState::Done;
+ Some(c)
+ },
+ EscapeDefaultState::Backslash(_) => {
+ self.state = EscapeDefaultState::Done;
+ None
+ },
+ EscapeDefaultState::Char(c) => {
+ self.state = EscapeDefaultState::Done;
+
+ if n == 0 {
+ Some(c)
+ } else {
+ None
+ }
+ },
+ EscapeDefaultState::Done => return None,
+ EscapeDefaultState::Unicode(ref mut i) => return i.nth(n),
+ }
+ }
+
+ fn last(self) -> Option<char> {
+ match self.state {
+ EscapeDefaultState::Unicode(iter) => iter.last(),
+ EscapeDefaultState::Done => None,
+ EscapeDefaultState::Backslash(c) | EscapeDefaultState::Char(c) => Some(c),
+ }
+ }
}
assert!(!test.is::<[usize; 10]>());
}
+#[test]
+fn any_unsized() {
+ fn is_any<T: Any + ?Sized>() {}
+ is_any::<[i32]>();
+}
#[bench]
fn bench_downcast_ref(b: &mut Bencher) {
check(&[0xD800, 0x41, 0x42], &[Err(0xD800), Ok('A'), Ok('B')]);
check(&[0xD800, 0], &[Err(0xD800), Ok('\0')]);
}
+
+// Exercises the specialized `count`/`nth`/`last` implementations of the
+// `escape_default` iterator against hand-computed expectations.
+#[test]
+fn ed_iterator_specializations() {
+ // Check counting
+ assert_eq!('\n'.escape_default().count(), 2);
+ assert_eq!('c'.escape_default().count(), 1);
+ assert_eq!(' '.escape_default().count(), 1);
+ assert_eq!('\\'.escape_default().count(), 2);
+ assert_eq!('\''.escape_default().count(), 2);
+
+ // Check nth
+
+ // Check that OoB is handled correctly
+ assert_eq!('\n'.escape_default().nth(2), None);
+ assert_eq!('c'.escape_default().nth(1), None);
+ assert_eq!(' '.escape_default().nth(1), None);
+ assert_eq!('\\'.escape_default().nth(2), None);
+ assert_eq!('\''.escape_default().nth(2), None);
+
+ // Check the first char
+ assert_eq!('\n'.escape_default().nth(0), Some('\\'));
+ assert_eq!('c'.escape_default().nth(0), Some('c'));
+ assert_eq!(' '.escape_default().nth(0), Some(' '));
+ assert_eq!('\\'.escape_default().nth(0), Some('\\'));
+ assert_eq!('\''.escape_default().nth(0), Some('\\'));
+
+ // Check the second char
+ assert_eq!('\n'.escape_default().nth(1), Some('n'));
+ assert_eq!('\\'.escape_default().nth(1), Some('\\'));
+ assert_eq!('\''.escape_default().nth(1), Some('\''));
+
+ // Check the last char
+ assert_eq!('\n'.escape_default().last(), Some('n'));
+ assert_eq!('c'.escape_default().last(), Some('c'));
+ assert_eq!(' '.escape_default().last(), Some(' '));
+ assert_eq!('\\'.escape_default().last(), Some('\\'));
+ assert_eq!('\''.escape_default().last(), Some('\''));
+}
+
+
-Subproject commit 95d6a00134f284e6b889d98f4c2cb4b285950327
+Subproject commit 9aa6600bd8f4e4f370a7d2fb76c4b3efc669cadf
you no longer need to derive from `Super<Self>`.
"####,
+E0072: r##"
+When defining a recursive struct or enum, any use of the type being defined
+from inside the definition must occur behind a pointer (like `Box` or `&`).
+This is because structs and enums must have a well-defined size, and without
+the pointer the size of the type would need to be unbounded.
+
+Consider the following erroneous definition of a type for a list of bytes:
+
+```
+// error, invalid recursive struct type
+struct ListNode {
+ head: u8,
+ tail: Option<ListNode>,
+}
+```
+
+This type cannot have a well-defined size, because it needs to be arbitrarily
+large (since we would be able to nest `ListNode`s to any depth). Specifically,
+
+```plain
+size of `ListNode` = 1 byte for `head`
+ + 1 byte for the discriminant of the `Option`
+ + size of `ListNode`
+```
+
+One way to fix this is by wrapping `ListNode` in a `Box`, like so:
+
+```
+struct ListNode {
+ head: u8,
+ tail: Option<Box<ListNode>>,
+}
+```
+
+This works because `Box` is a pointer, so its size is well-known.
+"##,
+
E0109: r##"
You tried to give a type parameter to a type which doesn't need it. Erroneous
code example:
None => self.tcx.empty_parameter_environment()
};
- let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, Some(param_env), false);
+ let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, Some(param_env));
f(&mut euv::ExprUseVisitor::new(self, &infcx))
}
fn check_static_type(&self, e: &hir::Expr) {
let ty = self.tcx.node_id_to_type(e.id);
- let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, None, false);
+ let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, None);
let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic);
let mut fulfill_cx = infcx.fulfillment_cx.borrow_mut();
fulfill_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause);
//FIXME: (@jroesch) this code should be floated up as well
let infcx = infer::new_infer_ctxt(cx.tcx,
&cx.tcx.tables,
- Some(cx.param_env.clone()),
- false);
+ Some(cx.param_env.clone()));
if infcx.type_moves_by_default(pat_ty, pat.span) {
check_move(p, sub.as_ref().map(|p| &**p));
}
let infcx = infer::new_infer_ctxt(cx.tcx,
&cx.tcx.tables,
- Some(checker.cx.param_env.clone()),
- false);
+ Some(checker.cx.param_env.clone()));
let mut visitor = ExprUseVisitor::new(&mut checker, &infcx);
visitor.walk_expr(guard);
let param_env = ParameterEnvironment::for_item(self.tcx, fn_id);
let infcx = infer::new_infer_ctxt(self.tcx,
&self.tcx.tables,
- Some(param_env.clone()),
- false);
+ Some(param_env.clone()));
let mut delegate = RvalueContextDelegate { tcx: self.tcx, param_env: ¶m_env };
let mut euv = euv::ExprUseVisitor::new(&mut delegate, &infcx);
euv.walk_fn(fd, b);
use std::{i8, i16, i32, i64, u8, u16, u32, u64};
use std::rc::Rc;
-fn lookup_const<'a>(tcx: &'a ty::ctxt, e: &Expr) -> Option<&'a Expr> {
- let opt_def = tcx.def_map.borrow().get(&e.id).map(|d| d.full_def());
- match opt_def {
- Some(def::DefConst(def_id)) |
- Some(def::DefAssociatedConst(def_id)) => {
- lookup_const_by_id(tcx, def_id, Some(e.id), None)
- }
- Some(def::DefVariant(enum_def, variant_def, _)) => {
- lookup_variant_by_id(tcx, enum_def, variant_def)
- }
- _ => None
- }
-}
-
fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt,
enum_def: DefId,
variant_def: DefId)
hir::PatStruct(path.clone(), hir::HirVec::new(), false),
Some(def::DefVariant(..)) =>
hir::PatEnum(path.clone(), None),
- _ => {
- match lookup_const(tcx, expr) {
- Some(actual) => return const_expr_to_pat(tcx, actual, span),
- _ => unreachable!()
- }
- }
+ Some(def::DefConst(def_id)) |
+ Some(def::DefAssociatedConst(def_id)) => {
+ let expr = lookup_const_by_id(tcx, def_id, Some(expr.id), None).unwrap();
+ return const_expr_to_pat(tcx, expr, span);
+ },
+ _ => unreachable!(),
}
}
substs: trait_substs });
tcx.populate_implementations_for_trait_if_necessary(trait_ref.def_id());
- let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, false);
+ let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None);
let mut selcx = traits::SelectionContext::new(&infcx);
let obligation = traits::Obligation::new(traits::ObligationCause::dummy(),
}
}
-/// errors_will_be_reported is required to proxy to the fulfillment context
-/// FIXME -- a better option would be to hold back on modifying
-/// the global cache until we know that all dependent obligations
-/// are also satisfied. In that case, we could actually remove
-/// this boolean flag, and we'd also avoid the problem of squelching
-/// duplicate errors that occur across fns.
pub fn new_infer_ctxt<'a, 'tcx>(tcx: &'a ty::ctxt<'tcx>,
tables: &'a RefCell<ty::Tables<'tcx>>,
- param_env: Option<ty::ParameterEnvironment<'a, 'tcx>>,
- errors_will_be_reported: bool)
+ param_env: Option<ty::ParameterEnvironment<'a, 'tcx>>)
-> InferCtxt<'a, 'tcx> {
InferCtxt {
tcx: tcx,
float_unification_table: RefCell::new(UnificationTable::new()),
region_vars: RegionVarBindings::new(tcx),
parameter_environment: param_env.unwrap_or(tcx.empty_parameter_environment()),
- fulfillment_cx: RefCell::new(traits::FulfillmentContext::new(errors_will_be_reported)),
+ fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()),
reported_trait_errors: RefCell::new(FnvHashSet()),
normalize: false,
err_count_on_creation: tcx.sess.err_count()
pub fn normalizing_infer_ctxt<'a, 'tcx>(tcx: &'a ty::ctxt<'tcx>,
tables: &'a RefCell<ty::Tables<'tcx>>)
-> InferCtxt<'a, 'tcx> {
- let mut infcx = new_infer_ctxt(tcx, tables, None, false);
+ let mut infcx = new_infer_ctxt(tcx, tables, None);
infcx.normalize = true;
infcx
}
return value;
}
- let infcx = new_infer_ctxt(tcx, &tcx.tables, None, true);
+ let infcx = new_infer_ctxt(tcx, &tcx.tables, None);
let mut selcx = traits::SelectionContext::new(&infcx);
let cause = traits::ObligationCause::dummy();
let traits::Normalized { value: result, obligations } =
/// if the program type checks or not -- and they are unusual
/// occurrences in any case.
pub fn report_overflow_error<'a, 'tcx, T>(infcx: &InferCtxt<'a, 'tcx>,
- obligation: &Obligation<'tcx, T>)
+ obligation: &Obligation<'tcx, T>,
+ suggest_increasing_limit: bool)
-> !
where T: fmt::Display + TypeFoldable<'tcx>
{
"overflow evaluating the requirement `{}`",
predicate);
- suggest_new_overflow_limit(infcx.tcx, &mut err, obligation.cause.span);
+ if suggest_increasing_limit {
+ suggest_new_overflow_limit(infcx.tcx, &mut err, obligation.cause.span);
+ }
note_obligation_cause(infcx, &mut err, obligation);
unreachable!();
}
+/// Reports that a cycle was detected which led to overflow and halts
+/// compilation. This is equivalent to `report_overflow_error` except
+/// that we can give a more helpful error message (and, in particular,
+/// we do not suggest increasing the overflow limit, which is not
+/// going to help).
+pub fn report_overflow_error_cycle<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>,
+ cycle: &Vec<PredicateObligation<'tcx>>)
+ -> !
+{
+ assert!(cycle.len() > 1);
+
+ debug!("report_overflow_error_cycle(cycle length = {})", cycle.len());
+
+ let cycle = infcx.resolve_type_vars_if_possible(cycle);
+
+ debug!("report_overflow_error_cycle: cycle={:?}", cycle);
+
+ // The cycle must be closed: the first and last predicates coincide.
+ assert_eq!(&cycle[0].predicate, &cycle.last().unwrap().predicate);
+
+ // Prefer the targeted "type has infinite size" diagnostic when it
+ // applies (it aborts compilation); otherwise fall through to the
+ // generic overflow report, passing `false` so that a larger recursion
+ // limit is NOT suggested -- it cannot fix a true cycle.
+ try_report_overflow_error_type_of_infinite_size(infcx, &cycle);
+ report_overflow_error(infcx, &cycle[0], false);
+}
+
+/// If a cycle results from evaluating whether something is Sized, that
+/// is a particular special case that always results from a struct or
+/// enum definition that lacks indirection (e.g., `struct Foo { x: Foo
+/// }`). We wish to report a targeted error for this case.
+pub fn try_report_overflow_error_type_of_infinite_size<'a, 'tcx>(
+ infcx: &InferCtxt<'a, 'tcx>,
+ cycle: &[PredicateObligation<'tcx>])
+{
+ // No `Sized` lang item means we cannot classify the cycle; bail out
+ // and let the caller produce the generic overflow report.
+ let sized_trait = match infcx.tcx.lang_items.sized_trait() {
+ Some(v) => v,
+ None => return,
+ };
+ let top_is_sized = {
+ match cycle[0].predicate {
+ ty::Predicate::Trait(ref data) => data.def_id() == sized_trait,
+ _ => false,
+ }
+ };
+ if !top_is_sized {
+ return;
+ }
+
+ // The only way to have a type of infinite size is to have,
+ // somewhere, a struct/enum type involved. Identify all such types
+ // and report the cycle to the user.
+
+ let struct_enum_tys: Vec<_> =
+ cycle.iter()
+ .flat_map(|obligation| match obligation.predicate {
+ ty::Predicate::Trait(ref data) => {
+ assert_eq!(data.def_id(), sized_trait);
+ let self_ty = data.skip_binder().trait_ref.self_ty(); // (*)
+ // (*) ok to skip binder because this is just
+ // error reporting and regions don't really
+ // matter
+ match self_ty.sty {
+ ty::TyEnum(..) | ty::TyStruct(..) => Some(self_ty),
+ _ => None,
+ }
+ }
+ _ => {
+ infcx.tcx.sess.span_bug(obligation.cause.span,
+ &format!("Sized cycle involving non-trait-ref: {:?}",
+ obligation.predicate));
+ }
+ })
+ .collect();
+
+ assert!(!struct_enum_tys.is_empty());
+
+ // This is a bit tricky. We want to pick a "main type" in the
+ // listing that is local to the current crate, so we can give a
+ // good span to the user. But it might not be the first one in our
+ // cycle list. So find the first one that is local and then
+ // rotate.
+ let (main_index, main_def_id) =
+ struct_enum_tys.iter()
+ .enumerate()
+ .filter_map(|(index, ty)| match ty.sty {
+ ty::TyEnum(adt_def, _) | ty::TyStruct(adt_def, _)
+ if adt_def.did.is_local() =>
+ Some((index, adt_def.did)),
+ _ =>
+ None,
+ })
+ .next()
+ .unwrap(); // should always be SOME local type involved!
+
+ // Rotate so that the "main" type is at index 0.
+ let struct_enum_tys: Vec<_> =
+ struct_enum_tys.iter()
+ .cloned()
+ .skip(main_index)
+ .chain(struct_enum_tys.iter().cloned().take(main_index))
+ .collect();
+
+ let tcx = infcx.tcx;
+ let mut err = recursive_type_with_infinite_size_error(tcx, main_def_id);
+ let len = struct_enum_tys.len();
+ // With more than two types involved, spell out the embedding chain
+ // one note per step so the user can follow the cycle.
+ if len > 2 {
+ let span = tcx.map.span_if_local(main_def_id).unwrap();
+ err.fileline_note(span,
+ &format!("type `{}` is embedded within `{}`...",
+ struct_enum_tys[0],
+ struct_enum_tys[1]));
+ for &next_ty in &struct_enum_tys[1..len-1] {
+ err.fileline_note(span,
+ &format!("...which in turn is embedded within `{}`...", next_ty));
+ }
+ err.fileline_note(span,
+ &format!("...which in turn is embedded within `{}`, \
+ completing the cycle.",
+ struct_enum_tys[len-1]));
+ }
+ err.emit();
+ // Once the error above is emitted, `abort_if_errors` does not return.
+ infcx.tcx.sess.abort_if_errors();
+ unreachable!();
+}
+
+/// Builds (but does not emit) the E0072 diagnostic for a recursive type
+/// of infinite size, pointing at the local definition of `type_def_id`
+/// and suggesting indirection (`Box`, `Rc`, or `&`) as the fix.
+/// The def-id must be local, since the span of its definition is used.
+pub fn recursive_type_with_infinite_size_error<'tcx>(tcx: &ty::ctxt<'tcx>,
+ type_def_id: DefId)
+ -> DiagnosticBuilder<'tcx>
+{
+ assert!(type_def_id.is_local());
+ let span = tcx.map.span_if_local(type_def_id).unwrap();
+ let mut err = struct_span_err!(tcx.sess, span, E0072, "recursive type `{}` has infinite size",
+ tcx.item_path_str(type_def_id));
+ err.fileline_help(span, &format!("insert indirection (e.g., a `Box`, `Rc`, or `&`) \
+ at some point to make `{}` representable",
+ tcx.item_path_str(type_def_id)));
+ err
+}
+
pub fn report_selection_error<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>,
obligation: &PredicateObligation<'tcx>,
error: &SelectionError<'tcx>)
use middle::infer::InferCtxt;
use middle::ty::{self, Ty, TypeFoldable};
-
+use rustc_data_structures::obligation_forest::{Backtrace, ObligationForest, Error};
+use std::iter;
use syntax::ast;
use util::common::ErrorReported;
-use util::nodemap::{FnvHashSet, NodeMap};
+use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap};
use super::CodeAmbiguity;
use super::CodeProjectionError;
use super::CodeSelectionError;
use super::is_object_safe;
use super::FulfillmentError;
+use super::FulfillmentErrorCode;
use super::ObligationCause;
use super::PredicateObligation;
use super::project;
+use super::report_overflow_error_cycle;
use super::select::SelectionContext;
use super::Unimplemented;
use super::util::predicate_for_builtin_bound;
// A list of all obligations that have been registered with this
// fulfillment context.
- predicates: Vec<PredicateObligation<'tcx>>,
-
- // Remembers the count of trait obligations that we have already
- // attempted to select. This is used to avoid repeating work
- // when `select_new_obligations` is called.
- attempted_mark: usize,
+ predicates: ObligationForest<PendingPredicateObligation<'tcx>>,
// A set of constraints that regionck must validate. Each
// constraint has the form `T:'a`, meaning "some type `T` must
// obligations (otherwise, it's easy to fail to walk to a
// particular node-id).
region_obligations: NodeMap<Vec<RegionObligation<'tcx>>>,
-
- pub errors_will_be_reported: bool,
}
#[derive(Clone)]
pub cause: ObligationCause<'tcx>,
}
+/// An obligation tracked by the obligation forest, paired with the
+/// inference types whose resolution it is currently stalled on.
+/// `stalled_on` is empty when the obligation is freshly registered;
+/// once selection reports "not enough type information" it records the
+/// unresolved types so re-processing can be skipped until one of them
+/// changes.
+#[derive(Clone, Debug)]
+pub struct PendingPredicateObligation<'tcx> {
+ pub obligation: PredicateObligation<'tcx>,
+ pub stalled_on: Vec<Ty<'tcx>>,
+}
+
impl<'tcx> FulfillmentContext<'tcx> {
/// Creates a new fulfillment context.
- ///
- /// `errors_will_be_reported` indicates whether ALL errors that
- /// are generated by this fulfillment context will be reported to
- /// the end user. This is used to inform caching, because it
- /// allows us to conclude that traits that resolve successfully
- /// will in fact always resolve successfully (in particular, it
- /// guarantees that if some dependent obligation encounters a
- /// problem, compilation will be aborted). If you're not sure of
- /// the right value here, pass `false`, as that is the more
- /// conservative option.
- ///
- /// FIXME -- a better option would be to hold back on modifying
- /// the global cache until we know that all dependent obligations
- /// are also satisfied. In that case, we could actually remove
- /// this boolean flag, and we'd also avoid the problem of squelching
- /// duplicate errors that occur across fns.
- pub fn new(errors_will_be_reported: bool) -> FulfillmentContext<'tcx> {
+ pub fn new() -> FulfillmentContext<'tcx> {
FulfillmentContext {
duplicate_set: FulfilledPredicates::new(),
- predicates: Vec::new(),
- attempted_mark: 0,
+ predicates: ObligationForest::new(),
region_obligations: NodeMap(),
- errors_will_be_reported: errors_will_be_reported,
}
}
}
debug!("register_predicate({:?})", obligation);
- self.predicates.push(obligation);
+ let obligation = PendingPredicateObligation {
+ obligation: obligation,
+ stalled_on: vec![]
+ };
+ self.predicates.push_root(obligation);
}
pub fn region_obligations(&self,
-> Result<(),Vec<FulfillmentError<'tcx>>>
{
try!(self.select_where_possible(infcx));
-
- // Anything left is ambiguous.
- let errors: Vec<FulfillmentError> =
- self.predicates
- .iter()
- .map(|o| FulfillmentError::new((*o).clone(), CodeAmbiguity))
- .collect();
-
+ let errors: Vec<_> =
+ self.predicates.to_errors(CodeAmbiguity)
+ .into_iter()
+ .map(|e| to_fulfillment_error(e))
+ .collect();
if errors.is_empty() {
Ok(())
} else {
}
}
- /// Attempts to select obligations that were registered since the call to a selection routine.
- /// This is used by the type checker to eagerly attempt to resolve obligations in hopes of
- /// gaining type information. It'd be equally valid to use `select_where_possible` but it
- /// results in `O(n^2)` performance (#18208).
- pub fn select_new_obligations<'a>(&mut self,
- infcx: &InferCtxt<'a,'tcx>)
- -> Result<(),Vec<FulfillmentError<'tcx>>>
- {
- let mut selcx = SelectionContext::new(infcx);
- self.select(&mut selcx, true)
- }
-
pub fn select_where_possible<'a>(&mut self,
infcx: &InferCtxt<'a,'tcx>)
-> Result<(),Vec<FulfillmentError<'tcx>>>
{
let mut selcx = SelectionContext::new(infcx);
- self.select(&mut selcx, false)
+ self.select(&mut selcx)
}
- pub fn pending_obligations(&self) -> &[PredicateObligation<'tcx>] {
- &self.predicates
+ pub fn pending_obligations(&self) -> Vec<PendingPredicateObligation<'tcx>> {
+ self.predicates.pending_obligations()
}
fn is_duplicate_or_add(&mut self,
tcx: &ty::ctxt<'tcx>,
predicate: &ty::Predicate<'tcx>)
-> bool {
- // This is a kind of dirty hack to allow us to avoid "rederiving"
- // things that we have already proven in other methods.
- //
- // The idea is that any predicate that doesn't involve type
- // parameters and which only involves the 'static region (and
- // no other regions) is universally solvable, since impls are global.
- //
- // This is particularly important since even if we have a
- // cache hit in the selection context, we still wind up
- // evaluating the 'nested obligations'. This cache lets us
- // skip those.
-
- if self.errors_will_be_reported && predicate.is_global() {
- tcx.fulfilled_predicates.borrow_mut().is_duplicate_or_add(predicate)
- } else {
- self.duplicate_set.is_duplicate_or_add(predicate)
+ // For "global" predicates -- that is, predicates that don't
+ // involve type parameters, inference variables, or regions
+ // other than 'static -- we can check the cache in the tcx,
+ // which allows us to leverage work from other threads. Note
+ // that we don't add anything to this cache yet (unlike the
+ // local cache). This is because the tcx cache maintains the
+ // invariant that it only contains things that have been
+ // proven, and we have not yet proven that `predicate` holds.
+ if predicate.is_global() && tcx.fulfilled_predicates.borrow().is_duplicate(predicate) {
+ return true;
}
+
+ // If `predicate` is not global, or not present in the tcx
+ // cache, we can still check for it in our local cache and add
+ // it if not present. Note that if we find this predicate in
+ // the local cache we can stop immediately, without reporting
+ // any errors, even though we don't know yet if it is
+ // true. This is because, while we don't yet know if the
+ // predicate holds, we know that this same fulfillment context
+ // already is in the process of finding out.
+ self.duplicate_set.is_duplicate_or_add(predicate)
}
/// Attempts to select obligations using `selcx`. If `only_new_obligations` is true, then it
/// only attempts to select obligations that haven't been seen before.
fn select<'a>(&mut self,
- selcx: &mut SelectionContext<'a, 'tcx>,
- only_new_obligations: bool)
+ selcx: &mut SelectionContext<'a, 'tcx>)
-> Result<(),Vec<FulfillmentError<'tcx>>>
{
- debug!("select({} obligations, only_new_obligations={}) start",
- self.predicates.len(),
- only_new_obligations);
+ debug!("select(obligation-forest-size={})", self.predicates.len());
let mut errors = Vec::new();
loop {
- let count = self.predicates.len();
-
- debug!("select_where_possible({} obligations) iteration",
- count);
+ debug!("select_where_possible: starting another iteration");
- let mut new_obligations = Vec::new();
-
- // If we are only attempting obligations we haven't seen yet,
- // then set `skip` to the number of obligations we've already
- // seen.
- let mut skip = if only_new_obligations {
- self.attempted_mark
- } else {
- 0
+ // Process pending obligations.
+ let outcome = {
+ let region_obligations = &mut self.region_obligations;
+ self.predicates.process_obligations(
+ |obligation, backtrace| process_predicate(selcx,
+ obligation,
+ backtrace,
+ region_obligations))
};
- // First pass: walk each obligation, retaining
- // only those that we cannot yet process.
- {
- let region_obligations = &mut self.region_obligations;
- self.predicates.retain(|predicate| {
- // Hack: Retain does not pass in the index, but we want
- // to avoid processing the first `start_count` entries.
- let processed =
- if skip == 0 {
- process_predicate(selcx, predicate,
- &mut new_obligations, &mut errors, region_obligations)
- } else {
- skip -= 1;
- false
- };
- !processed
- });
+ debug!("select_where_possible: outcome={:?}", outcome);
+
+ // these are obligations that were proven to be true.
+ for pending_obligation in outcome.completed {
+ let predicate = &pending_obligation.obligation.predicate;
+ if predicate.is_global() {
+ selcx.tcx().fulfilled_predicates.borrow_mut()
+ .is_duplicate_or_add(predicate);
+ }
}
- self.attempted_mark = self.predicates.len();
+ errors.extend(
+ outcome.errors.into_iter()
+ .map(|e| to_fulfillment_error(e)));
- if self.predicates.len() == count {
- // Nothing changed.
+ // If nothing new was added, no need to keep looping.
+ if outcome.stalled {
break;
}
-
- // Now go through all the successful ones,
- // registering any nested obligations for the future.
- for new_obligation in new_obligations {
- self.register_predicate_obligation(selcx.infcx(), new_obligation);
- }
}
- debug!("select({} obligations, {} errors) done",
- self.predicates.len(),
- errors.len());
+ debug!("select({} predicates remaining, {} errors) done",
+ self.predicates.len(), errors.len());
if errors.is_empty() {
Ok(())
}
}
+/// Like `process_predicate1`, but wraps the resulting subobligations
+/// into `PendingPredicateObligation`s, filtering out those already
+/// known to hold and deduplicating identical siblings.
fn process_predicate<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
- obligation: &PredicateObligation<'tcx>,
- new_obligations: &mut Vec<PredicateObligation<'tcx>>,
- errors: &mut Vec<FulfillmentError<'tcx>>,
+ pending_obligation: &mut PendingPredicateObligation<'tcx>,
+ backtrace: Backtrace<PendingPredicateObligation<'tcx>>,
region_obligations: &mut NodeMap<Vec<RegionObligation<'tcx>>>)
- -> bool
+ -> Result<Option<Vec<PendingPredicateObligation<'tcx>>>,
+ FulfillmentErrorCode<'tcx>>
{
- /*!
- * Processes a predicate obligation and modifies the appropriate
- * output array with the successful/error result. Returns `false`
- * if the predicate could not be processed due to insufficient
- * type inference.
- */
+ match process_predicate1(selcx, pending_obligation, backtrace, region_obligations) {
+ Ok(Some(v)) => {
+ // FIXME the right thing to do here, I think, is to permit
+ // DAGs. That is, we should detect whenever this predicate
+ // has appeared somewhere in the current tree. If it's a
+ // parent, that's a cycle, and we should either error out
+ // or consider it ok. But if it's NOT a parent, we can
+ // ignore it, since it will be proven (or not) separately.
+ // However, this is a touch tricky, so I'm doing something
+ // a bit hackier for now so that the `huge-struct.rs` passes.
+
+ // First pass: compute, per subobligation, whether it should
+ // be retained at all (parallel boolean vector).
+ let retain_vec: Vec<_> = {
+ let mut dedup = FnvHashSet();
+ v.iter()
+ .map(|o| {
+ // Screen out obligations that we know globally
+ // are true. This should really be the DAG check
+ // mentioned above.
+ if
+ o.predicate.is_global() &&
+ selcx.tcx().fulfilled_predicates.borrow().is_duplicate(&o.predicate)
+ {
+ return false;
+ }
+
+ // If we see two siblings that are exactly the
+ // same, no need to add them twice.
+ if !dedup.insert(&o.predicate) {
+ return false;
+ }
+
+ true
+ })
+ .collect()
+ };
+
+ // Second pass: wrap the retained subobligations, each starting
+ // with an empty `stalled_on` list.
+ let pending_predicate_obligations =
+ v.into_iter()
+ .zip(retain_vec)
+ .flat_map(|(o, retain)| {
+ if retain {
+ Some(PendingPredicateObligation {
+ obligation: o,
+ stalled_on: vec![]
+ })
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ Ok(Some(pending_predicate_obligations))
+ }
+ Ok(None) => Ok(None),
+ Err(e) => Err(e)
+ }
+}
+
+/// Processes a predicate obligation and returns either:
+/// - `Ok(Some(v))` if the predicate is true, presuming that `v` are also true
+/// - `Ok(None)` if we don't have enough info to be sure
+/// - `Err` if the predicate does not hold
+fn process_predicate1<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
+ pending_obligation: &mut PendingPredicateObligation<'tcx>,
+ backtrace: Backtrace<PendingPredicateObligation<'tcx>>,
+ region_obligations: &mut NodeMap<Vec<RegionObligation<'tcx>>>)
+ -> Result<Option<Vec<PredicateObligation<'tcx>>>,
+ FulfillmentErrorCode<'tcx>>
+{
+ // if we were stalled on some unresolved variables, first check
+ // whether any of them have been resolved; if not, don't bother
+ // doing more work yet
+ if !pending_obligation.stalled_on.is_empty() {
+ if pending_obligation.stalled_on.iter().all(|&ty| {
+ let resolved_ty = selcx.infcx().resolve_type_vars_if_possible(&ty);
+ resolved_ty == ty // nothing changed here
+ }) {
+ debug!("process_predicate: pending obligation {:?} still stalled on {:?}",
+ selcx.infcx().resolve_type_vars_if_possible(&pending_obligation.obligation),
+ pending_obligation.stalled_on);
+ return Ok(None);
+ }
+ pending_obligation.stalled_on = vec![];
+ }
+
+ let obligation = &pending_obligation.obligation;
+
+ // If we exceed the recursion limit, take a moment to look for a
+ // cycle so we can give a better error report from here, where we
+ // have more context.
+ let recursion_limit = selcx.tcx().sess.recursion_limit.get();
+ if obligation.recursion_depth >= recursion_limit {
+ if let Some(cycle) = scan_for_cycle(obligation, &backtrace) {
+ report_overflow_error_cycle(selcx.infcx(), &cycle);
+ }
+ }
match obligation.predicate {
ty::Predicate::Trait(ref data) => {
+ if coinductive_match(selcx, obligation, data, &backtrace) {
+ return Ok(Some(vec![]));
+ }
+
let trait_obligation = obligation.with(data.clone());
match selcx.select(&trait_obligation) {
- Ok(None) => {
- false
+ Ok(Some(vtable)) => {
+ Ok(Some(vtable.nested_obligations()))
}
- Ok(Some(s)) => {
- new_obligations.append(&mut s.nested_obligations());
- true
+ Ok(None) => {
+ // This is a bit subtle: for the most part, the
+ // only reason we can fail to make progress on
+ // trait selection is because we don't have enough
+ // information about the types in the trait. One
+ // exception is that we sometimes haven't decided
+ // what kind of closure a closure is. *But*, in
+ // that case, it turns out, the type of the
+ // closure will also change, because the closure
+ // also includes references to its upvars as part
+ // of its type, and those types are resolved at
+ // the same time.
+ pending_obligation.stalled_on =
+ data.skip_binder() // ok b/c this check doesn't care about regions
+ .input_types()
+ .iter()
+ .map(|t| selcx.infcx().resolve_type_vars_if_possible(t))
+ .filter(|t| t.has_infer_types())
+ .flat_map(|t| t.walk())
+ .filter(|t| match t.sty { ty::TyInfer(_) => true, _ => false })
+ .collect();
+
+ debug!("process_predicate: pending obligation {:?} now stalled on {:?}",
+ selcx.infcx().resolve_type_vars_if_possible(obligation),
+ pending_obligation.stalled_on);
+
+ Ok(None)
}
Err(selection_err) => {
- debug!("predicate: {:?} error: {:?}",
- obligation,
- selection_err);
- errors.push(
- FulfillmentError::new(
- obligation.clone(),
- CodeSelectionError(selection_err)));
- true
+ Err(CodeSelectionError(selection_err))
}
}
}
ty::Predicate::Equate(ref binder) => {
match selcx.infcx().equality_predicate(obligation.cause.span, binder) {
- Ok(()) => { }
- Err(_) => {
- errors.push(
- FulfillmentError::new(
- obligation.clone(),
- CodeSelectionError(Unimplemented)));
- }
+ Ok(()) => Ok(Some(Vec::new())),
+ Err(_) => Err(CodeSelectionError(Unimplemented)),
}
- true
}
ty::Predicate::RegionOutlives(ref binder) => {
match selcx.infcx().region_outlives_predicate(obligation.cause.span, binder) {
- Ok(()) => { }
- Err(_) => {
- errors.push(
- FulfillmentError::new(
- obligation.clone(),
- CodeSelectionError(Unimplemented)));
- }
+ Ok(()) => Ok(Some(Vec::new())),
+ Err(_) => Err(CodeSelectionError(Unimplemented)),
}
-
- true
}
ty::Predicate::TypeOutlives(ref binder) => {
// If so, this obligation is an error (for now). Eventually we should be
// able to support additional cases here, like `for<'a> &'a str: 'a`.
None => {
- errors.push(
- FulfillmentError::new(
- obligation.clone(),
- CodeSelectionError(Unimplemented)))
+ Err(CodeSelectionError(Unimplemented))
}
// Otherwise, we have something of the form
// `for<'a> T: 'a where 'a not in T`, which we can treat as `T: 'static`.
register_region_obligation(t_a, ty::ReStatic,
obligation.cause.clone(),
region_obligations);
+ Ok(Some(vec![]))
}
}
}
register_region_obligation(t_a, r_b,
obligation.cause.clone(),
region_obligations);
+ Ok(Some(vec![]))
}
}
- true
}
ty::Predicate::Projection(ref data) => {
let project_obligation = obligation.with(data.clone());
- let result = project::poly_project_and_unify_type(selcx, &project_obligation);
- debug!("process_predicate: poly_project_and_unify_type({:?}) returned {:?}",
- project_obligation,
- result);
- match result {
- Ok(Some(obligations)) => {
- new_obligations.extend(obligations);
- true
- }
- Ok(None) => {
- false
- }
- Err(err) => {
- errors.push(
- FulfillmentError::new(
- obligation.clone(),
- CodeProjectionError(err)));
- true
- }
+ match project::poly_project_and_unify_type(selcx, &project_obligation) {
+ Ok(v) => Ok(v),
+ Err(e) => Err(CodeProjectionError(e))
}
}
ty::Predicate::ObjectSafe(trait_def_id) => {
if !is_object_safe(selcx.tcx(), trait_def_id) {
- errors.push(FulfillmentError::new(
- obligation.clone(),
- CodeSelectionError(Unimplemented)));
+ Err(CodeSelectionError(Unimplemented))
+ } else {
+ Ok(Some(Vec::new()))
}
- true
}
ty::Predicate::WellFormed(ty) => {
- match ty::wf::obligations(selcx.infcx(), obligation.cause.body_id,
- ty, obligation.cause.span) {
- Some(obligations) => {
- new_obligations.extend(obligations);
- true
- }
- None => {
- false
+ Ok(ty::wf::obligations(selcx.infcx(), obligation.cause.body_id,
+ ty, obligation.cause.span))
+ }
+ }
+}
+
+/// For defaulted traits, we use a co-inductive strategy to solve, so
+/// that recursion is ok. This routine returns true if the top of the
+/// stack (`top_obligation` and `top_data`):
+/// - is a defaulted trait, and
+/// - it also appears in the backtrace at some position `X`; and,
+/// - all the predicates at positions `X..` between `X` and the top are
+/// also defaulted traits.
+fn coinductive_match<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
+ top_obligation: &PredicateObligation<'tcx>,
+ top_data: &ty::PolyTraitPredicate<'tcx>,
+ backtrace: &Backtrace<PendingPredicateObligation<'tcx>>)
+ -> bool
+{
+ if selcx.tcx().trait_has_default_impl(top_data.def_id()) {
+ debug!("coinductive_match: top_data={:?}", top_data);
+ for bt_obligation in backtrace.clone() {
+ debug!("coinductive_match: bt_obligation={:?}", bt_obligation);
+
+ // *Everything* in the backtrace must be a defaulted trait.
+ match bt_obligation.obligation.predicate {
+ ty::Predicate::Trait(ref data) => {
+ if !selcx.tcx().trait_has_default_impl(data.def_id()) {
+ debug!("coinductive_match: trait does not have default impl");
+ break;
+ }
}
+ _ => { break; }
+ }
+
+ // And we must find a recursive match.
+ if bt_obligation.obligation.predicate == top_obligation.predicate {
+ debug!("coinductive_match: found a match in the backtrace");
+ return true;
+ }
}
+
+ // Not a defaulted trait, or no recursive match found before a
+ // non-defaulted predicate was hit: not a co-inductive success.
+ false
+}
+
+/// Walks `top_obligation` followed by the backtrace, recording the
+/// first position at which each predicate occurs. On the first repeat,
+/// returns the subsequence from the original occurrence through the
+/// repeat (inclusive) -- i.e. a closed cycle suitable for
+/// `report_overflow_error_cycle`. Returns `None` if no predicate
+/// repeats.
+fn scan_for_cycle<'a,'tcx>(top_obligation: &PredicateObligation<'tcx>,
+ backtrace: &Backtrace<PendingPredicateObligation<'tcx>>)
+ -> Option<Vec<PredicateObligation<'tcx>>>
+{
+ let mut map = FnvHashMap();
+ let all_obligations =
+ || iter::once(top_obligation)
+ .chain(backtrace.clone()
+ .map(|p| &p.obligation));
+ for (index, bt_obligation) in all_obligations().enumerate() {
+ if let Some(&start) = map.get(&bt_obligation.predicate) {
+ // Found a cycle starting at position `start` and running
+ // until the current position (`index`).
+ return Some(all_obligations().skip(start).take(index - start + 1).cloned().collect());
+ } else {
+ map.insert(bt_obligation.predicate.clone(), index);
+ }
+ }
+ None
+}
fn register_region_obligation<'tcx>(t_a: Ty<'tcx>,
!self.set.insert(key.clone())
}
}
+
+/// Converts an obligation-forest `Error` into a `FulfillmentError`,
+/// reporting the first obligation in the backtrace (i.e. the
+/// obligation at the point of failure) as the failed obligation.
+fn to_fulfillment_error<'tcx>(
+ error: Error<PendingPredicateObligation<'tcx>, FulfillmentErrorCode<'tcx>>)
+ -> FulfillmentError<'tcx>
+{
+ let obligation = error.backtrace.into_iter().next().unwrap().obligation;
+ FulfillmentError::new(obligation, error.error)
+}
use syntax::codemap::{Span, DUMMY_SP};
pub use self::error_reporting::TraitErrorKey;
+pub use self::error_reporting::recursive_type_with_infinite_size_error;
pub use self::error_reporting::report_fulfillment_errors;
pub use self::error_reporting::report_overflow_error;
+pub use self::error_reporting::report_overflow_error_cycle;
pub use self::error_reporting::report_selection_error;
pub use self::error_reporting::report_object_safety_error;
pub use self::coherence::orphan_check;
// this function's result remains infallible, we must confirm
// that guess. While imperfect, I believe this is sound.
- let mut fulfill_cx = FulfillmentContext::new(false);
+ let mut fulfill_cx = FulfillmentContext::new();
// We can use a dummy node-id here because we won't pay any mind
// to region obligations that arise (there shouldn't really be any
let elaborated_env = unnormalized_env.with_caller_bounds(predicates);
- let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, Some(elaborated_env), false);
- let predicates = match fully_normalize(&infcx, cause,
+ let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, Some(elaborated_env));
+ let predicates = match fully_normalize(&infcx,
+ cause,
&infcx.parameter_environment.caller_bounds) {
Ok(predicates) => predicates,
Err(errors) => {
}
};
+ debug!("normalize_param_env_or_error: normalized predicates={:?}",
+ predicates);
+
let free_regions = FreeRegionMap::new();
infcx.resolve_regions_and_report_errors(&free_regions, body_id);
let predicates = match infcx.fully_resolve(&predicates) {
}
};
+ debug!("normalize_param_env_or_error: resolved predicates={:?}",
+ predicates);
+
infcx.parameter_environment.with_caller_bounds(predicates)
}
-> Result<T, Vec<FulfillmentError<'tcx>>>
where T : TypeFoldable<'tcx>
{
- debug!("normalize_param_env(value={:?})", value);
+ debug!("fully_normalize(value={:?})", value);
let mut selcx = &mut SelectionContext::new(infcx);
// FIXME (@jroesch) ISSUE 26721
//
// I think we should probably land this refactor and then come
// back to this is a follow-up patch.
- let mut fulfill_cx = FulfillmentContext::new(false);
+ let mut fulfill_cx = FulfillmentContext::new();
let Normalized { value: normalized_value, obligations } =
project::normalize(selcx, cause, value);
- debug!("normalize_param_env: normalized_value={:?} obligations={:?}",
+ debug!("fully_normalize: normalized_value={:?} obligations={:?}",
normalized_value,
obligations);
for obligation in obligations {
fulfill_cx.register_predicate_obligation(selcx.infcx(), obligation);
}
- try!(fulfill_cx.select_all_or_error(infcx));
+ debug!("fully_normalize: select_all_or_error start");
+ match fulfill_cx.select_all_or_error(infcx) {
+ Ok(()) => { }
+ Err(e) => {
+ debug!("fully_normalize: error={:?}", e);
+ return Err(e);
+ }
+ }
+ debug!("fully_normalize: select_all_or_error complete");
let resolved_value = infcx.resolve_type_vars_if_possible(&normalized_value);
- debug!("normalize_param_env: resolved_value={:?}", resolved_value);
+ debug!("fully_normalize: resolved_value={:?}", resolved_value);
Ok(resolved_value)
}
let recursion_limit = selcx.tcx().sess.recursion_limit.get();
if obligation.recursion_depth >= recursion_limit {
debug!("project: overflow!");
- report_overflow_error(selcx.infcx(), &obligation);
+ report_overflow_error(selcx.infcx(), &obligation, true);
}
let obligation_trait_ref =
// not update) the cache.
let recursion_limit = self.infcx.tcx.sess.recursion_limit.get();
if stack.obligation.recursion_depth >= recursion_limit {
- report_overflow_error(self.infcx(), &stack.obligation);
+ report_overflow_error(self.infcx(), &stack.obligation, true);
}
// Check the cache. Note that we skolemize the trait-ref
nested: ty::Binder<Vec<Ty<'tcx>>>)
-> VtableBuiltinData<PredicateObligation<'tcx>>
{
+ debug!("vtable_builtin_data(obligation={:?}, bound={:?}, nested={:?})",
+ obligation, bound, nested);
+
let trait_def = match self.tcx().lang_items.from_builtin_kind(bound) {
Ok(def_id) => def_id,
Err(_) => {
let tcx = self.tcx;
// FIXME: (@jroesch) float this code up
- let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, Some(self.clone()), false);
+ let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, Some(self.clone()));
let adt = match self_type.sty {
ty::TyStruct(struct_def, substs) => {
-> bool
{
let tcx = param_env.tcx;
- let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, Some(param_env.clone()), false);
+ let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, Some(param_env.clone()));
let is_impld = traits::type_known_to_meet_builtin_bound(&infcx,
self, bound, span);
debug!("check_loans(body id={})", body.id);
let param_env = ty::ParameterEnvironment::for_item(bccx.tcx, fn_id);
- let infcx = infer::new_infer_ctxt(bccx.tcx, &bccx.tcx.tables, Some(param_env), false);
+ let infcx = infer::new_infer_ctxt(bccx.tcx, &bccx.tcx.tables, Some(param_env));
let mut clcx = CheckLoanCtxt {
bccx: bccx,
};
let param_env = ty::ParameterEnvironment::for_item(bccx.tcx, fn_id);
- let infcx = infer::new_infer_ctxt(bccx.tcx, &bccx.tcx.tables, Some(param_env), false);
+ let infcx = infer::new_infer_ctxt(bccx.tcx, &bccx.tcx.tables, Some(param_env));
{
let mut euv = euv::ExprUseVisitor::new(&mut glcx, &infcx);
euv.walk_fn(decl, body);
impl<'a, 'tcx, 'v> Visitor<'v> for StaticInitializerCtxt<'a, 'tcx> {
fn visit_expr(&mut self, ex: &Expr) {
if let hir::ExprAddrOf(mutbl, ref base) = ex.node {
- let infcx = infer::new_infer_ctxt(self.bccx.tcx, &self.bccx.tcx.tables, None, false);
+ let infcx = infer::new_infer_ctxt(self.bccx.tcx, &self.bccx.tcx.tables, None);
let mc = mc::MemCategorizationContext::new(&infcx);
let base_cmt = mc.cat_expr(&**base).unwrap();
let borrow_kind = ty::BorrowKind::from_mutbl(mutbl);
html_favicon_url = "https://www.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![feature(rustc_private, staged_api)]
#![feature(hashmap_hasher)]
+#![feature(nonzero)]
+#![feature(rustc_private)]
+#![feature(staged_api)]
#![cfg_attr(test, feature(test))]
+extern crate core;
#[macro_use] extern crate log;
extern crate serialize as rustc_serialize; // used by deriving
pub mod bitvec;
pub mod graph;
pub mod ivar;
+pub mod obligation_forest;
pub mod snapshot_vec;
pub mod transitive_relation;
pub mod unify;
--- /dev/null
+The `ObligationForest` is a utility data structure used in trait
+matching to track the set of outstanding obligations (those not yet
+resolved to success or error). It also tracks the "backtrace" of each
+pending obligation (why we are trying to figure this out in the first
+place).
+
+### External view
+
+`ObligationForest` supports two main public operations (there are a
+few others not discussed here):
+
+1. Add a new root obligation (`push_root`).
+2. Process the pending obligations (`process_obligations`).
+
+When a new obligation `N` is added, it becomes the root of an
+obligation tree. This tree is a singleton to start, so `N` is both the
+root and the only leaf. Each time the `process_obligations` method is
+called, it will invoke its callback with every pending obligation (so
+that will include `N`, the first time). The callback should process the
+obligation `O` that it is given and return one of three results:
+
+- `Ok(None)` -> ambiguous result. Obligation was neither a success
+ nor a failure. It is assumed that further attempts to process the
+ obligation will yield the same result unless something in the
+ surrounding environment changes.
+- `Ok(Some(C))` -> the obligation was *shallowly successful*. The
+ vector `C` is a list of subobligations. The meaning of this is that
+ `O` was successful on the assumption that all the obligations in `C`
+ are also successful. Therefore, `O` is only considered a "true"
+ success if `C` is empty. Otherwise, `O` is put into a suspended
+ state and the obligations in `C` become the new pending
+ obligations. They will be processed the next time you call
+ `process_obligations`.
+- `Err(E)` -> obligation failed with error `E`. We will collect this
+ error and return it from `process_obligations`, along with the
+ "backtrace" of obligations (that is, the list of obligations up to
+ and including the root of the failed obligation). No further
+ obligations from that same tree will be processed, since the tree is
+ now considered to be in error.
+
+When the call to `process_obligations` completes, you get back an `Outcome`,
+which includes three bits of information:
+
+- `completed`: a list of obligations where processing was fully
+ completed without error (meaning that all transitive subobligations
+ have also been completed). So, for example, if the callback from
+ `process_obligations` returns `Ok(Some(C))` for some obligation `O`,
+ then `O` will be considered completed right away if `C` is the
+ empty vector. Otherwise it will only be considered completed once
+ all the obligations in `C` have been found completed.
+- `errors`: a list of errors that occurred and associated backtraces
+ at the time of error, which can be used to give context to the user.
+- `stalled`: if true, then none of the existing obligations were
+ *shallowly successful* (that is, no callback returned `Ok(Some(_))`).
+ This implies that all obligations were either errors or returned an
+ ambiguous result, which means that any further calls to
+ `process_obligations` would simply yield back further ambiguous
+ results. This is used by the `FulfillmentContext` to decide when it
+ has reached a steady state.
+
+#### Snapshots
+
+The `ObligationForest` supports a limited form of snapshots; see
+`start_snapshot`, `commit_snapshot`, and `rollback_snapshot`. In
+particular, you can use a snapshot to roll back new root
+obligations. However, it is an error to attempt to
+`process_obligations` during a snapshot.
+
+### Implementation details
+
+For the most part, comments specific to the implementation are in the
+code. This file only contains a very high-level overview. Basically,
+the forest is stored in a vector. Each element of the vector is a node
+in some tree. Each node in the vector has the index of an (optional)
+parent and (for convenience) its root (which may be itself). It also
+has a current state, described by `NodeState`. After each
+processing step, we compress the vector to remove completed and error
+nodes, which aren't needed anymore.
+
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! The `ObligationForest` is a utility data structure used in trait
+//! matching to track the set of outstanding obligations (those not
+//! yet resolved to success or error). It also tracks the "backtrace"
+//! of each pending obligation (why we are trying to figure this out
+//! in the first place). See README.md for a general overview of how
+//! to use this class.
+
+use std::fmt::Debug;
+use std::mem;
+
+mod node_index;
+
+#[cfg(test)]
+mod test;
+
+pub struct ObligationForest<O> {
+ /// The list of obligations. In between calls to
+ /// `process_obligations`, this list only contains nodes in the
+ /// `Pending` or `Success` state (with a non-zero number of
+ /// incomplete children). During processing, some of those nodes
+ /// may be changed to the error state, or we may find that they
+ /// are completed (that is, `num_incomplete_children` drops to 0).
+ /// At the end of processing, those nodes will be removed by a
+ /// call to `compress`.
+ ///
+ /// At all times we maintain the invariant that every node appears
+ /// at a higher index than its parent. This is needed by the
+ /// backtrace iterator (which uses `split_at`).
+ nodes: Vec<Node<O>>,
+ /// Stack of watermarks into `nodes`, one per active snapshot;
+ /// `rollback_snapshot` truncates `nodes` back to the popped value.
+ snapshots: Vec<usize>
+}
+
+/// Token returned by `start_snapshot`. Must be handed back to
+/// `commit_snapshot` or `rollback_snapshot`, which assert that
+/// snapshots are committed/rolled back in stack order.
+pub struct Snapshot {
+ // Depth of the snapshot stack at creation time.
+ len: usize,
+}
+
+pub use self::node_index::NodeIndex;
+
+/// One node in the forest: the processing state of a single
+/// obligation, plus links to its parent (if any) and to the root of
+/// its tree.
+struct Node<O> {
+ state: NodeState<O>,
+ parent: Option<NodeIndex>,
+ root: NodeIndex, // points to the root, which may be the current node
+}
+
+/// The state of one node in some tree within the forest. This
+/// represents the current state of processing for the obligation (of
+/// type `O`) associated with this node.
+#[derive(Debug)]
+enum NodeState<O> {
+ /// Obligation not yet resolved to success or error.
+ Pending { obligation: O },
+
+ /// Obligation resolved to success; `num_incomplete_children`
+ /// indicates the number of children still in an "incomplete"
+ /// state. Incomplete means that either the child is still
+ /// pending, or it has children which are incomplete. (Basically,
+ /// there is pending work somewhere in the subtree of the child.)
+ ///
+ /// Once all children have completed, success nodes are removed
+ /// from the vector by the compression step.
+ Success { obligation: O, num_incomplete_children: usize },
+
+ /// This obligation was resolved to an error. Error nodes are
+ /// removed from the vector by the compression step. (Also used
+ /// transiently as a placeholder while the obligation is moved
+ /// out of a node, in `success` and `backtrace`.)
+ Error,
+}
+
+/// The result of one call to `process_obligations`.
+#[derive(Debug)]
+pub struct Outcome<O,E> {
+ /// Obligations that were completely evaluated, including all
+ /// (transitive) subobligations.
+ pub completed: Vec<O>,
+
+ /// Backtrace of obligations that were found to be in error.
+ pub errors: Vec<Error<O,E>>,
+
+ /// If true, then we saw no successful obligations, which means
+ /// there is no point in further iteration. This is based on the
+ /// assumption that when trait matching returns `Err` or
+ /// `Ok(None)`, those results do not affect environmental
+ /// inference state. (Note that if we invoke `process_obligations`
+ /// with no pending obligations, stalled will be true.)
+ pub stalled: bool,
+}
+
+/// An obligation that failed, together with the chain of obligations
+/// (from the failing node up through its ancestors to the tree root)
+/// that led to it.
+#[derive(Debug, PartialEq, Eq)]
+pub struct Error<O,E> {
+ pub error: E,
+ pub backtrace: Vec<O>,
+}
+
+impl<O: Debug> ObligationForest<O> {
+ /// Creates an empty forest.
+ pub fn new() -> ObligationForest<O> {
+ ObligationForest {
+ nodes: vec![],
+ snapshots: vec![]
+ }
+ }
+
+ /// Return the total number of nodes in the forest that have not
+ /// yet been fully resolved.
+ pub fn len(&self) -> usize {
+ self.nodes.len()
+ }
+
+ /// Starts a snapshot; while any snapshot is active, only new root
+ /// obligations may be pushed (see `rollback_snapshot`).
+ pub fn start_snapshot(&mut self) -> Snapshot {
+ self.snapshots.push(self.nodes.len());
+ Snapshot { len: self.snapshots.len() }
+ }
+
+ /// Commits the innermost snapshot, keeping any roots pushed since
+ /// the matching `start_snapshot`.
+ pub fn commit_snapshot(&mut self, snapshot: Snapshot) {
+ assert_eq!(snapshot.len, self.snapshots.len());
+ let nodes_len = self.snapshots.pop().unwrap();
+ assert!(self.nodes.len() >= nodes_len);
+ }
+
+ /// Rolls back the innermost snapshot, discarding any roots pushed
+ /// since the matching `start_snapshot`.
+ pub fn rollback_snapshot(&mut self, snapshot: Snapshot) {
+ // Check that we are obeying stack discipline.
+ assert_eq!(snapshot.len, self.snapshots.len());
+ let nodes_len = self.snapshots.pop().unwrap();
+
+ // The only action permitted while in a snapshot is to push
+ // new root obligations. Because no processing will have been
+ // done, those roots should still be in the pending state.
+ debug_assert!(self.nodes[nodes_len..].iter().all(|n| match n.state {
+ NodeState::Pending { .. } => true,
+ _ => false,
+ }));
+
+ self.nodes.truncate(nodes_len);
+ }
+
+ /// True if at least one snapshot is currently active.
+ pub fn in_snapshot(&self) -> bool {
+ !self.snapshots.is_empty()
+ }
+
+ /// Adds a new tree to the forest.
+ ///
+ /// This CAN be done during a snapshot.
+ pub fn push_root(&mut self, obligation: O) {
+ let index = NodeIndex::new(self.nodes.len());
+ self.nodes.push(Node::new(index, None, obligation));
+ }
+
+ /// Convert all remaining obligations to the given error.
+ ///
+ /// This cannot be done during a snapshot.
+ pub fn to_errors<E:Clone>(&mut self, error: E) -> Vec<Error<O,E>> {
+ assert!(!self.in_snapshot());
+ let mut errors = vec![];
+ for index in 0..self.nodes.len() {
+ debug_assert!(!self.nodes[index].is_popped());
+ self.inherit_error(index);
+ if let NodeState::Pending { .. } = self.nodes[index].state {
+ let backtrace = self.backtrace(index);
+ errors.push(Error { error: error.clone(), backtrace: backtrace });
+ }
+ }
+ let successful_obligations = self.compress();
+ assert!(successful_obligations.is_empty());
+ errors
+ }
+
+ /// Returns the set of obligations that are in a pending state.
+ pub fn pending_obligations(&self) -> Vec<O> where O: Clone {
+ self.nodes.iter()
+ .filter_map(|n| match n.state {
+ NodeState::Pending { ref obligation } => Some(obligation),
+ _ => None,
+ })
+ .cloned()
+ .collect()
+ }
+
+ /// Process the obligations.
+ ///
+ /// This CANNOT be unrolled (presently, at least).
+ pub fn process_obligations<E,F>(&mut self, mut action: F) -> Outcome<O,E>
+ where E: Debug, F: FnMut(&mut O, Backtrace<O>) -> Result<Option<Vec<O>>, E>
+ {
+ debug!("process_obligations(len={})", self.nodes.len());
+ assert!(!self.in_snapshot()); // cannot unroll this action
+
+ let mut errors = vec![];
+ let mut stalled = true;
+
+ // We maintain the invariant that the list is in pre-order, so
+ // parents occur before their children. Also, whenever an
+ // error occurs, we propagate it from the child all the way to
+ // the root of the tree. Together, these two facts mean that
+ // when we visit a node, we can check if its root is in error,
+ // and we will find out if any prior node within this forest
+ // encountered an error.
+
+ for index in 0..self.nodes.len() {
+ debug_assert!(!self.nodes[index].is_popped());
+ self.inherit_error(index);
+
+ debug!("process_obligations: node {} == {:?}",
+ index, self.nodes[index].state);
+
+ let result = {
+ // `split_at_mut` lets us hand out the ancestors
+ // (prefix) as a backtrace while mutably borrowing the
+ // current node (suffix[0]) at the same time.
+ let parent = self.nodes[index].parent;
+ let (prefix, suffix) = self.nodes.split_at_mut(index);
+ let backtrace = Backtrace::new(prefix, parent);
+ match suffix[0].state {
+ NodeState::Error |
+ NodeState::Success { .. } =>
+ continue,
+ NodeState::Pending { ref mut obligation } =>
+ action(obligation, backtrace),
+ }
+ };
+
+ debug!("process_obligations: node {} got result {:?}", index, result);
+
+ match result {
+ Ok(None) => {
+ // no change in state
+ }
+ Ok(Some(children)) => {
+ // if we saw a Some(_) result, we are not (yet) stalled
+ stalled = false;
+ self.success(index, children);
+ }
+ Err(err) => {
+ let backtrace = self.backtrace(index);
+ errors.push(Error { error: err, backtrace: backtrace });
+ }
+ }
+ }
+
+ // Now we have to compress the result
+ let successful_obligations = self.compress();
+
+ debug!("process_obligations: complete");
+
+ Outcome {
+ completed: successful_obligations,
+ errors: errors,
+ stalled: stalled,
+ }
+ }
+
+ /// Indicates that node `index` has been processed successfully,
+ /// yielding `children` as the derivative work. If children is an
+ /// empty vector, this will update the ref count on the parent of
+ /// `index` to indicate that a child has completed
+ /// successfully. Otherwise, adds new nodes to represent the child
+ /// work.
+ fn success(&mut self, index: usize, children: Vec<O>) {
+ debug!("success(index={}, children={:?})", index, children);
+
+ let num_incomplete_children = children.len();
+
+ if num_incomplete_children == 0 {
+ // if there is no work left to be done, decrement parent's ref count
+ self.update_parent(index);
+ } else {
+ // create child work
+ let root_index = self.nodes[index].root;
+ let node_index = NodeIndex::new(index);
+ self.nodes.extend(
+ children.into_iter()
+ .map(|o| Node::new(root_index, Some(node_index), o)));
+ }
+
+ // change state from `Pending` to `Success`, temporarily swapping in `Error`
+ let state = mem::replace(&mut self.nodes[index].state, NodeState::Error);
+ self.nodes[index].state = match state {
+ NodeState::Pending { obligation } =>
+ NodeState::Success { obligation: obligation,
+ num_incomplete_children: num_incomplete_children },
+ NodeState::Success { .. } |
+ NodeState::Error =>
+ unreachable!()
+ };
+ }
+
+ /// Decrements the ref count on the parent of `child`; if the
+ /// parent's ref count then reaches zero, proceeds recursively.
+ fn update_parent(&mut self, child: usize) {
+ debug!("update_parent(child={})", child);
+ if let Some(parent) = self.nodes[child].parent {
+ let parent = parent.get();
+ match self.nodes[parent].state {
+ NodeState::Success { ref mut num_incomplete_children, .. } => {
+ *num_incomplete_children -= 1;
+ if *num_incomplete_children > 0 {
+ return;
+ }
+ }
+ _ => unreachable!(),
+ }
+ // Parent has no incomplete children left; propagate upward.
+ self.update_parent(parent);
+ }
+ }
+
+ /// If the root of `child` is in an error state, places `child`
+ /// into an error state. This is used during processing so that we
+ /// skip the remaining obligations from a tree once some other
+ /// node in the tree is found to be in error.
+ fn inherit_error(&mut self, child: usize) {
+ let root = self.nodes[child].root.get();
+ if let NodeState::Error = self.nodes[root].state {
+ self.nodes[child].state = NodeState::Error;
+ }
+ }
+
+ /// Returns a vector of obligations for `p` and all of its
+ /// ancestors, putting them into the error state in the process.
+ /// The fact that the root is now marked as an error is used by
+ /// `inherit_error` above to propagate the error state to the
+ /// remainder of the tree.
+ fn backtrace(&mut self, mut p: usize) -> Vec<O> {
+ let mut trace = vec![];
+ loop {
+ let state = mem::replace(&mut self.nodes[p].state, NodeState::Error);
+ match state {
+ NodeState::Pending { obligation } |
+ NodeState::Success { obligation, .. } => {
+ trace.push(obligation);
+ }
+ NodeState::Error => {
+ // we should not encounter an error, because if
+ // there was an error in the ancestors, it should
+ // have been propagated down and we should never
+ // have tried to process this obligation
+ panic!("encountered error in node {:?} when collecting stack trace", p);
+ }
+ }
+
+ // loop to the parent
+ match self.nodes[p].parent {
+ Some(q) => { p = q.get(); }
+ None => { return trace; }
+ }
+ }
+ }
+
+ /// Compresses the vector, removing all popped nodes. This adjusts
+ /// the indices and hence invalidates any outstanding
+ /// indices. Cannot be used during a transaction.
+ fn compress(&mut self) -> Vec<O> {
+ assert!(!self.in_snapshot()); // didn't write code to unroll this action
+ // rewrites[i] will hold the new index of node i after compression.
+ let mut rewrites: Vec<_> = (0..self.nodes.len()).collect();
+
+ // Finish propagating error state. Note that in this case we
+ // only have to check immediate parents, rather than all
+ // ancestors, because all errors have already occurred that
+ // are going to occur.
+ let nodes_len = self.nodes.len();
+ for i in 0..nodes_len {
+ if !self.nodes[i].is_popped() {
+ self.inherit_error(i);
+ }
+ }
+
+ // Now go through and move all nodes that are either
+ // successful or which have an error over into to the end of
+ // the list, preserving the relative order of the survivors
+ // (which is important for the `inherit_error` logic).
+ let mut dead = 0;
+ for i in 0..nodes_len {
+ if self.nodes[i].is_popped() {
+ dead += 1;
+ } else if dead > 0 {
+ self.nodes.swap(i, i - dead);
+ rewrites[i] -= dead;
+ }
+ }
+
+ // Pop off all the nodes we killed and extract the success
+ // stories.
+ let successful =
+ (0 .. dead).map(|_| self.nodes.pop().unwrap())
+ .flat_map(|node| match node.state {
+ NodeState::Error => None,
+ NodeState::Pending { .. } => unreachable!(),
+ NodeState::Success { obligation, num_incomplete_children } => {
+ assert_eq!(num_incomplete_children, 0);
+ Some(obligation)
+ }
+ })
+ .collect();
+
+ // Adjust the parent indices, since we compressed things.
+ for node in &mut self.nodes {
+ if let Some(ref mut index) = node.parent {
+ let new_index = rewrites[index.get()];
+ debug_assert!(new_index < (nodes_len - dead));
+ *index = NodeIndex::new(new_index);
+ }
+
+ node.root = NodeIndex::new(rewrites[node.root.get()]);
+ }
+
+ successful
+ }
+}
+
+impl<O> Node<O> {
+ /// Creates a node in the `Pending` state with the given tree root
+ /// and optional parent.
+ fn new(root: NodeIndex, parent: Option<NodeIndex>, obligation: O) -> Node<O> {
+ Node {
+ parent: parent,
+ state: NodeState::Pending { obligation: obligation },
+ root: root
+ }
+ }
+
+ /// True if this node is done (error, or success with no remaining
+ /// incomplete children) and hence removable by `compress`.
+ fn is_popped(&self) -> bool {
+ match self.state {
+ NodeState::Pending { .. } => false,
+ NodeState::Success { num_incomplete_children, .. } => num_incomplete_children == 0,
+ NodeState::Error => true,
+ }
+ }
+}
+
+/// Iterator over the obligations of a node's ancestors: yields the
+/// parent's obligation, then the grandparent's, and so on up to the
+/// root of the tree.
+#[derive(Clone)]
+pub struct Backtrace<'b, O: 'b> {
+ nodes: &'b [Node<O>],
+ pointer: Option<NodeIndex>,
+}
+
+impl<'b, O> Backtrace<'b, O> {
+ // `nodes` must contain every index reachable from `pointer` via
+ // parent links; callers pass the prefix of the forest's node
+ // vector, which suffices because parents precede their children.
+ fn new(nodes: &'b [Node<O>], pointer: Option<NodeIndex>) -> Backtrace<'b, O> {
+ Backtrace { nodes: nodes, pointer: pointer }
+ }
+}
+
+impl<'b, O> Iterator for Backtrace<'b, O> {
+ type Item = &'b O;
+
+ /// Walks one step up the parent chain. Panics if an ancestor is in
+ /// the `Error` state, since error nodes are never processed and so
+ /// should never appear in a live backtrace.
+ fn next(&mut self) -> Option<&'b O> {
+ debug!("Backtrace: self.pointer = {:?}", self.pointer);
+ if let Some(p) = self.pointer {
+ self.pointer = self.nodes[p.get()].parent;
+ match self.nodes[p.get()].state {
+ NodeState::Pending { ref obligation } |
+ NodeState::Success { ref obligation, .. } => {
+ Some(obligation)
+ }
+ NodeState::Error => {
+ panic!("Backtrace encountered an error.");
+ }
+ }
+ } else {
+ None
+ }
+ }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use core::nonzero::NonZero;
+use std::u32;
+
+/// Compact index of a node in the `ObligationForest`. The value is
+/// stored as `index + 1` in a `NonZero<u32>` so that
+/// `Option<NodeIndex>` needs no extra space for the `None` case.
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct NodeIndex {
+ index: NonZero<u32>
+}
+
+impl NodeIndex {
+ /// Creates an index for position `value`. Panics if `value + 1`
+ /// would not fit in a `u32`.
+ pub fn new(value: usize) -> NodeIndex {
+ assert!(value < (u32::MAX as usize));
+ // SAFETY: `value as u32 + 1` is at least 1, so never zero.
+ unsafe {
+ NodeIndex { index: NonZero::new((value as u32) + 1) }
+ }
+ }
+
+ /// Recovers the position originally passed to `new`.
+ pub fn get(self) -> usize {
+ (*self.index - 1) as usize
+ }
+}
+
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use super::{ObligationForest, Outcome, Error};
+
+// Exercises several rounds of processing: subtask creation, ambiguous
+// results, error reporting with backtraces, and propagation of an
+// error from one node to the rest of its tree.
+#[test]
+fn push_pop() {
+ let mut forest = ObligationForest::new();
+ forest.push_root("A");
+ forest.push_root("B");
+ forest.push_root("C");
+
+ // first round, B errors out, A has subtasks, and C completes, creating this:
+ // A |-> A.1
+ // |-> A.2
+ // |-> A.3
+ let Outcome { completed: ok, errors: err, .. } = forest.process_obligations(|obligation, _| {
+ match *obligation {
+ "A" => Ok(Some(vec!["A.1", "A.2", "A.3"])),
+ "B" => Err("B is for broken"),
+ "C" => Ok(Some(vec![])),
+ _ => unreachable!(),
+ }
+ });
+ assert_eq!(ok, vec!["C"]);
+ assert_eq!(err, vec![Error {error: "B is for broken",
+ backtrace: vec!["B"]}]);
+
+ // second round: two delays, one success, creating an uneven set of subtasks:
+ // A |-> A.1
+ // |-> A.2
+ // |-> A.3 |-> A.3.i
+ // D |-> D.1
+ // |-> D.2
+ forest.push_root("D");
+ let Outcome { completed: ok, errors: err, .. }: Outcome<&'static str, ()> =
+ forest.process_obligations(|obligation, _| {
+ match *obligation {
+ "A.1" => Ok(None),
+ "A.2" => Ok(None),
+ "A.3" => Ok(Some(vec!["A.3.i"])),
+ "D" => Ok(Some(vec!["D.1", "D.2"])),
+ _ => unreachable!(),
+ }
+ });
+ assert_eq!(ok, Vec::<&'static str>::new());
+ assert_eq!(err, Vec::new());
+
+
+ // third round: ok in A.1 but trigger an error in A.2. Check that it
+ // propagates to A.3.i, but not D.1 or D.2.
+ // D |-> D.1 |-> D.1.i
+ // |-> D.2 |-> D.2.i
+ let Outcome { completed: ok, errors: err, .. } = forest.process_obligations(|obligation, _| {
+ match *obligation {
+ "A.1" => Ok(Some(vec![])),
+ "A.2" => Err("A is for apple"),
+ "D.1" => Ok(Some(vec!["D.1.i"])),
+ "D.2" => Ok(Some(vec!["D.2.i"])),
+ _ => unreachable!(),
+ }
+ });
+ assert_eq!(ok, vec!["A.1"]);
+ assert_eq!(err, vec![Error { error: "A is for apple",
+ backtrace: vec!["A.2", "A"] }]);
+
+ // fourth round: error in D.1.i that should propagate to D.2.i
+ let Outcome { completed: ok, errors: err, .. } = forest.process_obligations(|obligation, _| {
+ match *obligation {
+ "D.1.i" => Err("D is for dumb"),
+ _ => panic!("unexpected obligation {:?}", obligation),
+ }
+ });
+ assert_eq!(ok, Vec::<&'static str>::new());
+ assert_eq!(err, vec![Error { error: "D is for dumb",
+ backtrace: vec!["D.1.i", "D.1", "D"] }]);
+}
+
+// Test that if a tree with grandchildren succeeds, everything is
+// reported as expected:
+// A
+// A.1
+// A.2
+// A.2.i
+// A.2.ii
+// A.3
+#[test]
+fn success_in_grandchildren() {
+ let mut forest = ObligationForest::new();
+ forest.push_root("A");
+
+ let Outcome { completed: ok, errors: err, .. } =
+ forest.process_obligations::<(),_>(|obligation, _| {
+ match *obligation {
+ "A" => Ok(Some(vec!["A.1", "A.2", "A.3"])),
+ _ => unreachable!(),
+ }
+ });
+ assert!(ok.is_empty());
+ assert!(err.is_empty());
+
+ let Outcome { completed: ok, errors: err, .. } =
+ forest.process_obligations::<(),_>(|obligation, _| {
+ match *obligation {
+ "A.1" => Ok(Some(vec![])),
+ "A.2" => Ok(Some(vec!["A.2.i", "A.2.ii"])),
+ "A.3" => Ok(Some(vec![])),
+ _ => unreachable!(),
+ }
+ });
+ // Completed obligations come back in the order `compress` pops
+ // them off the end of the node vector, hence "A.3" before "A.1".
+ assert_eq!(ok, vec!["A.3", "A.1"]);
+ assert!(err.is_empty());
+
+ let Outcome { completed: ok, errors: err, .. } =
+ forest.process_obligations::<(),_>(|obligation, _| {
+ match *obligation {
+ "A.2.i" => Ok(Some(vec!["A.2.i.a"])),
+ "A.2.ii" => Ok(Some(vec![])),
+ _ => unreachable!(),
+ }
+ });
+ assert_eq!(ok, vec!["A.2.ii"]);
+ assert!(err.is_empty());
+
+ let Outcome { completed: ok, errors: err, .. } =
+ forest.process_obligations::<(),_>(|obligation, _| {
+ match *obligation {
+ "A.2.i.a" => Ok(Some(vec![])),
+ _ => unreachable!(),
+ }
+ });
+ assert_eq!(ok, vec!["A.2.i.a", "A.2.i", "A.2", "A"]);
+ assert!(err.is_empty());
+
+ let Outcome { completed: ok, errors: err, .. } =
+ forest.process_obligations::<(),_>(|_, _| unreachable!());
+ assert!(ok.is_empty());
+ assert!(err.is_empty());
+}
+
+#[test]
+fn to_errors_no_throw() {
+ // check that converting multiple children with common parent (A)
+ // only yields one of them (and does not panic, in particular).
+ let mut forest = ObligationForest::new();
+ forest.push_root("A");
+ let Outcome { completed: ok, errors: err, .. } =
+ forest.process_obligations::<(),_>(|obligation, _| {
+ match *obligation {
+ "A" => Ok(Some(vec!["A.1", "A.2", "A.3"])),
+ _ => unreachable!(),
+ }
+ });
+ assert_eq!(ok.len(), 0);
+ assert_eq!(err.len(), 0);
+ // Only one error results: once the first child's backtrace marks
+ // the tree as in error, the siblings inherit that error state.
+ let errors = forest.to_errors(());
+ assert_eq!(errors.len(), 1);
+}
+
+#[test]
+fn backtrace() {
+ // check that the backtrace passed to the callback lists the
+ // ancestors of the obligation being processed, in order, and is
+ // empty for a root obligation.
+ let mut forest: ObligationForest<&'static str> = ObligationForest::new();
+ forest.push_root("A");
+ let Outcome { completed: ok, errors: err, .. } =
+ forest.process_obligations::<(),_>(|obligation, mut backtrace| {
+ assert!(backtrace.next().is_none());
+ match *obligation {
+ "A" => Ok(Some(vec!["A.1"])),
+ _ => unreachable!(),
+ }
+ });
+ assert!(ok.is_empty());
+ assert!(err.is_empty());
+ let Outcome { completed: ok, errors: err, .. } =
+ forest.process_obligations::<(),_>(|obligation, mut backtrace| {
+ assert!(backtrace.next().unwrap() == &"A");
+ assert!(backtrace.next().is_none());
+ match *obligation {
+ "A.1" => Ok(Some(vec!["A.1.i"])),
+ _ => unreachable!(),
+ }
+ });
+ assert!(ok.is_empty());
+ assert!(err.is_empty());
+ let Outcome { completed: ok, errors: err, .. } =
+ forest.process_obligations::<(),_>(|obligation, mut backtrace| {
+ assert!(backtrace.next().unwrap() == &"A.1");
+ assert!(backtrace.next().unwrap() == &"A");
+ assert!(backtrace.next().is_none());
+ match *obligation {
+ "A.1.i" => Ok(None),
+ _ => unreachable!(),
+ }
+ });
+ assert_eq!(ok.len(), 0);
+ assert!(err.is_empty());
+}
lang_items,
stability::Index::new(krate),
|tcx| {
- let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, false);
+ let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None);
body(Env { infcx: &infcx });
let free_regions = FreeRegionMap::new();
infcx.resolve_regions_and_report_errors(&free_regions,
let node_id = tcx.map.as_local_node_id(method.def_id).unwrap();
let param_env = ty::ParameterEnvironment::for_item(tcx, node_id);
- let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, Some(param_env), false);
+ let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, Some(param_env));
let mut selcx = traits::SelectionContext::new(&infcx);
match selcx.select(&obligation) {
// The method comes from a `T: Trait` bound.
X86StdcallCallConv = 64,
X86FastcallCallConv = 65,
X86_64_Win64 = 79,
+ X86_VectorCall = 80
}
#[derive(Copy, Clone)]
let param_env = ty::ParameterEnvironment::for_item(self.tcx, id);
- let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, Some(param_env), true);
+ let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, Some(param_env));
match build_mir(Cx::new(&infcx), implicit_arg_tys, id, span, decl, body) {
Ok(mut mir) => {
use std::iter::once;
use libc::c_uint;
use syntax::abi::{Cdecl, Aapcs, C, Win64, Abi};
-use syntax::abi::{PlatformIntrinsic, RustIntrinsic, Rust, RustCall, Stdcall, Fastcall, System};
+use syntax::abi::{PlatformIntrinsic, RustIntrinsic, Rust, RustCall, Stdcall};
+use syntax::abi::{Fastcall, Vectorcall, System};
use syntax::attr;
use syntax::codemap::Span;
use syntax::parse::token::{InternedString, special_idents};
Stdcall => llvm::X86StdcallCallConv,
Fastcall => llvm::X86FastcallCallConv,
+ Vectorcall => llvm::X86_VectorCall,
C => llvm::CCallConv,
Win64 => llvm::X86_64_Win64,
fulfillment_cx
.pending_obligations()
.iter()
+ .map(|obligation| &obligation.obligation)
.filter_map(|obligation| {
debug!("deduce_expectations_from_obligations: obligation.predicate={:?}",
obligation.predicate);
fulfillment_cx
.pending_obligations()
.iter()
+ .map(|obligation| &obligation.obligation)
.filter_map(|obligation| {
let opt_trait_ref = match obligation.predicate {
ty::Predicate::Projection(ref data) => Some(data.to_poly_trait_ref()),
debug!("compare_impl_method: impl_trait_ref (liberated) = {:?}",
impl_trait_ref);
- let mut infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, true);
+ let mut infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None);
let mut fulfillment_cx = infcx.fulfillment_cx.borrow_mut();
let trait_to_impl_substs = &impl_trait_ref.substs;
debug!("compare_const_impl(impl_trait_ref={:?})",
impl_trait_ref);
- let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, true);
+ let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None);
let mut fulfillment_cx = infcx.fulfillment_cx.borrow_mut();
// The below is for the most part highly similar to the procedure
// check that the impl type can be made to match the trait type.
let impl_param_env = ty::ParameterEnvironment::for_item(tcx, self_type_node_id);
- let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, Some(impl_param_env), true);
+ let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, Some(impl_param_env));
let named_type = tcx.lookup_item_type(self_type_did).ty;
let named_type = named_type.subst(tcx, &infcx.parameter_environment.free_substs);
// FIXME(#18653) -- Try to resolve obligations, giving us more
// typing information, which can sometimes be needed to avoid
// pathological region inference failures.
- fcx.select_new_obligations();
+ fcx.select_obligations_where_possible();
// Insert any adjustments needed (always an autoref of some mutability).
match self_expr {
-> Inherited<'a, 'tcx> {
Inherited {
- infcx: infer::new_infer_ctxt(tcx, tables, Some(param_env), true),
+ infcx: infer::new_infer_ctxt(tcx, tables, Some(param_env)),
locals: RefCell::new(NodeMap()),
tables: tables,
deferred_call_resolutions: RefCell::new(DefIdMap()),
return ty;
}
- // If not, try resolving any new fcx obligations that have cropped up.
- self.select_new_obligations();
- ty = self.infcx().resolve_type_vars_if_possible(&ty);
- if !ty.has_infer_types() {
- debug!("resolve_type_vars_if_possible: ty={:?}", ty);
- return ty;
- }
-
- // If not, try resolving *all* pending obligations as much as
+ // If not, try resolving pending obligations as much as
// possible. This can help substantially when there are
// indirect dependencies that don't seem worth tracking
// precisely.
Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
}
}
-
- /// Try to select any fcx obligation that we haven't tried yet, in an effort
- /// to improve inference. You could just call
- /// `select_obligations_where_possible` except that it leads to repeated
- /// work.
- fn select_new_obligations(&self) {
- match
- self.inh.infcx.fulfillment_cx
- .borrow_mut()
- .select_new_obligations(self.infcx())
- {
- Ok(()) => { }
- Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); }
- }
- }
-
}
impl<'a, 'tcx> RegionScope for FnCtxt<'a, 'tcx> {
// an "opportunistic" vtable resolution of any trait bounds on
// the call. This helps coercions.
if check_blocks {
- fcx.select_new_obligations();
+ fcx.select_obligations_where_possible();
}
// For variadic functions, we don't have a declared type for all of
pub fn check_representable(tcx: &ty::ctxt,
sp: Span,
item_id: ast::NodeId,
- designation: &str) -> bool {
+ _designation: &str) -> bool {
let rty = tcx.node_id_to_type(item_id);
// Check that it is possible to represent this type. This call identifies
// caught by case 1.
match rty.is_representable(tcx, sp) {
Representability::SelfRecursive => {
- struct_span_err!(tcx.sess, sp, E0072, "invalid recursive {} type", designation)
- .fileline_help(sp, "wrap the inner value in a box to make it representable")
- .emit();
+ let item_def_id = tcx.map.local_def_id(item_id);
+ traits::recursive_type_with_infinite_size_error(tcx, item_def_id).emit();
return false
}
Representability::Representable | Representability::ContainsRecursive => (),
debug!("check_implementations_of_coerce_unsized: {:?} -> {:?} (free)",
source, target);
- let infcx = new_infer_ctxt(tcx, &tcx.tables, Some(param_env), true);
+ let infcx = new_infer_ctxt(tcx, &tcx.tables, Some(param_env));
let check_mutbl = |mt_a: ty::TypeAndMut<'tcx>, mt_b: ty::TypeAndMut<'tcx>,
mk_ptr: &Fn(Ty<'tcx>) -> Ty<'tcx>| {
pub fn check_coherence(crate_context: &CrateCtxt) {
let _task = crate_context.tcx.dep_graph.in_task(DepNode::Coherence);
- let infcx = new_infer_ctxt(crate_context.tcx, &crate_context.tcx.tables, None, true);
+ let infcx = new_infer_ctxt(crate_context.tcx, &crate_context.tcx.tables, None);
CoherenceChecker {
crate_context: crate_context,
inference_context: infcx,
impl1_def_id,
impl2_def_id);
- let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, None, false);
+ let infcx = infer::new_infer_ctxt(self.tcx, &self.tcx.tables, None);
if let Some(trait_ref) = traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id) {
self.report_overlap_error(impl1_def_id, impl2_def_id, trait_ref);
}
```
"##,
-E0072: r##"
-When defining a recursive struct or enum, any use of the type being defined
-from inside the definition must occur behind a pointer (like `Box` or `&`).
-This is because structs and enums must have a well-defined size, and without
-the pointer the size of the type would need to be unbounded.
-
-Consider the following erroneous definition of a type for a list of bytes:
-
-```
-// error, invalid recursive struct type
-struct ListNode {
- head: u8,
- tail: Option<ListNode>,
-}
-```
-
-This type cannot have a well-defined size, because it needs to be arbitrarily
-large (since we would be able to nest `ListNode`s to any depth). Specifically,
-
-```plain
-size of `ListNode` = 1 byte for `head`
- + 1 byte for the discriminant of the `Option`
- + size of `ListNode`
-```
-
-One way to fix this is by wrapping `ListNode` in a `Box`, like so:
-
-```
-struct ListNode {
- head: u8,
- tail: Option<Box<ListNode>>,
-}
-```
-
-This works because `Box` is a pointer, so its size is well-known.
-"##,
-
E0073: r##"
You cannot define a struct (or enum) `Foo` that requires an instance of `Foo`
in order to make a new `Foo` value. This is because there would be no way a
{
let result = match maybe_infcx {
None => {
- let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, false);
+ let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None);
infer::mk_eqty(&infcx, t1_is_expected, TypeOrigin::Misc(span), t1, t2)
}
Some(infcx) => {
Cdecl,
Stdcall,
Fastcall,
+ Vectorcall,
Aapcs,
Win64,
AbiData {abi: Cdecl, name: "cdecl" },
AbiData {abi: Stdcall, name: "stdcall" },
AbiData {abi: Fastcall, name: "fastcall" },
+ AbiData {abi: Vectorcall, name: "vectorcall"},
AbiData {abi: Aapcs, name: "aapcs" },
AbiData {abi: Win64, name: "win64" },
// Allows cfg(target_thread_local)
("cfg_target_thread_local", "1.7.0", Some(29594), Active),
+
+ // rustc internal
+ ("abi_vectorcall", "1.7.0", None, Active)
];
// (changing above list without updating src/doc/reference.md makes @cmr sad)
Abi::PlatformIntrinsic => {
Some(("platform_intrinsics",
"platform intrinsics are experimental and possibly buggy"))
+ },
+ Abi::Vectorcall => {
+ Some(("abi_vectorcall",
+ "vectorcall is experimental and subject to change"
+ ))
}
_ => None
};
"intrinsics are subject to change")
}
FnKind::ItemFn(_, _, _, _, abi, _) |
- FnKind::Method(_, &ast::MethodSig { abi, .. }, _) if abi == Abi::RustCall => {
- self.gate_feature("unboxed_closures",
- span,
- "rust-call ABI is subject to change")
- }
+ FnKind::Method(_, &ast::MethodSig { abi, .. }, _) => match abi {
+ Abi::RustCall => {
+ self.gate_feature("unboxed_closures", span,
+ "rust-call ABI is subject to change");
+ },
+ Abi::Vectorcall => {
+ self.gate_feature("abi_vectorcall", span,
+ "vectorcall is experimental and subject to change");
+ },
+ _ => {}
+ },
_ => {}
}
visit::walk_fn(self, fn_kind, fn_decl, block, span);
pub fn main() {
let x: Vec<Trait + Sized> = Vec::new();
//~^ ERROR the trait `core::marker::Sized` is not implemented
- //~^^ ERROR the trait `core::marker::Sized` is not implemented
+ //~| ERROR the trait `core::marker::Sized` is not implemented
+ //~| ERROR the trait `core::marker::Sized` is not implemented
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+extern "vectorcall" { //~ ERROR vectorcall is experimental and subject to change
+ fn bar();
+}
+
+extern "vectorcall" fn baz() { //~ ERROR vectorcall is experimental and subject to change
+}
+
+fn main() {
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-
-// error-pattern: invalid recursive enum type
-
enum mlist { cons(isize, mlist), nil, }
+//~^ ERROR recursive type `mlist` has infinite size
fn main() { let a = mlist::cons(10, mlist::cons(11, mlist::nil)); }
// except according to those terms.
struct Foo { foo: Option<Option<Foo>> }
-//~^ ERROR invalid recursive struct type
+//~^ ERROR recursive type `Foo` has infinite size
impl Foo { fn bar(&self) {} }
// except according to those terms.
struct Baz { q: Option<Foo> }
-//~^ ERROR invalid recursive struct type
struct Foo { q: Option<Baz> }
-//~^ ERROR invalid recursive struct type
+//~^ ERROR recursive type `Foo` has infinite size
impl Foo { fn bar(&self) {} }
use std::sync::Mutex;
struct Foo { foo: Mutex<Option<Foo>> }
-//~^ ERROR invalid recursive struct type
+//~^ ERROR recursive type `Foo` has infinite size
impl Foo { fn bar(&self) {} }
use std::marker;
struct Foo<T> { foo: Option<Option<Foo<T>>>, marker: marker::PhantomData<T> }
-//~^ ERROR invalid recursive struct type
+//~^ ERROR recursive type `Foo` has infinite size
impl<T> Foo<T> { fn bar(&self) {} }
use std::marker;
struct Foo { foo: Bar<Foo> }
+
struct Bar<T> { x: Bar<Foo> , marker: marker::PhantomData<T> }
-//~^ ERROR invalid recursive struct type
+//~^ ERROR recursive type `Bar` has infinite size
impl Foo { fn foo(&self) {} }
use std::sync::Mutex;
enum Foo { X(Mutex<Option<Foo>>) }
-//~^ ERROR invalid recursive enum type
+//~^ ERROR recursive type `Foo` has infinite size
impl Foo { fn bar(self) {} }
// except according to those terms.
enum Foo { Voo(Option<Option<Foo>>) }
-//~^ ERROR invalid recursive enum type
+//~^ ERROR recursive type `Foo` has infinite size
impl Foo { fn bar(&self) {} }
// except according to those terms.
fn main() {
- for (ref i,) in [].iter() { //~ ERROR: type mismatch resolving
+ for (ref i,) in [].iter() { //~ ERROR mismatched types
i.clone();
//~^ ERROR: the type of this value must be known in this context
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// error-pattern: overflow representing the type `S`
-
-trait Mirror { type It; }
-impl<T> Mirror for T { type It = Self; }
+trait Mirror { type It: ?Sized; }
+impl<T: ?Sized> Mirror for T { type It = Self; }
struct S(Option<<S as Mirror>::It>);
+//~^ ERROR recursive type `S` has infinite size
fn main() {
let _s = S(None);
use send_packet;
pub type ping = send_packet<pong>;
pub struct pong(send_packet<ping>);
- //~^ ERROR invalid recursive struct type
+ //~^ ERROR recursive type `pingpong::pong` has infinite size
}
fn main() {}
enum foo { foo_(bar) }
enum bar { bar_none, bar_some(bar) }
-//~^ ERROR invalid recursive enum type
+//~^ ERROR recursive type `bar` has infinite size
fn main() {
}
enum foo { foo_(bar) }
struct bar { x: bar }
-//~^ ERROR invalid recursive struct type
+//~^ ERROR E0072
fn main() {
}
enum E1 { V1(E2<E1>), }
enum E2<T> { V2(E2<E1>, marker::PhantomData<T>), }
-//~^ ERROR invalid recursive enum type
+//~^ ERROR recursive type `E2` has infinite size
impl E1 { fn foo(&self) {} }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-struct S {
- //~^ ERROR invalid recursive struct type
+struct S { //~ ERROR E0072
element: Option<S>
}
static boxed: Box<RefCell<isize>> = box RefCell::new(0);
//~^ ERROR allocations are not allowed in statics
//~| ERROR the trait `core::marker::Sync` is not implemented for the type
-//~| ERROR the trait `core::marker::Sync` is not implemented for the type
fn main() { }
let t: S<T> = S(marker::PhantomData);
let a = &t as &Gettable<T>;
//~^ ERROR the trait `core::marker::Send` is not implemented
- //~^^ ERROR the trait `core::marker::Copy` is not implemented
}
fn g<T>(val: T) {
let t: S<T> = S(marker::PhantomData);
let a: &Gettable<T> = &t;
//~^ ERROR the trait `core::marker::Send` is not implemented
- //~^^ ERROR the trait `core::marker::Copy` is not implemented
}
fn foo<'a>() {
extern crate libc;
-trait Mirror { type It; }
-impl<T> Mirror for T { type It = Self; }
+trait Mirror { type It: ?Sized; }
+impl<T: ?Sized> Mirror for T { type It = Self; }
#[repr(C)]
pub struct StructWithProjection(*mut <StructWithProjection as Mirror>::It);
#[repr(C)]
let x = RefCell::new(0);
f(x);
//~^ ERROR `core::marker::Sync` is not implemented
- //~^^ ERROR `core::marker::Sync` is not implemented
}
fn assert<T: RecoverSafe + ?Sized>() {}
fn main() {
- assert::<Rc<RefCell<i32>>>(); //~ ERROR: is not implemented
- //~^ ERROR: is not implemented
+ assert::<Rc<RefCell<i32>>>(); //~ ERROR E0277
}
fn main() {
assert::<Arc<RefCell<i32>>>(); //~ ERROR: is not implemented
- //~^ ERROR: is not implemented
}
fn assert<T: RecoverSafe + ?Sized>() {}
fn main() {
- assert::<&RefCell<i32>>(); //~ ERROR: is not implemented
- //~^ ERROR is not implemented
+ assert::<&RefCell<i32>>(); //~ ERROR E0277
}
fn assert<T: RecoverSafe + ?Sized>() {}
fn main() {
- assert::<*mut RefCell<i32>>(); //~ ERROR: is not implemented
- //~^ ERROR is not implemented
+ assert::<*mut RefCell<i32>>(); //~ ERROR E0277
}
}
fn make_bar_explicit<T:Bar>(t: &T) -> &Bar {
+ //~^ ERROR E0038
t as &Bar
}
// Bool => does not implement iterator.
for i in false..true {}
- //~^ ERROR the trait
- //~^^ ERROR the trait
- //~^^^ ERROR the trait
+ //~^ ERROR E0277
// Unsized type.
let arr: &[_] = &[1, 2, 3];
let range = *arr..;
//~^ ERROR the trait `core::marker::Sized` is not implemented
+ //~| ERROR the trait `core::marker::Sized` is not implemented
}
self.head * other.head + self.tail.dot(other.tail)
}
}
-fn test<T:Dot> (n:isize, i:isize, first:T, second:T) ->isize {
- match n { 0 => {first.dot(second)} //~ ERROR overflow
+fn test<T:Dot> (n:isize, i:isize, first:T, second:T) ->isize { //~ ERROR recursion limit
+ match n { 0 => {first.dot(second)}
// FIXME(#4287) Error message should be here. It should be
// a type error to instantiate `test` at a type other than T.
_ => {test (n-1, i+1, Cons {head:2*i+1, tail:first}, Cons{head:i*i, tail:second})}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// error-pattern: invalid recursive enum type
-
enum list<T> { cons(T, list<T>), nil }
+//~^ ERROR recursive type `list` has infinite size
fn main() {}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Test the error message resulting from a cycle in solving `Foo:
+// Sized`. The specifics of the message will of course change, but the
+// main thing we want to preserve is that:
+//
+// 1. the message should appear attached to one of the structs
+// defined in this file;
+// 2. it should elaborate the steps that led to the cycle.
+
+struct Baz { q: Option<Foo> }
+
+struct Foo { q: Option<Baz> }
+//~^ ERROR recursive type `Foo` has infinite size
+//~| type `Foo` is embedded within `core::option::Option<Foo>`...
+//~| ...which in turn is embedded within `core::option::Option<Foo>`...
+//~| ...which in turn is embedded within `Baz`...
+//~| ...which in turn is embedded within `core::option::Option<Baz>`...
+//~| ...which in turn is embedded within `Foo`, completing the cycle.
+
+impl Foo { fn bar(&self) {} }
+
+fn main() {}
x: 3
};
- let baz: Foo<usize> = panic!();
+ let baz: Foo<usize> = loop { };
//~^ ERROR not implemented
}
(box 10 as Box<bar>).dup();
//~^ ERROR E0038
//~| ERROR E0038
+ //~| ERROR E0038
+ //~| ERROR E0038
//~| ERROR E0277
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// OIBIT-based version of #29859, supertrait version. Test that using
+// a simple OIBIT `..` impl alone still doesn't allow arbitrary bounds
+// to be synthesized.
+
+#![feature(optin_builtin_traits)]
+
+trait Magic: Copy {}
+impl Magic for .. {}
+
+fn copy<T: Magic>(x: T) -> (T, T) { (x, x) }
+
+#[derive(Debug)]
+struct NoClone;
+
+fn main() {
+ let (a, b) = copy(NoClone); //~ ERROR E0277
+ println!("{:?} {:?}", a, b);
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #29859, supertrait version. This example
+// allowed arbitrary trait bounds to be synthesized.
+
+trait Magic: Copy {}
+impl<T: Magic> Magic for T {}
+
+fn copy<T: Magic>(x: T) -> (T, T) { (x, x) }
+
+#[derive(Debug)]
+struct NoClone;
+
+fn main() {
+ let (a, b) = copy(NoClone); //~ ERROR E0275
+ println!("{:?} {:?}", a, b);
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #29859, initial version. This example allowed
+// arbitrary trait bounds to be synthesized.
+
+// Trait that you want all types to implement.
+use std::marker::{Sync as Trait};
+
+pub trait Magic {
+ type X: Trait;
+}
+impl<T: Magic> Magic for T {
+ type X = Self;
+}
+
+fn check<T: Trait>() {}
+
+fn wizard<T: Magic>() { check::<<T as Magic>::X>(); }
+
+fn main() {
+ wizard::<*mut ()>(); //~ ERROR E0275
+ // check::<*mut ()>();
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// error-pattern:invalid recursive struct type
-struct t1 {
+struct t1 { //~ ERROR E0072
foo: isize,
foolish: t1
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(abi_vectorcall)]
+
+trait A {
+ extern "vectorcall" fn test1(i: i32);
+}
+
+struct S;
+
+impl A for S {
+ extern "vectorcall" fn test1(i: i32) {
+ assert_eq!(i, 1);
+ }
+}
+
+extern "vectorcall" fn test2(i: i32) {
+ assert_eq!(i, 2);
+}
+
+fn main() {
+ <S as A>::test1(1);
+ test2(2);
+}