btree: avoid forcing the allocator to be a reference

Ralf Jung 2022-06-16 22:07:10 -07:00
parent 6ec3993ef4
commit 901cd3a844
10 changed files with 264 additions and 241 deletions
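
The pattern throughout: `alloc: &A` parameters become by-value `alloc: A` under an `A: Allocator + Clone` bound, with the handle cloned explicitly at each additional use site. A minimal sketch (not part of this diff; nightly-only `allocator_api`) of why both calling styles keep working after the change:

```rust
#![feature(allocator_api)]
use std::alloc::{Allocator, Global};

// By-value allocator parameter: the callee clones the handle wherever it
// needs another copy, instead of forcing every caller to thread `&A`.
fn takes_alloc<A: Allocator + Clone>(alloc: A) {
    let for_later = alloc.clone(); // a second allocation site gets its own handle
    let _ = (alloc, for_later);
}

fn main() {
    takes_alloc(Global);  // an owned, zero-sized allocator works directly
    takes_alloc(&Global); // `&A` still works: `&A` is `Allocator + Clone`
}
```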


@ -15,12 +15,12 @@ impl<K, V> Root<K, V> {
/// a `BTreeMap`, both iterators should produce keys in strictly ascending
/// order, each greater than all keys in the tree, including any keys
/// already in the tree upon entry.
pub fn append_from_sorted_iters<I, A: Allocator>(
pub fn append_from_sorted_iters<I, A: Allocator + Clone>(
&mut self,
left: I,
right: I,
length: &mut usize,
alloc: &A,
alloc: A,
) where
K: Ord,
I: Iterator<Item = (K, V)> + FusedIterator,
@ -35,7 +35,7 @@ impl<K, V> Root<K, V> {
/// Pushes all key-value pairs to the end of the tree, incrementing a
/// `length` variable along the way. The latter makes it easier for the
/// caller to avoid a leak when the iterator panics.
pub fn bulk_push<I, A: Allocator>(&mut self, iter: I, length: &mut usize, alloc: &A)
pub fn bulk_push<I, A: Allocator + Clone>(&mut self, iter: I, length: &mut usize, alloc: A)
where
I: Iterator<Item = (K, V)>,
{
@ -64,7 +64,7 @@ impl<K, V> Root<K, V> {
}
Err(_) => {
// We are at the top, create a new root node and push there.
open_node = self.push_internal_level(alloc);
open_node = self.push_internal_level(alloc.clone());
break;
}
}
@ -72,9 +72,9 @@ impl<K, V> Root<K, V> {
// Push key-value pair and new right subtree.
let tree_height = open_node.height() - 1;
let mut right_tree = Root::new(alloc);
let mut right_tree = Root::new(alloc.clone());
for _ in 0..tree_height {
right_tree.push_internal_level(alloc);
right_tree.push_internal_level(alloc.clone());
}
open_node.push(key, value, right_tree);
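
A hypothetical stand-alone sketch (using `Vec` in place of the tree, an assumption for illustration) of the panic-safety idea in `bulk_push`'s doc-comment: `length` is incremented per element, so a panicking iterator leaves the count consistent with what was actually inserted:

```rust
// Sketch only: `dst` stands in for the tree, `length` for the map's
// length field that the caller passes in by mutable reference.
fn bulk_push_sketch<T>(dst: &mut Vec<T>, iter: impl Iterator<Item = T>, length: &mut usize) {
    for item in iter {
        dst.push(item);
        *length += 1; // bumped eagerly; a panic in `iter` cannot desync it
    }
}

fn main() {
    let (mut dst, mut length) = (Vec::new(), 0);
    bulk_push_sketch(&mut dst, [1, 2, 3].into_iter(), &mut length);
    assert_eq!((dst.len(), length), (3, 3));
}
```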


@ -7,9 +7,9 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
/// sibling. If successful but at the cost of shrinking the parent node,
/// returns that shrunk parent node. Returns an `Err` if the node is
/// an empty root.
fn fix_node_through_parent<A: Allocator>(
fn fix_node_through_parent<A: Allocator + Clone>(
self,
alloc: &A,
alloc: A,
) -> Result<Option<NodeRef<marker::Mut<'a>, K, V, marker::Internal>>, Self> {
let len = self.len();
if len >= MIN_LEN {
@ -54,9 +54,9 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
///
/// This method does not expect ancestors to already be underfull upon entry
/// and panics if it encounters an empty ancestor.
pub fn fix_node_and_affected_ancestors<A: Allocator>(mut self, alloc: &A) -> bool {
pub fn fix_node_and_affected_ancestors<A: Allocator + Clone>(mut self, alloc: A) -> bool {
loop {
match self.fix_node_through_parent(alloc) {
match self.fix_node_through_parent(alloc.clone()) {
Ok(Some(parent)) => self = parent.forget_type(),
Ok(None) => return true,
Err(_) => return false,
@ -67,28 +67,28 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
impl<K, V> Root<K, V> {
/// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
pub fn fix_top<A: Allocator>(&mut self, alloc: &A) {
pub fn fix_top<A: Allocator + Clone>(&mut self, alloc: A) {
while self.height() > 0 && self.len() == 0 {
self.pop_internal_level(alloc);
self.pop_internal_level(alloc.clone());
}
}
/// Stocks up or merges away any underfull nodes on the right border of the
/// tree. The other nodes, those that are neither the root nor a rightmost edge,
/// must already have at least MIN_LEN elements.
pub fn fix_right_border<A: Allocator>(&mut self, alloc: &A) {
self.fix_top(alloc);
pub fn fix_right_border<A: Allocator + Clone>(&mut self, alloc: A) {
self.fix_top(alloc.clone());
if self.len() > 0 {
self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc);
self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc.clone());
self.fix_top(alloc);
}
}
/// The symmetric counterpart of `fix_right_border`.
pub fn fix_left_border<A: Allocator>(&mut self, alloc: &A) {
self.fix_top(alloc);
pub fn fix_left_border<A: Allocator + Clone>(&mut self, alloc: A) {
self.fix_top(alloc.clone());
if self.len() > 0 {
self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc);
self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc.clone());
self.fix_top(alloc);
}
}
@ -115,16 +115,16 @@ impl<K, V> Root<K, V> {
}
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
fn fix_left_border_of_left_edge<A: Allocator>(mut self, alloc: &A) {
fn fix_left_border_of_left_edge<A: Allocator + Clone>(mut self, alloc: A) {
while let Internal(internal_kv) = self.force() {
self = internal_kv.fix_left_child(alloc).first_kv();
self = internal_kv.fix_left_child(alloc.clone()).first_kv();
debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
}
}
fn fix_right_border_of_right_edge<A: Allocator>(mut self, alloc: &A) {
fn fix_right_border_of_right_edge<A: Allocator + Clone>(mut self, alloc: A) {
while let Internal(internal_kv) = self.force() {
self = internal_kv.fix_right_child(alloc).last_kv();
self = internal_kv.fix_right_child(alloc.clone()).last_kv();
debug_assert!(self.reborrow().into_node().len() > MIN_LEN);
}
}
@ -135,9 +135,9 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
/// provisions an extra element to allow merging its children in turn
/// without becoming underfull.
/// Returns the left child.
fn fix_left_child<A: Allocator>(
fn fix_left_child<A: Allocator + Clone>(
self,
alloc: &A,
alloc: A,
) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
let mut internal_kv = self.consider_for_balancing();
let left_len = internal_kv.left_child_len();
@ -158,9 +158,9 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
/// provisions an extra element to allow merging its children in turn
/// without becoming underfull.
/// Returns wherever the right child ended up.
fn fix_right_child<A: Allocator>(
fn fix_right_child<A: Allocator + Clone>(
self,
alloc: &A,
alloc: A,
) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
let mut internal_kv = self.consider_for_balancing();
let right_len = internal_kv.right_child_len();
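
For intuition, a toy model (assuming a simplified tree type, for illustration only) of what `fix_top` does: while the root is an internal node with zero keys, promote its only child, shrinking the height by one:

```rust
enum Node {
    Leaf(Vec<u32>),
    Internal { keys: Vec<u32>, children: Vec<Node> },
}

fn fix_top(root: &mut Node) {
    // Pop empty levels off the top, keeping a leaf even if the tree is empty.
    while let Node::Internal { keys, children } = root {
        if !keys.is_empty() {
            break;
        }
        let only_child = children.pop().expect("empty internal root has one child");
        *root = only_child;
    }
}

fn main() {
    // An internal root with no keys and a single leaf child collapses to the leaf.
    let mut root = Node::Internal { keys: vec![], children: vec![Node::Leaf(vec![1, 2])] };
    fix_top(&mut root);
    assert!(matches!(root, Node::Leaf(_)));
}
```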


@ -171,7 +171,7 @@ pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT;
pub struct BTreeMap<
K,
V,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
root: Option<Root<K, V>>,
length: usize,
@ -179,18 +179,18 @@ pub struct BTreeMap<
}
#[stable(feature = "btree_drop", since = "1.7.0")]
unsafe impl<#[may_dangle] K, #[may_dangle] V, A: Allocator> Drop for BTreeMap<K, V, A> {
unsafe impl<#[may_dangle] K, #[may_dangle] V, A: Allocator + Clone> Drop for BTreeMap<K, V, A> {
fn drop(&mut self) {
drop(unsafe { ptr::read(self) }.into_iter())
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Clone, V: Clone, A: Clone + Allocator> Clone for BTreeMap<K, V, A> {
impl<K: Clone, V: Clone, A: Allocator + Clone> Clone for BTreeMap<K, V, A> {
fn clone(&self) -> BTreeMap<K, V, A> {
fn clone_subtree<'a, K: Clone, V: Clone, A: Clone + Allocator>(
fn clone_subtree<'a, K: Clone, V: Clone, A: Allocator + Clone>(
node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
alloc: &A,
alloc: A,
) -> BTreeMap<K, V, A>
where
K: 'a,
@ -199,9 +199,9 @@ impl<K: Clone, V: Clone, A: Clone + Allocator> Clone for BTreeMap<K, V, A> {
match node.force() {
Leaf(leaf) => {
let mut out_tree = BTreeMap {
root: Some(Root::new(alloc)),
root: Some(Root::new(alloc.clone())),
length: 0,
alloc: ManuallyDrop::new((*alloc).clone()),
alloc: ManuallyDrop::new(alloc),
};
{
@ -224,11 +224,12 @@ impl<K: Clone, V: Clone, A: Clone + Allocator> Clone for BTreeMap<K, V, A> {
out_tree
}
Internal(internal) => {
let mut out_tree = clone_subtree(internal.first_edge().descend(), alloc);
let mut out_tree =
clone_subtree(internal.first_edge().descend(), alloc.clone());
{
let out_root = out_tree.root.as_mut().unwrap();
let mut out_node = out_root.push_internal_level(alloc);
let mut out_node = out_root.push_internal_level(alloc.clone());
let mut in_edge = internal.first_edge();
while let Ok(kv) = in_edge.right_kv() {
let (k, v) = kv.into_kv();
@ -236,7 +237,7 @@ impl<K: Clone, V: Clone, A: Clone + Allocator> Clone for BTreeMap<K, V, A> {
let k = (*k).clone();
let v = (*v).clone();
let subtree = clone_subtree(in_edge.descend(), alloc);
let subtree = clone_subtree(in_edge.descend(), alloc.clone());
// We can't destructure subtree directly
// because BTreeMap implements Drop
@ -247,7 +248,11 @@ impl<K: Clone, V: Clone, A: Clone + Allocator> Clone for BTreeMap<K, V, A> {
(root, length)
};
out_node.push(k, v, subroot.unwrap_or_else(|| Root::new(alloc)));
out_node.push(
k,
v,
subroot.unwrap_or_else(|| Root::new(alloc.clone())),
);
out_tree.length += 1 + sublength;
}
}
@ -258,14 +263,14 @@ impl<K: Clone, V: Clone, A: Clone + Allocator> Clone for BTreeMap<K, V, A> {
}
if self.is_empty() {
BTreeMap::new_in(ManuallyDrop::into_inner(self.alloc.clone()))
BTreeMap::new_in((*self.alloc).clone())
} else {
clone_subtree(self.root.as_ref().unwrap().reborrow(), &*self.alloc) // unwrap succeeds because not empty
clone_subtree(self.root.as_ref().unwrap().reborrow(), (*self.alloc).clone()) // unwrap succeeds because not empty
}
}
}
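
End-to-end, `clone_subtree` backs the ordinary `Clone` behavior (stable API, for reference):

```rust
use std::collections::BTreeMap;

fn main() {
    let a = BTreeMap::from([(1, "one"), (2, "two")]);
    let b = a.clone(); // deep copy, node by node, via `clone_subtree`
    assert_eq!(a, b);
}
```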
impl<K, Q: ?Sized, A: Allocator> super::Recover<Q> for BTreeMap<K, (), A>
impl<K, Q: ?Sized, A: Allocator + Clone> super::Recover<Q> for BTreeMap<K, (), A>
where
K: Borrow<Q> + Ord,
Q: Ord,
@ -285,9 +290,14 @@ where
let root_node = map.root.as_mut()?.borrow_mut();
match root_node.search_tree(key) {
Found(handle) => Some(
OccupiedEntry { handle, dormant_map, alloc: &*map.alloc, _marker: PhantomData }
.remove_kv()
.0,
OccupiedEntry {
handle,
dormant_map,
alloc: (*map.alloc).clone(),
_marker: PhantomData,
}
.remove_kv()
.0,
),
GoDown(_) => None,
}
@ -295,7 +305,8 @@ where
fn replace(&mut self, key: K) -> Option<K> {
let (map, dormant_map) = DormantMutRef::new(self);
let root_node = map.root.get_or_insert_with(|| Root::new(&*map.alloc)).borrow_mut();
let root_node =
map.root.get_or_insert_with(|| Root::new((*map.alloc).clone())).borrow_mut();
match root_node.search_tree::<K>(&key) {
Found(mut kv) => Some(mem::replace(kv.key_mut(), key)),
GoDown(handle) => {
@ -303,7 +314,7 @@ where
key,
handle: Some(handle),
dormant_map,
alloc: &*map.alloc,
alloc: (*map.alloc).clone(),
_marker: PhantomData,
}
.insert(());
@ -369,14 +380,14 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IterMut<'_, K, V> {
pub struct IntoIter<
K,
V,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
range: LazyLeafRange<marker::Dying, K, V>,
length: usize,
alloc: A,
}
impl<K, V, A: Allocator> IntoIter<K, V, A> {
impl<K, V, A: Allocator + Clone> IntoIter<K, V, A> {
/// Returns an iterator of references over the remaining items.
#[inline]
pub(super) fn iter(&self) -> Iter<'_, K, V> {
@ -385,7 +396,7 @@ impl<K, V, A: Allocator> IntoIter<K, V, A> {
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<K: Debug, V: Debug, A: Allocator> Debug for IntoIter<K, V, A> {
impl<K: Debug, V: Debug, A: Allocator + Clone> Debug for IntoIter<K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.iter()).finish()
}
@ -456,12 +467,12 @@ impl<K, V: fmt::Debug> fmt::Debug for ValuesMut<'_, K, V> {
/// [`into_keys`]: BTreeMap::into_keys
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
pub struct IntoKeys<K, V, A: Allocator = Global> {
pub struct IntoKeys<K, V, A: Allocator + Clone = Global> {
inner: IntoIter<K, V, A>,
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K: fmt::Debug, V, A: Allocator> fmt::Debug for IntoKeys<K, V, A> {
impl<K: fmt::Debug, V, A: Allocator + Clone> fmt::Debug for IntoKeys<K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.inner.iter().map(|(key, _)| key)).finish()
}
@ -478,13 +489,13 @@ impl<K: fmt::Debug, V, A: Allocator> fmt::Debug for IntoKeys<K, V, A> {
pub struct IntoValues<
K,
V,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
inner: IntoIter<K, V, A>,
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V: fmt::Debug, A: Allocator> fmt::Debug for IntoValues<K, V, A> {
impl<K, V: fmt::Debug, A: Allocator + Clone> fmt::Debug for IntoValues<K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_list().entries(self.inner.iter().map(|(_, val)| val)).finish()
}
@ -557,7 +568,7 @@ impl<K, V> BTreeMap<K, V> {
}
}
impl<K, V, A: Allocator> BTreeMap<K, V, A> {
impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// Clears the map, removing all elements.
///
/// # Examples
@ -578,7 +589,7 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
mem::drop(BTreeMap {
root: mem::replace(&mut self.root, None),
length: mem::replace(&mut self.length, 0),
alloc: ManuallyDrop::new(&*self.alloc),
alloc: self.alloc.clone(),
});
}
@ -605,7 +616,7 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
}
}
impl<K, V, A: Allocator> BTreeMap<K, V, A> {
impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// Returns a reference to the value corresponding to the key.
///
/// The key may be any borrowed form of the map's key type, but the ordering
@ -721,7 +732,7 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
Some(OccupiedEntry {
handle: kv.forget_node_type(),
dormant_map,
alloc: &*map.alloc,
alloc: (*map.alloc).clone(),
_marker: PhantomData,
})
}
@ -809,7 +820,7 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
Some(OccupiedEntry {
handle: kv.forget_node_type(),
dormant_map,
alloc: &*map.alloc,
alloc: (*map.alloc).clone(),
_marker: PhantomData,
})
}
@ -1029,8 +1040,13 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
let root_node = map.root.as_mut()?.borrow_mut();
match root_node.search_tree(key) {
Found(handle) => Some(
OccupiedEntry { handle, dormant_map, alloc: &*map.alloc, _marker: PhantomData }
.remove_entry(),
OccupiedEntry {
handle,
dormant_map,
alloc: (*map.alloc).clone(),
_marker: PhantomData,
}
.remove_entry(),
),
GoDown(_) => None,
}
@ -1106,14 +1122,15 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
return;
}
let self_iter =
mem::replace(self, Self::new_in(ManuallyDrop::into_inner(self.alloc.clone())))
.into_iter();
let other_iter =
mem::replace(other, Self::new_in(ManuallyDrop::into_inner(self.alloc.clone())))
.into_iter();
let root = self.root.get_or_insert_with(|| Root::new(&*self.alloc));
root.append_from_sorted_iters(self_iter, other_iter, &mut self.length, &*self.alloc)
let self_iter = mem::replace(self, Self::new_in((*self.alloc).clone())).into_iter();
let other_iter = mem::replace(other, Self::new_in((*self.alloc).clone())).into_iter();
let root = self.root.get_or_insert_with(|| Root::new((*self.alloc).clone()));
root.append_from_sorted_iters(
self_iter,
other_iter,
&mut self.length,
(*self.alloc).clone(),
)
}
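
For reference, the user-facing behavior that `append_from_sorted_iters` implements here (stable API):

```rust
use std::collections::BTreeMap;

fn main() {
    let mut a = BTreeMap::from([(1, "a"), (2, "b")]);
    let mut b = BTreeMap::from([(2, "c"), (3, "d")]);
    a.append(&mut b);       // drains `b` into `a`
    assert_eq!(a[&2], "c"); // on duplicate keys, values from `other` win
    assert!(b.is_empty());
}
```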
/// Constructs a double-ended iterator over a sub-range of elements in the map.
@ -1232,21 +1249,21 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
key,
handle: None,
dormant_map,
alloc: &*map.alloc,
alloc: (*map.alloc).clone(),
_marker: PhantomData,
}),
Some(ref mut root) => match root.borrow_mut().search_tree(&key) {
Found(handle) => Occupied(OccupiedEntry {
handle,
dormant_map,
alloc: &*map.alloc,
alloc: (*map.alloc).clone(),
_marker: PhantomData,
}),
GoDown(handle) => Vacant(VacantEntry {
key,
handle: Some(handle),
dormant_map,
alloc: &*map.alloc,
alloc: (*map.alloc).clone(),
_marker: PhantomData,
}),
},
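
The `Vacant`/`Occupied` constructors above back the usual entry-API pattern; a small usage example (stable API):

```rust
use std::collections::BTreeMap;

fn main() {
    let mut counts: BTreeMap<&str, u32> = BTreeMap::new();
    for word in ["apple", "pear", "apple"] {
        // Vacant entries insert 0 first; occupied entries are updated in place.
        *counts.entry(word).or_insert(0) += 1;
    }
    assert_eq!(counts["apple"], 2);
}
```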
@ -1289,22 +1306,18 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
A: Clone,
{
if self.is_empty() {
return Self::new_in(ManuallyDrop::into_inner(self.alloc.clone()));
return Self::new_in((*self.alloc).clone());
}
let total_num = self.len();
let left_root = self.root.as_mut().unwrap(); // unwrap succeeds because not empty
let right_root = left_root.split_off(key, &*self.alloc);
let right_root = left_root.split_off(key, (*self.alloc).clone());
let (new_left_len, right_len) = Root::calc_split_length(total_num, &left_root, &right_root);
self.length = new_left_len;
BTreeMap {
root: Some(right_root),
length: right_len,
alloc: ManuallyDrop::new((*self.alloc).clone()),
}
BTreeMap { root: Some(right_root), length: right_len, alloc: self.alloc.clone() }
}
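
Observable behavior of `split_off` for reference (stable API):

```rust
use std::collections::BTreeMap;

fn main() {
    let mut map = BTreeMap::from([(1, "a"), (2, "b"), (3, "c")]);
    let tail = map.split_off(&2); // keys >= 2 move to the returned map
    assert_eq!(map.keys().copied().collect::<Vec<_>>(), [1]);
    assert_eq!(tail.keys().copied().collect::<Vec<_>>(), [2, 3]);
}
```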
/// Creates an iterator that visits all elements (key-value pairs) in
@ -1340,7 +1353,7 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
/// assert_eq!(odds.keys().copied().collect::<Vec<_>>(), [1, 3, 5, 7]);
/// ```
#[unstable(feature = "btree_drain_filter", issue = "70530")]
pub fn drain_filter<F>(&mut self, pred: F) -> DrainFilter<'_, K, V, F, &A>
pub fn drain_filter<F>(&mut self, pred: F) -> DrainFilter<'_, K, V, F, A>
where
K: Ord,
F: FnMut(&K, &mut V) -> bool,
@ -1349,7 +1362,7 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
DrainFilter { pred, inner, alloc }
}
pub(super) fn drain_filter_inner(&mut self) -> (DrainFilterInner<'_, K, V>, &A)
pub(super) fn drain_filter_inner(&mut self) -> (DrainFilterInner<'_, K, V>, A)
where
K: Ord,
{
@ -1362,7 +1375,7 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
dormant_root: Some(dormant_root),
cur_leaf_edge: Some(front),
},
&*self.alloc,
(*self.alloc).clone(),
)
} else {
(
@ -1371,7 +1384,7 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
dormant_root: None,
cur_leaf_edge: None,
},
&*self.alloc,
(*self.alloc).clone(),
)
}
}
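
The `odds` assertion quoted in the doc-comment above comes from this complete nightly example (`btree_drain_filter` feature, matching the gate in the diff):

```rust
#![feature(btree_drain_filter)]
use std::collections::BTreeMap;

fn main() {
    let mut map: BTreeMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
    // Drained pairs are the ones the predicate accepts; the rest stay put.
    let evens: BTreeMap<_, _> = map.drain_filter(|k, _v| k % 2 == 0).collect();
    let odds = map;
    assert_eq!(evens.keys().copied().collect::<Vec<_>>(), [0, 2, 4, 6]);
    assert_eq!(odds.keys().copied().collect::<Vec<_>>(), [1, 3, 5, 7]);
}
```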
@ -1426,15 +1439,15 @@ impl<K, V, A: Allocator> BTreeMap<K, V, A> {
K: Ord,
I: IntoIterator<Item = (K, V)>,
{
let mut root = Root::new(&alloc);
let mut root = Root::new(alloc.clone());
let mut length = 0;
root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, &alloc);
root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, alloc.clone());
BTreeMap { root: Some(root), length, alloc: ManuallyDrop::new(alloc) }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V, A: Allocator> IntoIterator for &'a BTreeMap<K, V, A> {
impl<'a, K, V, A: Allocator + Clone> IntoIterator for &'a BTreeMap<K, V, A> {
type Item = (&'a K, &'a V);
type IntoIter = Iter<'a, K, V>;
@ -1503,7 +1516,7 @@ impl<K, V> Clone for Iter<'_, K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V, A: Allocator> IntoIterator for &'a mut BTreeMap<K, V, A> {
impl<'a, K, V, A: Allocator + Clone> IntoIterator for &'a mut BTreeMap<K, V, A> {
type Item = (&'a K, &'a mut V);
type IntoIter = IterMut<'a, K, V>;
@ -1573,7 +1586,7 @@ impl<'a, K, V> IterMut<'a, K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, A: Allocator> IntoIterator for BTreeMap<K, V, A> {
impl<K, V, A: Allocator + Clone> IntoIterator for BTreeMap<K, V, A> {
type Item = (K, V);
type IntoIter = IntoIter<K, V, A>;
@ -1598,11 +1611,11 @@ impl<K, V, A: Allocator> IntoIterator for BTreeMap<K, V, A> {
}
#[stable(feature = "btree_drop", since = "1.7.0")]
impl<K, V, A: Allocator> Drop for IntoIter<K, V, A> {
impl<K, V, A: Allocator + Clone> Drop for IntoIter<K, V, A> {
fn drop(&mut self) {
struct DropGuard<'a, K, V, A: Allocator>(&'a mut IntoIter<K, V, A>);
struct DropGuard<'a, K, V, A: Allocator + Clone>(&'a mut IntoIter<K, V, A>);
impl<'a, K, V, A: Allocator> Drop for DropGuard<'a, K, V, A> {
impl<'a, K, V, A: Allocator + Clone> Drop for DropGuard<'a, K, V, A> {
fn drop(&mut self) {
// Continue the same loop we perform below. This only runs when unwinding, so we
// don't have to care about panics this time (they'll abort).
@ -1622,7 +1635,7 @@ impl<K, V, A: Allocator> Drop for IntoIter<K, V, A> {
}
}
impl<K, V, A: Allocator> IntoIter<K, V, A> {
impl<K, V, A: Allocator + Clone> IntoIter<K, V, A> {
/// Core of a `next` method returning a dying KV handle,
/// invalidated by further calls to this function and some others.
fn dying_next(
@ -1653,7 +1666,7 @@ impl<K, V, A: Allocator> IntoIter<K, V, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, A: Allocator> Iterator for IntoIter<K, V, A> {
impl<K, V, A: Allocator + Clone> Iterator for IntoIter<K, V, A> {
type Item = (K, V);
fn next(&mut self) -> Option<(K, V)> {
@ -1667,7 +1680,7 @@ impl<K, V, A: Allocator> Iterator for IntoIter<K, V, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, A: Allocator> DoubleEndedIterator for IntoIter<K, V, A> {
impl<K, V, A: Allocator + Clone> DoubleEndedIterator for IntoIter<K, V, A> {
fn next_back(&mut self) -> Option<(K, V)> {
// SAFETY: we consume the dying handle immediately.
self.dying_next_back().map(unsafe { |kv| kv.into_key_val() })
@ -1675,14 +1688,14 @@ impl<K, V, A: Allocator> DoubleEndedIterator for IntoIter<K, V, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, V, A: Allocator> ExactSizeIterator for IntoIter<K, V, A> {
impl<K, V, A: Allocator + Clone> ExactSizeIterator for IntoIter<K, V, A> {
fn len(&self) -> usize {
self.length
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<K, V, A: Allocator> FusedIterator for IntoIter<K, V, A> {}
impl<K, V, A: Allocator + Clone> FusedIterator for IntoIter<K, V, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, K, V> Iterator for Keys<'a, K, V> {
@ -1781,7 +1794,7 @@ pub struct DrainFilter<
K,
V,
F,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> where
F: 'a + FnMut(&K, &mut V) -> bool,
{
@ -1804,7 +1817,7 @@ pub(super) struct DrainFilterInner<'a, K, V> {
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<K, V, F, A: Allocator> Drop for DrainFilter<'_, K, V, F, A>
impl<K, V, F, A: Allocator + Clone> Drop for DrainFilter<'_, K, V, F, A>
where
F: FnMut(&K, &mut V) -> bool,
{
@ -1826,7 +1839,7 @@ where
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<K, V, F, A: Allocator> Iterator for DrainFilter<'_, K, V, F, A>
impl<K, V, F, A: Allocator + Clone> Iterator for DrainFilter<'_, K, V, F, A>
where
F: FnMut(&K, &mut V) -> bool,
{
@ -1849,7 +1862,7 @@ impl<'a, K, V> DrainFilterInner<'a, K, V> {
}
/// Implementation of a typical `DrainFilter::next` method, given the predicate.
pub(super) fn next<F, A: Allocator>(&mut self, pred: &mut F, alloc: &A) -> Option<(K, V)>
pub(super) fn next<F, A: Allocator + Clone>(&mut self, pred: &mut F, alloc: A) -> Option<(K, V)>
where
F: FnMut(&K, &mut V) -> bool,
{
@ -1862,10 +1875,10 @@ impl<'a, K, V> DrainFilterInner<'a, K, V> {
// SAFETY: we will touch the root in a way that will not
// invalidate the position returned.
let root = unsafe { self.dormant_root.take().unwrap().awaken() };
root.pop_internal_level(alloc);
root.pop_internal_level(alloc.clone());
self.dormant_root = Some(DormantMutRef::new(root).1);
},
alloc,
alloc.clone(),
);
self.cur_leaf_edge = Some(pos);
return Some(kv);
@ -1944,7 +1957,7 @@ impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V, A: Allocator> Iterator for IntoKeys<K, V, A> {
impl<K, V, A: Allocator + Clone> Iterator for IntoKeys<K, V, A> {
type Item = K;
fn next(&mut self) -> Option<K> {
@ -1969,24 +1982,24 @@ impl<K, V, A: Allocator> Iterator for IntoKeys<K, V, A> {
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V, A: Allocator> DoubleEndedIterator for IntoKeys<K, V, A> {
impl<K, V, A: Allocator + Clone> DoubleEndedIterator for IntoKeys<K, V, A> {
fn next_back(&mut self) -> Option<K> {
self.inner.next_back().map(|(k, _)| k)
}
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V, A: Allocator> ExactSizeIterator for IntoKeys<K, V, A> {
impl<K, V, A: Allocator + Clone> ExactSizeIterator for IntoKeys<K, V, A> {
fn len(&self) -> usize {
self.inner.len()
}
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V, A: Allocator> FusedIterator for IntoKeys<K, V, A> {}
impl<K, V, A: Allocator + Clone> FusedIterator for IntoKeys<K, V, A> {}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V, A: Allocator> Iterator for IntoValues<K, V, A> {
impl<K, V, A: Allocator + Clone> Iterator for IntoValues<K, V, A> {
type Item = V;
fn next(&mut self) -> Option<V> {
@ -2003,21 +2016,21 @@ impl<K, V, A: Allocator> Iterator for IntoValues<K, V, A> {
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V, A: Allocator> DoubleEndedIterator for IntoValues<K, V, A> {
impl<K, V, A: Allocator + Clone> DoubleEndedIterator for IntoValues<K, V, A> {
fn next_back(&mut self) -> Option<V> {
self.inner.next_back().map(|(_, v)| v)
}
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V, A: Allocator> ExactSizeIterator for IntoValues<K, V, A> {
impl<K, V, A: Allocator + Clone> ExactSizeIterator for IntoValues<K, V, A> {
fn len(&self) -> usize {
self.inner.len()
}
}
#[stable(feature = "map_into_keys_values", since = "1.54.0")]
impl<K, V, A: Allocator> FusedIterator for IntoValues<K, V, A> {}
impl<K, V, A: Allocator + Clone> FusedIterator for IntoValues<K, V, A> {}
#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
@ -2083,7 +2096,7 @@ impl<K: Ord, V> FromIterator<(K, V)> for BTreeMap<K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Ord, V, A: Allocator> Extend<(K, V)> for BTreeMap<K, V, A> {
impl<K: Ord, V, A: Allocator + Clone> Extend<(K, V)> for BTreeMap<K, V, A> {
#[inline]
fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
iter.into_iter().for_each(move |(k, v)| {
@ -2098,7 +2111,9 @@ impl<K: Ord, V, A: Allocator> Extend<(K, V)> for BTreeMap<K, V, A> {
}
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, K: Ord + Copy, V: Copy, A: Allocator> Extend<(&'a K, &'a V)> for BTreeMap<K, V, A> {
impl<'a, K: Ord + Copy, V: Copy, A: Allocator + Clone> Extend<(&'a K, &'a V)>
for BTreeMap<K, V, A>
{
fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
}
@ -2110,7 +2125,7 @@ impl<'a, K: Ord + Copy, V: Copy, A: Allocator> Extend<(&'a K, &'a V)> for BTreeM
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Hash, V: Hash, A: Allocator> Hash for BTreeMap<K, V, A> {
impl<K: Hash, V: Hash, A: Allocator + Clone> Hash for BTreeMap<K, V, A> {
fn hash<H: Hasher>(&self, state: &mut H) {
state.write_length_prefix(self.len());
for elt in self {
@ -2128,17 +2143,17 @@ impl<K, V> Default for BTreeMap<K, V> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: PartialEq, V: PartialEq, A: Allocator> PartialEq for BTreeMap<K, V, A> {
impl<K: PartialEq, V: PartialEq, A: Allocator + Clone> PartialEq for BTreeMap<K, V, A> {
fn eq(&self, other: &BTreeMap<K, V, A>) -> bool {
self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Eq, V: Eq, A: Allocator> Eq for BTreeMap<K, V, A> {}
impl<K: Eq, V: Eq, A: Allocator + Clone> Eq for BTreeMap<K, V, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: PartialOrd, V: PartialOrd, A: Allocator> PartialOrd for BTreeMap<K, V, A> {
impl<K: PartialOrd, V: PartialOrd, A: Allocator + Clone> PartialOrd for BTreeMap<K, V, A> {
#[inline]
fn partial_cmp(&self, other: &BTreeMap<K, V, A>) -> Option<Ordering> {
self.iter().partial_cmp(other.iter())
@ -2146,7 +2161,7 @@ impl<K: PartialOrd, V: PartialOrd, A: Allocator> PartialOrd for BTreeMap<K, V, A
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Ord, V: Ord, A: Allocator> Ord for BTreeMap<K, V, A> {
impl<K: Ord, V: Ord, A: Allocator + Clone> Ord for BTreeMap<K, V, A> {
#[inline]
fn cmp(&self, other: &BTreeMap<K, V, A>) -> Ordering {
self.iter().cmp(other.iter())
@ -2154,14 +2169,14 @@ impl<K: Ord, V: Ord, A: Allocator> Ord for BTreeMap<K, V, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K: Debug, V: Debug, A: Allocator> Debug for BTreeMap<K, V, A> {
impl<K: Debug, V: Debug, A: Allocator + Clone> Debug for BTreeMap<K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_map().entries(self.iter()).finish()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<K, Q: ?Sized, V, A: Allocator> Index<&Q> for BTreeMap<K, V, A>
impl<K, Q: ?Sized, V, A: Allocator + Clone> Index<&Q> for BTreeMap<K, V, A>
where
K: Borrow<Q> + Ord,
Q: Ord,
@ -2201,7 +2216,7 @@ impl<K: Ord, V, const N: usize> From<[(K, V); N]> for BTreeMap<K, V> {
}
}
impl<K, V, A: Allocator> BTreeMap<K, V, A> {
impl<K, V, A: Allocator + Clone> BTreeMap<K, V, A> {
/// Gets an iterator over the entries of the map, sorted by key.
///
/// # Examples


@ -21,7 +21,7 @@ pub enum Entry<
'a,
K: 'a,
V: 'a,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
/// A vacant entry.
#[stable(feature = "rust1", since = "1.0.0")]
@ -33,7 +33,7 @@ pub enum Entry<
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V: Debug, A: Allocator> Debug for Entry<'_, K, V, A> {
impl<K: Debug + Ord, V: Debug, A: Allocator + Clone> Debug for Entry<'_, K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match *self {
Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
@ -49,21 +49,21 @@ pub struct VacantEntry<
'a,
K,
V,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
pub(super) key: K,
/// `None` for an (empty) map without a root
pub(super) handle: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V, A>>,
pub(super) alloc: &'a A,
pub(super) alloc: A,
// Be invariant in `K` and `V`
pub(super) _marker: PhantomData<&'a mut (K, V)>,
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V, A: Allocator> Debug for VacantEntry<'_, K, V, A> {
impl<K: Debug + Ord, V, A: Allocator + Clone> Debug for VacantEntry<'_, K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("VacantEntry").field(self.key()).finish()
}
@ -76,19 +76,19 @@ pub struct OccupiedEntry<
'a,
K,
V,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
pub(super) handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
pub(super) dormant_map: DormantMutRef<'a, BTreeMap<K, V, A>>,
pub(super) alloc: &'a A,
pub(super) alloc: A,
// Be invariant in `K` and `V`
pub(super) _marker: PhantomData<&'a mut (K, V)>,
}
#[stable(feature = "debug_btree_map", since = "1.12.0")]
impl<K: Debug + Ord, V: Debug, A: Allocator> Debug for OccupiedEntry<'_, K, V, A> {
impl<K: Debug + Ord, V: Debug, A: Allocator + Clone> Debug for OccupiedEntry<'_, K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
}
@ -98,7 +98,7 @@ impl<K: Debug + Ord, V: Debug, A: Allocator> Debug for OccupiedEntry<'_, K, V, A
///
/// Contains the occupied entry, and the value that was not inserted.
#[unstable(feature = "map_try_insert", issue = "82766")]
pub struct OccupiedError<'a, K: 'a, V: 'a, A: Allocator = Global> {
pub struct OccupiedError<'a, K: 'a, V: 'a, A: Allocator + Clone = Global> {
/// The entry in the map that was already occupied.
pub entry: OccupiedEntry<'a, K, V, A>,
/// The value which was not inserted, because the entry was already occupied.
@ -106,7 +106,7 @@ pub struct OccupiedError<'a, K: 'a, V: 'a, A: Allocator = Global> {
}
#[unstable(feature = "map_try_insert", issue = "82766")]
impl<K: Debug + Ord, V: Debug, A: Allocator> Debug for OccupiedError<'_, K, V, A> {
impl<K: Debug + Ord, V: Debug, A: Allocator + Clone> Debug for OccupiedError<'_, K, V, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("OccupiedError")
.field("key", self.entry.key())
@ -117,7 +117,9 @@ impl<K: Debug + Ord, V: Debug, A: Allocator> Debug for OccupiedError<'_, K, V, A
}
#[unstable(feature = "map_try_insert", issue = "82766")]
impl<'a, K: Debug + Ord, V: Debug, A: Allocator> fmt::Display for OccupiedError<'a, K, V, A> {
impl<'a, K: Debug + Ord, V: Debug, A: Allocator + Clone> fmt::Display
for OccupiedError<'a, K, V, A>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
@ -129,7 +131,7 @@ impl<'a, K: Debug + Ord, V: Debug, A: Allocator> fmt::Display for OccupiedError<
}
}
impl<'a, K: Ord, V, A: Allocator> Entry<'a, K, V, A> {
impl<'a, K: Ord, V, A: Allocator + Clone> Entry<'a, K, V, A> {
/// Ensures a value is in the entry by inserting the default if empty, and returns
/// a mutable reference to the value in the entry.
///
@ -257,7 +259,7 @@ impl<'a, K: Ord, V, A: Allocator> Entry<'a, K, V, A> {
}
}
impl<'a, K: Ord, V: Default, A: Allocator> Entry<'a, K, V, A> {
impl<'a, K: Ord, V: Default, A: Allocator + Clone> Entry<'a, K, V, A> {
#[stable(feature = "entry_or_default", since = "1.28.0")]
/// Ensures a value is in the entry by inserting the default value if empty,
/// and returns a mutable reference to the value in the entry.
@ -280,7 +282,7 @@ impl<'a, K: Ord, V: Default, A: Allocator> Entry<'a, K, V, A> {
}
}
impl<'a, K: Ord, V, A: Allocator> VacantEntry<'a, K, V, A> {
impl<'a, K: Ord, V, A: Allocator + Clone> VacantEntry<'a, K, V, A> {
/// Gets a reference to the key that would be used when inserting a value
/// through the VacantEntry.
///
@ -338,13 +340,13 @@ impl<'a, K: Ord, V, A: Allocator> VacantEntry<'a, K, V, A> {
None => {
// SAFETY: There is no tree yet so no reference to it exists.
let map = unsafe { self.dormant_map.awaken() };
let mut root = NodeRef::new_leaf(self.alloc);
let mut root = NodeRef::new_leaf(self.alloc.clone());
let val_ptr = root.borrow_mut().push(self.key, value) as *mut V;
map.root = Some(root.forget_type());
map.length = 1;
val_ptr
}
Some(handle) => match handle.insert_recursing(self.key, value, self.alloc) {
Some(handle) => match handle.insert_recursing(self.key, value, self.alloc.clone()) {
(None, val_ptr) => {
// SAFETY: We have consumed self.handle.
let map = unsafe { self.dormant_map.awaken() };
@ -369,7 +371,7 @@ impl<'a, K: Ord, V, A: Allocator> VacantEntry<'a, K, V, A> {
}
}
impl<'a, K: Ord, V, A: Allocator> OccupiedEntry<'a, K, V, A> {
impl<'a, K: Ord, V, A: Allocator + Clone> OccupiedEntry<'a, K, V, A> {
/// Gets a reference to the key in the entry.
///
/// # Examples
@ -538,13 +540,13 @@ impl<'a, K: Ord, V, A: Allocator> OccupiedEntry<'a, K, V, A> {
pub(super) fn remove_kv(self) -> (K, V) {
let mut emptied_internal_root = false;
let (old_kv, _) =
self.handle.remove_kv_tracking(|| emptied_internal_root = true, self.alloc);
self.handle.remove_kv_tracking(|| emptied_internal_root = true, self.alloc.clone());
// SAFETY: we consumed the intermediate root borrow, `self.handle`.
let map = unsafe { self.dormant_map.awaken() };
map.length -= 1;
if emptied_internal_root {
let root = map.root.as_mut().unwrap();
root.pop_internal_level(&*self.alloc);
root.pop_internal_level(self.alloc);
}
old_kv
}
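
`remove_kv` is what powers `OccupiedEntry::remove`; usage for reference (stable API):

```rust
use std::collections::btree_map::Entry;
use std::collections::BTreeMap;

fn main() {
    let mut map = BTreeMap::from([("k", 1)]);
    if let Entry::Occupied(e) = map.entry("k") {
        assert_eq!(e.remove(), 1); // removes the pair and rebalances the tree
    }
    assert!(map.is_empty());
}
```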


@ -116,11 +116,7 @@ impl<K, V> BTreeMap<K, V> {
{
let iter = mem::take(self).into_iter();
if !iter.is_empty() {
self.root.insert(Root::new(&*self.alloc)).bulk_push(
iter,
&mut self.length,
&*self.alloc,
);
self.root.insert(Root::new(*self.alloc)).bulk_push(iter, &mut self.length, *self.alloc);
}
}
}


@ -178,9 +178,9 @@ impl<K, V> LazyLeafRange<marker::Dying, K, V> {
}
#[inline]
pub unsafe fn deallocating_next_unchecked<A: Allocator>(
pub unsafe fn deallocating_next_unchecked<A: Allocator + Clone>(
&mut self,
alloc: &A,
alloc: A,
) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
debug_assert!(self.front.is_some());
let front = self.init_front().unwrap();
@ -188,9 +188,9 @@ impl<K, V> LazyLeafRange<marker::Dying, K, V> {
}
#[inline]
pub unsafe fn deallocating_next_back_unchecked<A: Allocator>(
pub unsafe fn deallocating_next_back_unchecked<A: Allocator + Clone>(
&mut self,
alloc: &A,
alloc: A,
) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
debug_assert!(self.back.is_some());
let back = self.init_back().unwrap();
@ -198,7 +198,7 @@ impl<K, V> LazyLeafRange<marker::Dying, K, V> {
}
#[inline]
pub fn deallocating_end<A: Allocator>(&mut self, alloc: &A) {
pub fn deallocating_end<A: Allocator + Clone>(&mut self, alloc: A) {
if let Some(front) = self.take_front() {
front.deallocating_end(alloc)
}
@ -444,9 +444,9 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
/// `deallocating_next_back`.
/// - The returned KV handle is only valid to access the key and value,
/// and only valid until the next call to a `deallocating_` method.
unsafe fn deallocating_next<A: Allocator>(
unsafe fn deallocating_next<A: Allocator + Clone>(
self,
alloc: &A,
alloc: A,
) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
{
let mut edge = self.forget_node_type();
@ -454,7 +454,7 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
edge = match edge.right_kv() {
Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)),
Err(last_edge) => {
match unsafe { last_edge.into_node().deallocate_and_ascend(alloc) } {
match unsafe { last_edge.into_node().deallocate_and_ascend(alloc.clone()) } {
Some(parent_edge) => parent_edge.forget_node_type(),
None => return None,
}
@ -476,9 +476,9 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
/// `deallocating_next`.
/// - The returned KV handle is only valid to access the key and value,
/// and only valid until the next call to a `deallocating_` method.
unsafe fn deallocating_next_back<A: Allocator>(
unsafe fn deallocating_next_back<A: Allocator + Clone>(
self,
alloc: &A,
alloc: A,
) -> Option<(Self, Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>)>
{
let mut edge = self.forget_node_type();
@ -486,7 +486,7 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
edge = match edge.left_kv() {
Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)),
Err(last_edge) => {
match unsafe { last_edge.into_node().deallocate_and_ascend(alloc) } {
match unsafe { last_edge.into_node().deallocate_and_ascend(alloc.clone()) } {
Some(parent_edge) => parent_edge.forget_node_type(),
None => return None,
}
@ -501,9 +501,11 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
/// both sides of the tree, and have hit the same edge. As it is intended
/// only to be called when all keys and values have been returned,
/// no cleanup is done on any of the keys or values.
fn deallocating_end<A: Allocator>(self, alloc: &A) {
fn deallocating_end<A: Allocator + Clone>(self, alloc: A) {
let mut edge = self.forget_node_type();
while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend(alloc) } {
while let Some(parent_edge) =
unsafe { edge.into_node().deallocate_and_ascend(alloc.clone()) }
{
edge = parent_edge.forget_node_type();
}
}
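
The `deallocating_*` traversal above is what runs when an `IntoIter` is dropped part-way through; the observable behavior (stable API):

```rust
use std::collections::BTreeMap;

fn main() {
    let map = BTreeMap::from([(1, "a"), (2, "b")]);
    let mut it = map.into_iter();
    assert_eq!(it.next(), Some((1, "a")));
    drop(it); // the remaining nodes are deallocated by the iterator's Drop
}
```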
@ -578,9 +580,9 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
///
/// The only safe way to proceed with the updated handle is to compare it, drop it,
/// or call this method or counterpart `deallocating_next_back_unchecked` again.
unsafe fn deallocating_next_unchecked<A: Allocator>(
unsafe fn deallocating_next_unchecked<A: Allocator + Clone>(
&mut self,
alloc: &A,
alloc: A,
) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
super::mem::replace(self, |leaf_edge| unsafe {
leaf_edge.deallocating_next(alloc).unwrap()
@ -599,9 +601,9 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
///
/// The only safe way to proceed with the updated handle is to compare it, drop it,
/// or call this method or counterpart `deallocating_next_unchecked` again.
unsafe fn deallocating_next_back_unchecked<A: Allocator>(
unsafe fn deallocating_next_back_unchecked<A: Allocator + Clone>(
&mut self,
alloc: &A,
alloc: A,
) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
super::mem::replace(self, |leaf_edge| unsafe {
leaf_edge.deallocating_next_back(alloc).unwrap()


@ -78,7 +78,7 @@ impl<K, V> LeafNode<K, V> {
}
/// Creates a new boxed `LeafNode`.
fn new<A: Allocator>(alloc: &A) -> Box<Self, &A> {
fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
unsafe {
let mut leaf = Box::new_uninit_in(alloc);
LeafNode::init(leaf.as_mut_ptr());
@ -110,7 +110,7 @@ impl<K, V> InternalNode<K, V> {
/// An invariant of internal nodes is that they have at least one
/// initialized and valid edge. This function does not set up
/// such an edge.
unsafe fn new<A: Allocator>(alloc: &A) -> Box<Self, &A> {
unsafe fn new<A: Allocator + Clone>(alloc: A) -> Box<Self, A> {
unsafe {
let mut node = Box::<Self, _>::new_uninit_in(alloc);
// We only need to initialize the data; the edges are MaybeUninit.
@ -213,17 +213,17 @@ unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type>
unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Dying, K, V, Type> {}
impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
pub fn new_leaf<A: Allocator>(alloc: &A) -> Self {
pub fn new_leaf<A: Allocator + Clone>(alloc: A) -> Self {
Self::from_new_leaf(LeafNode::new(alloc))
}
fn from_new_leaf<A: Allocator>(leaf: Box<LeafNode<K, V>, A>) -> Self {
fn from_new_leaf<A: Allocator + Clone>(leaf: Box<LeafNode<K, V>, A>) -> Self {
NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData }
}
}
impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
fn new_internal<A: Allocator>(child: Root<K, V>, alloc: &A) -> Self {
fn new_internal<A: Allocator + Clone>(child: Root<K, V>, alloc: A) -> Self {
let mut new_node = unsafe { InternalNode::new(alloc) };
new_node.edges[0].write(child.node);
unsafe { NodeRef::from_new_internal(new_node, child.height + 1) }
@ -231,7 +231,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
/// # Safety
/// `height` must not be zero.
unsafe fn from_new_internal<A: Allocator>(
unsafe fn from_new_internal<A: Allocator + Clone>(
internal: Box<InternalNode<K, V>, A>,
height: usize,
) -> Self {
@ -390,9 +390,9 @@ impl<K, V> NodeRef<marker::Dying, K, V, marker::LeafOrInternal> {
/// Similar to `ascend`, gets a reference to a node's parent node, but also
/// deallocates the current node in the process. This is unsafe because the
/// current node will still be accessible despite being deallocated.
pub unsafe fn deallocate_and_ascend<A: Allocator>(
pub unsafe fn deallocate_and_ascend<A: Allocator + Clone>(
self,
alloc: &A,
alloc: A,
) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::Internal>, marker::Edge>> {
let height = self.height;
let node = self.node;
@ -559,16 +559,16 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
/// Returns a new owned tree, with its own root node that is initially empty.
pub fn new<A: Allocator>(alloc: &A) -> Self {
pub fn new<A: Allocator + Clone>(alloc: A) -> Self {
NodeRef::new_leaf(alloc).forget_type()
}
/// Adds a new internal node with a single edge pointing to the previous root node,
/// makes that new node the root node, and returns it. This increases the height by 1
/// and is the opposite of `pop_internal_level`.
pub fn push_internal_level<A: Allocator>(
pub fn push_internal_level<A: Allocator + Clone>(
&mut self,
alloc: &A,
alloc: A,
) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root, alloc).forget_type());
@ -585,7 +585,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
/// it will not invalidate other handles or references to the root node.
///
/// Panics if there is no internal level, i.e., if the root node is a leaf.
pub fn pop_internal_level<A: Allocator>(&mut self, alloc: &A) {
pub fn pop_internal_level<A: Allocator + Clone>(&mut self, alloc: A) {
assert!(self.height > 0);
let top = self.node;
@ -869,11 +869,11 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
/// this edge. This method splits the node if there isn't enough room.
///
/// The returned pointer points to the inserted value.
fn insert<A: Allocator>(
fn insert<A: Allocator + Clone>(
mut self,
key: K,
val: V,
alloc: &A,
alloc: A,
) -> (Option<SplitResult<'a, K, V, marker::Leaf>>, *mut V) {
if self.node.len() < CAPACITY {
let val_ptr = self.insert_fit(key, val);
@ -930,12 +930,12 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
/// Inserts a new key-value pair and an edge that will go to the right of that new pair
/// between this edge and the key-value pair to the right of this edge. This method splits
/// the node if there isn't enough room.
fn insert<A: Allocator>(
fn insert<A: Allocator + Clone>(
mut self,
key: K,
val: V,
edge: Root<K, V>,
alloc: &A,
alloc: A,
) -> Option<SplitResult<'a, K, V, marker::Internal>> {
assert!(edge.height == self.node.height - 1);
@ -968,23 +968,25 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
/// If the returned result is some `SplitResult`, the `left` field will be the root node.
/// The returned pointer points to the inserted value, which in the case of `SplitResult`
/// is in the `left` or `right` tree.
pub fn insert_recursing<A: Allocator>(
pub fn insert_recursing<A: Allocator + Clone>(
self,
key: K,
value: V,
alloc: &A,
alloc: A,
) -> (Option<SplitResult<'a, K, V, marker::LeafOrInternal>>, *mut V) {
let (mut split, val_ptr) = match self.insert(key, value, alloc) {
let (mut split, val_ptr) = match self.insert(key, value, alloc.clone()) {
(None, val_ptr) => return (None, val_ptr),
(Some(split), val_ptr) => (split.forget_node_type(), val_ptr),
};
loop {
split = match split.left.ascend() {
Ok(parent) => match parent.insert(split.kv.0, split.kv.1, split.right, alloc) {
None => return (None, val_ptr),
Some(split) => split.forget_node_type(),
},
Ok(parent) => {
match parent.insert(split.kv.0, split.kv.1, split.right, alloc.clone()) {
None => return (None, val_ptr),
Some(split) => split.forget_node_type(),
}
}
Err(root) => return (Some(SplitResult { left: root, ..split }), val_ptr),
};
}
@ -1126,7 +1128,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
/// - The key and value pointed to by this handle are extracted.
/// - All the key-value pairs to the right of this handle are put into a newly
/// allocated node.
pub fn split<A: Allocator>(mut self, alloc: &A) -> SplitResult<'a, K, V, marker::Leaf> {
pub fn split<A: Allocator + Clone>(mut self, alloc: A) -> SplitResult<'a, K, V, marker::Leaf> {
let mut new_node = LeafNode::new(alloc);
let kv = self.split_leaf_data(&mut new_node);
@ -1158,7 +1160,10 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
/// - The key and value pointed to by this handle are extracted.
/// - All the edges and key-value pairs to the right of this handle are put into
/// a newly allocated node.
pub fn split<A: Allocator>(mut self, alloc: &A) -> SplitResult<'a, K, V, marker::Internal> {
pub fn split<A: Allocator + Clone>(
mut self,
alloc: A,
) -> SplitResult<'a, K, V, marker::Internal> {
let old_len = self.node.len();
unsafe {
let mut new_node = InternalNode::new(alloc);
@ -1270,7 +1275,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
>(
self,
result: F,
alloc: &A,
alloc: A,
) -> R {
let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent;
let old_parent_len = parent_node.len();
@ -1327,9 +1332,9 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
/// the left child node and returns the shrunk parent node.
///
/// Panics unless we `.can_merge()`.
pub fn merge_tracking_parent<A: Allocator>(
pub fn merge_tracking_parent<A: Allocator + Clone>(
self,
alloc: &A,
alloc: A,
) -> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
self.do_merge(|parent, _child| parent, alloc)
}
@ -1338,9 +1343,9 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
/// the left child node and returns that child node.
///
/// Panics unless we `.can_merge()`.
pub fn merge_tracking_child<A: Allocator>(
pub fn merge_tracking_child<A: Allocator + Clone>(
self,
alloc: &A,
alloc: A,
) -> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
self.do_merge(|_parent, child| child, alloc)
}
@ -1350,10 +1355,10 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
/// where the tracked child edge ended up.
///
/// Panics unless we `.can_merge()`.
pub fn merge_tracking_child_edge<A: Allocator>(
pub fn merge_tracking_child_edge<A: Allocator + Clone>(
self,
track_edge_idx: LeftOrRight<usize>,
alloc: &A,
alloc: A,
) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
let old_left_len = self.left_child.len();
let right_len = self.right_child.len();
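
A sketch of the `Box::new_uninit_in` allocation pattern used by `LeafNode::new` and `InternalNode::new` above (an illustration, assuming the nightly `allocator_api` and `new_uninit` features):

```rust
#![feature(allocator_api, new_uninit)]
use std::alloc::{Allocator, Global};

fn boxed_zero_in<A: Allocator>(alloc: A) -> Box<u64, A> {
    // Allocate uninitialized storage in `alloc`, initialize it in place,
    // then assert initialization -- the same shape as `LeafNode::new`.
    let mut node = Box::<u64, A>::new_uninit_in(alloc);
    node.write(0);
    unsafe { node.assume_init() }
}

fn main() {
    assert_eq!(*boxed_zero_in(Global), 0);
}
```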


@ -7,10 +7,10 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInter
/// the leaf edge corresponding to that former pair. It's possible this empties
/// a root node that is internal, which the caller should pop from the map
/// holding the tree. The caller should also decrement the map's length.
pub fn remove_kv_tracking<F: FnOnce(), A: Allocator>(
pub fn remove_kv_tracking<F: FnOnce(), A: Allocator + Clone>(
self,
handle_emptied_internal_root: F,
alloc: &A,
alloc: A,
) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
match self.force() {
Leaf(node) => node.remove_leaf_kv(handle_emptied_internal_root, alloc),
@ -20,10 +20,10 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInter
}
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
fn remove_leaf_kv<F: FnOnce(), A: Allocator>(
fn remove_leaf_kv<F: FnOnce(), A: Allocator + Clone>(
self,
handle_emptied_internal_root: F,
alloc: &A,
alloc: A,
) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
let (old_kv, mut pos) = self.remove();
let len = pos.reborrow().into_node().len();
@ -35,7 +35,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
Ok(Left(left_parent_kv)) => {
debug_assert!(left_parent_kv.right_child_len() == MIN_LEN - 1);
if left_parent_kv.can_merge() {
left_parent_kv.merge_tracking_child_edge(Right(idx), alloc)
left_parent_kv.merge_tracking_child_edge(Right(idx), alloc.clone())
} else {
debug_assert!(left_parent_kv.left_child_len() > MIN_LEN);
left_parent_kv.steal_left(idx)
@ -44,7 +44,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
Ok(Right(right_parent_kv)) => {
debug_assert!(right_parent_kv.left_child_len() == MIN_LEN - 1);
if right_parent_kv.can_merge() {
right_parent_kv.merge_tracking_child_edge(Left(idx), alloc)
right_parent_kv.merge_tracking_child_edge(Left(idx), alloc.clone())
} else {
debug_assert!(right_parent_kv.right_child_len() > MIN_LEN);
right_parent_kv.steal_right(idx)
@ -73,10 +73,10 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
}
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
fn remove_internal_kv<F: FnOnce(), A: Allocator>(
fn remove_internal_kv<F: FnOnce(), A: Allocator + Clone>(
self,
handle_emptied_internal_root: F,
alloc: &A,
alloc: A,
) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
// Remove an adjacent KV from its leaf and then put it back in place of
// the element we were asked to remove. Prefer the left adjacent KV,


@ -79,37 +79,37 @@ use crate::alloc::{Allocator, Global};
#[cfg_attr(not(test), rustc_diagnostic_item = "BTreeSet")]
pub struct BTreeSet<
T,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
map: BTreeMap<T, (), A>,
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Hash, A: Allocator> Hash for BTreeSet<T, A> {
impl<T: Hash, A: Allocator + Clone> Hash for BTreeSet<T, A> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.map.hash(state)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialEq, A: Allocator> PartialEq for BTreeSet<T, A> {
impl<T: PartialEq, A: Allocator + Clone> PartialEq for BTreeSet<T, A> {
fn eq(&self, other: &BTreeSet<T, A>) -> bool {
self.map.eq(&other.map)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Eq, A: Allocator> Eq for BTreeSet<T, A> {}
impl<T: Eq, A: Allocator + Clone> Eq for BTreeSet<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: PartialOrd, A: Allocator> PartialOrd for BTreeSet<T, A> {
impl<T: PartialOrd, A: Allocator + Clone> PartialOrd for BTreeSet<T, A> {
fn partial_cmp(&self, other: &BTreeSet<T, A>) -> Option<Ordering> {
self.map.partial_cmp(&other.map)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord, A: Allocator> Ord for BTreeSet<T, A> {
impl<T: Ord, A: Allocator + Clone> Ord for BTreeSet<T, A> {
fn cmp(&self, other: &BTreeSet<T, A>) -> Ordering {
self.map.cmp(&other.map)
}
@ -156,7 +156,7 @@ impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
#[derive(Debug)]
pub struct IntoIter<
T,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
iter: super::map::IntoIter<T, (), A>,
}
@ -186,11 +186,11 @@ pub struct Range<'a, T: 'a> {
pub struct Difference<
'a,
T: 'a,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
inner: DifferenceInner<'a, T, A>,
}
enum DifferenceInner<'a, T: 'a, A: Allocator> {
enum DifferenceInner<'a, T: 'a, A: Allocator + Clone> {
Stitch {
// iterate all of `self` and some of `other`, spotting matches along the way
self_iter: Iter<'a, T>,
@ -205,7 +205,7 @@ enum DifferenceInner<'a, T: 'a, A: Allocator> {
}
// Explicit Debug impl necessary because of issue #26925
impl<T: Debug, A: Allocator> Debug for DifferenceInner<'_, T, A> {
impl<T: Debug, A: Allocator + Clone> Debug for DifferenceInner<'_, T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
DifferenceInner::Stitch { self_iter, other_iter } => f
@ -224,7 +224,7 @@ impl<T: Debug, A: Allocator> Debug for DifferenceInner<'_, T, A> {
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Difference<'_, T, A> {
impl<T: fmt::Debug, A: Allocator + Clone> fmt::Debug for Difference<'_, T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("Difference").field(&self.inner).finish()
}
@ -260,11 +260,11 @@ impl<T: fmt::Debug> fmt::Debug for SymmetricDifference<'_, T> {
pub struct Intersection<
'a,
T: 'a,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> {
inner: IntersectionInner<'a, T, A>,
}
enum IntersectionInner<'a, T: 'a, A: Allocator> {
enum IntersectionInner<'a, T: 'a, A: Allocator + Clone> {
Stitch {
// iterate similarly sized sets jointly, spotting matches along the way
a: Iter<'a, T>,
@ -279,7 +279,7 @@ enum IntersectionInner<'a, T: 'a, A: Allocator> {
}
// Explicit Debug impl necessary because of issue #26925
impl<T: Debug, A: Allocator> Debug for IntersectionInner<'_, T, A> {
impl<T: Debug, A: Allocator + Clone> Debug for IntersectionInner<'_, T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
IntersectionInner::Stitch { a, b } => {
@ -296,7 +296,7 @@ impl<T: Debug, A: Allocator> Debug for IntersectionInner<'_, T, A> {
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: Debug, A: Allocator> Debug for Intersection<'_, T, A> {
impl<T: Debug, A: Allocator + Clone> Debug for Intersection<'_, T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("Intersection").field(&self.inner).finish()
}
@ -349,7 +349,7 @@ impl<T> BTreeSet<T> {
}
}
impl<T, A: Allocator> BTreeSet<T, A> {
impl<T, A: Allocator + Clone> BTreeSet<T, A> {
/// Makes a new `BTreeSet` with a reasonable choice of B.
///
/// # Examples
@ -1208,7 +1208,7 @@ impl<T: Ord> FromIterator<T> for BTreeSet<T> {
}
}
impl<T: Ord, A: Allocator> BTreeSet<T, A> {
impl<T: Ord, A: Allocator + Clone> BTreeSet<T, A> {
fn from_sorted_iter<I: Iterator<Item = T>>(iter: I, alloc: A) -> BTreeSet<T, A> {
let iter = iter.map(|k| (k, ()));
let map = BTreeMap::bulk_build_from_sorted_iter(iter, alloc);
@ -1241,7 +1241,7 @@ impl<T: Ord, const N: usize> From<[T; N]> for BTreeSet<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> IntoIterator for BTreeSet<T, A> {
impl<T, A: Allocator + Clone> IntoIterator for BTreeSet<T, A> {
type Item = T;
type IntoIter = IntoIter<T, A>;
@ -1263,7 +1263,7 @@ impl<T, A: Allocator> IntoIterator for BTreeSet<T, A> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, A: Allocator> IntoIterator for &'a BTreeSet<T, A> {
impl<'a, T, A: Allocator + Clone> IntoIterator for &'a BTreeSet<T, A> {
type Item = &'a T;
type IntoIter = Iter<'a, T>;
@ -1278,18 +1278,18 @@ pub struct DrainFilter<
'a,
T,
F,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
#[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global,
> where
T: 'a,
F: 'a + FnMut(&T) -> bool,
{
pred: F,
inner: super::map::DrainFilterInner<'a, T, ()>,
alloc: &'a A,
alloc: A,
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<T, F, A: Allocator> Drop for DrainFilter<'_, T, F, A>
impl<T, F, A: Allocator + Clone> Drop for DrainFilter<'_, T, F, A>
where
F: FnMut(&T) -> bool,
{
@ -1299,7 +1299,7 @@ where
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<T, F, A: Allocator> fmt::Debug for DrainFilter<'_, T, F, A>
impl<T, F, A: Allocator + Clone> fmt::Debug for DrainFilter<'_, T, F, A>
where
T: fmt::Debug,
F: FnMut(&T) -> bool,
@ -1310,7 +1310,7 @@ where
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<'a, T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A>
impl<'a, T, F, A: Allocator + Clone> Iterator for DrainFilter<'_, T, F, A>
where
F: 'a + FnMut(&T) -> bool,
{
@ -1328,10 +1328,13 @@ where
}
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<T, F, A: Allocator> FusedIterator for DrainFilter<'_, T, F, A> where F: FnMut(&T) -> bool {}
impl<T, F, A: Allocator + Clone> FusedIterator for DrainFilter<'_, T, F, A> where
F: FnMut(&T) -> bool
{
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord, A: Allocator> Extend<T> for BTreeSet<T, A> {
impl<T: Ord, A: Allocator + Clone> Extend<T> for BTreeSet<T, A> {
#[inline]
fn extend<Iter: IntoIterator<Item = T>>(&mut self, iter: Iter) {
iter.into_iter().for_each(move |elem| {
@ -1346,7 +1349,7 @@ impl<T: Ord, A: Allocator> Extend<T> for BTreeSet<T, A> {
}
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Ord + Copy, A: Allocator> Extend<&'a T> for BTreeSet<T, A> {
impl<'a, T: 'a + Ord + Copy, A: Allocator + Clone> Extend<&'a T> for BTreeSet<T, A> {
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.extend(iter.into_iter().cloned());
}
@ -1466,7 +1469,7 @@ impl<T: Ord + Clone, A: Allocator + Clone> BitOr<&BTreeSet<T, A>> for &BTreeSet<
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Debug, A: Allocator> Debug for BTreeSet<T, A> {
impl<T: Debug, A: Allocator + Clone> Debug for BTreeSet<T, A> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_set().entries(self.iter()).finish()
}
@ -1519,7 +1522,7 @@ impl<T> ExactSizeIterator for Iter<'_, T> {
impl<T> FusedIterator for Iter<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
impl<T, A: Allocator + Clone> Iterator for IntoIter<T, A> {
type Item = T;
fn next(&mut self) -> Option<T> {
@ -1531,20 +1534,20 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
impl<T, A: Allocator + Clone> DoubleEndedIterator for IntoIter<T, A> {
fn next_back(&mut self) -> Option<T> {
self.iter.next_back().map(|(k, _)| k)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
impl<T, A: Allocator + Clone> ExactSizeIterator for IntoIter<T, A> {
fn len(&self) -> usize {
self.iter.len()
}
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}
impl<T, A: Allocator + Clone> FusedIterator for IntoIter<T, A> {}
#[stable(feature = "btree_range", since = "1.17.0")]
impl<T> Clone for Range<'_, T> {
@ -1602,7 +1605,7 @@ impl<T, A: Allocator + Clone> Clone for Difference<'_, T, A> {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord, A: Allocator> Iterator for Difference<'a, T, A> {
impl<'a, T: Ord, A: Allocator + Clone> Iterator for Difference<'a, T, A> {
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> {
@ -1649,7 +1652,7 @@ impl<'a, T: Ord, A: Allocator> Iterator for Difference<'a, T, A> {
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord, A: Allocator> FusedIterator for Difference<'_, T, A> {}
impl<T: Ord, A: Allocator + Clone> FusedIterator for Difference<'_, T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for SymmetricDifference<'_, T> {
@ -1703,7 +1706,7 @@ impl<T, A: Allocator + Clone> Clone for Intersection<'_, T, A> {
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord, A: Allocator> Iterator for Intersection<'a, T, A> {
impl<'a, T: Ord, A: Allocator + Clone> Iterator for Intersection<'a, T, A> {
type Item = &'a T;
fn next(&mut self) -> Option<&'a T> {
@ -1744,7 +1747,7 @@ impl<'a, T: Ord, A: Allocator> Iterator for Intersection<'a, T, A> {
}
#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord, A: Allocator> FusedIterator for Intersection<'_, T, A> {}
impl<T: Ord, A: Allocator + Clone> FusedIterator for Intersection<'_, T, A> {}
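
For reference, the stable, user-visible behavior of the `Intersection` and `Difference` iterators whose bounds changed above:

```rust
use std::collections::BTreeSet;

fn main() {
    let a = BTreeSet::from([1, 2, 3]);
    let b = BTreeSet::from([2, 3, 4]);
    assert_eq!(a.intersection(&b).copied().collect::<Vec<_>>(), [2, 3]);
    assert_eq!(a.difference(&b).copied().collect::<Vec<_>>(), [1]);
}
```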
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Union<'_, T> {


@ -29,12 +29,12 @@ impl<K, V> Root<K, V> {
/// and if the ordering of `Q` corresponds to that of `K`.
/// If `self` respects all `BTreeMap` tree invariants, then both
/// `self` and the returned tree will respect those invariants.
pub fn split_off<Q: ?Sized + Ord, A: Allocator>(&mut self, key: &Q, alloc: &A) -> Self
pub fn split_off<Q: ?Sized + Ord, A: Allocator + Clone>(&mut self, key: &Q, alloc: A) -> Self
where
K: Borrow<Q>,
{
let left_root = self;
let mut right_root = Root::new_pillar(left_root.height(), alloc);
let mut right_root = Root::new_pillar(left_root.height(), alloc.clone());
let mut left_node = left_root.borrow_mut();
let mut right_node = right_root.borrow_mut();
@ -57,16 +57,16 @@ impl<K, V> Root<K, V> {
}
}
left_root.fix_right_border(alloc);
left_root.fix_right_border(alloc.clone());
right_root.fix_left_border(alloc);
right_root
}
/// Creates a tree consisting of empty nodes.
fn new_pillar<A: Allocator>(height: usize, alloc: &A) -> Self {
let mut root = Root::new(alloc);
fn new_pillar<A: Allocator + Clone>(height: usize, alloc: A) -> Self {
let mut root = Root::new(alloc.clone());
for _ in 0..height {
root.push_internal_level(alloc);
root.push_internal_level(alloc.clone());
}
root
}
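
A toy model of the "pillar" shape that `new_pillar` builds (assuming a simplified node type, for illustration): a chain of empty internal levels over an empty leaf, so the right-hand tree starts at the same height as the left one before the borders are fixed:

```rust
enum Node {
    Leaf,                // empty leaf at the bottom
    Internal(Box<Node>), // empty internal level with a single child
}

fn new_pillar(height: usize) -> Node {
    let mut root = Node::Leaf;
    for _ in 0..height {
        root = Node::Internal(Box::new(root)); // one empty level per iteration
    }
    root
}

fn main() {
    assert!(matches!(new_pillar(2), Node::Internal(_)));
}
```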