diff --git a/src/const_math_hack.rs b/src/const_math_hack.rs index 1fab9c8..2748ec7 100644 --- a/src/const_math_hack.rs +++ b/src/const_math_hack.rs @@ -175,11 +175,15 @@ pub unsafe trait MappedArray: ArrayHack { /// Produces a new [`ArrayHack`] array of the same length fn map_array E>(self, mut f: F) -> Self::Mapped { + self.map_array_with_index(|_i, e| f(e)) + } + + fn map_array_with_index E>(self, mut f: F) -> Self::Mapped { let mut uninit = GradualUninitArray::new(self); let mut init = GradualInitArray::new(); - for _ in 0..Self::LEN { - unsafe { init.push_unchecked(f(uninit.take_unchecked())) }; + for i in 0..Self::LEN { + unsafe { init.push_unchecked(f(i, uninit.take_unchecked())) }; } mem::forget(uninit); @@ -252,6 +256,25 @@ impl GradualInitArray { self.len += 1; } + /// Converts the partially initialized array into one of the same length, where + /// each element is wrapped in a `MaybeUninit` + /// + /// The first `n` elements, where `n` is the number of calls that have been made to [`push`] or + /// [`push_unchecked`], are guaranteed to be initialized. + /// + /// [`push`]: Self::push + /// [`push_unchecked`]: Self::push_unchecked + pub fn into_uninit(mut self) -> >>::Mapped + where + A: MappedArray::Element>>, + { + // Overwrite the length so calling the destructor is fine + self.len = 0; + + // Sickos: yes... ha ha ha... YES! 
+ unsafe { std::mem::transmute_copy(&self.array) } + } + /// Converts the `GradualInitArray` into the initialized inner array, if it has been completely /// initialized /// diff --git a/src/tree/mod.rs b/src/tree/mod.rs index 8133f42..90dcdb0 100644 --- a/src/tree/mod.rs +++ b/src/tree/mod.rs @@ -668,11 +668,19 @@ where /// trees performing a cheap, shallow clone impl Clone for RleTree where - I: Clone, + I: Copy, S: Clone, { fn clone(&self) -> Self { - todo!() + match self.root.as_ref() { + None => RleTree { root: None }, + Some(root) => RleTree { + root: Some(Root { + handle: ManuallyDrop::new(root.handle.as_immut().deep_clone().erase_unique()), + refs_store: Default::default(), + }), + }, + } } } @@ -699,16 +707,6 @@ impl Clone for RleTree { } } -impl RleTree { - pub fn clone_with_refs(&self) -> Self { - todo!() - } - - pub fn clone_without_refs(&self) -> Self { - todo!() - } -} - /// (*Internal*) Helper type for various tree operations #[derive(Debug, Copy, Clone)] #[repr(u8)] diff --git a/src/tree/node.rs b/src/tree/node.rs index 0a57af5..ffa8e71 100644 --- a/src/tree/node.rs +++ b/src/tree/node.rs @@ -35,7 +35,8 @@ use crate::MaybeDebug; #[cfg(test)] use std::fmt::{self, Debug, Formatter}; -use crate::const_math_hack::{self as hack, ArrayHack}; +use super::NodeBox; +use crate::const_math_hack::{self as hack, ArrayHack, GradualInitArray, MappedArray}; use crate::param::{self, RleTreeConfig, SliceRefStore, StrongCount, SupportsInsert}; use crate::public_traits::Index; @@ -1041,44 +1042,33 @@ where Some(p) => (Some(p.ptr), MaybeUninit::new(p.idx_in_parent)), }; - let mut new_leaf: Leaf = Leaf { + // Copy all of the slices + let mut vals: GradualInitArray> = GradualInitArray::new(); + for i in 0..this_leaf.len() as usize { + // SAFETY: `i < this_leaf.len` guarantees it's within bounds and initialized. Within + // bounds means we won't have written off the end of `vals` as well. 
The call to + // `P::clone_slice` requires that `P = AllowCow`, which is guaranteed by the caller + unsafe { + let this_val = this_leaf.vals.get_unchecked(i).assume_init_ref(); + let new_val = P::clone_slice(this_val); + vals.push_unchecked(new_val); + } + } + + let new_leaf: Leaf = Leaf { parent, idx_in_parent, holes: [None; 2], - len: 0, + len: this_leaf.len, strong_count: P::StrongCount::one(), keys: this_leaf.keys, - vals: KeyArray::new(), + vals: vals.into_uninit(), + // Note: `P::SliceRefStore::OptionRefId` is zero-sized for `param::AllowCow`, so we can + // leave the entire array uninitialized. refs: KeyArray::new(), - total_size: self.leaf().subtree_size(), + total_size: this_leaf.subtree_size(), }; - // With `parent`, `idx_in_parent`, `holes`, `strong_count`, and `total_size` properly - // instantiated, we only have `vals`/`refs`/`len` left (plus the child pointers, if - // this is an internal node). Let's look at each of these briefly: - // * `vals` -- must be cloned with `SupportsInsert::clone_slice` - // * `refs` -- `P::SliceRefStore::OptionRefId` is zero-sized for `param::AllowCow`, so we - // can leave the entire array uninitialized. - // * `child_ptrs` (if internal) -- can be copied, but strong counts must be incremented - // * `len` -- will be set equal to `self.leaf().len` - - for i in 0..this_leaf.len { - // SAFETY: the calls to various `get_unchecked` => `assume_init_ref` broadly require - // that `i < this_leaf.len` and that `this_leaf.len <= KeyArray<_, M>::LEN`. The first - // is guaranteed by the range above, and the second is always true. The call to - // `P::clone_slice` requires that `P = AllowCow`, which is guaranteed by the caller. 
- unsafe { - let this_val = this_leaf.vals.get_unchecked(i as usize).assume_init_ref(); - let new_val = P::clone_slice(this_val); - new_leaf.vals.get_mut_unchecked(i as usize).write(new_val); - } - // set `len` as we go so that -- if `clone_slice` panics -- we drop everything that's - // already been initialized. - new_leaf.len = i + 1; - } - - // Everything but `child_ptrs` is now initialized - match self.typed_ref() { Type::Leaf(_) => NodeHandle { ptr: alloc_aligned(new_leaf).cast(), @@ -1129,6 +1119,122 @@ where } } +// any type, borrow::Immut, param::NoFeatures +// * deep_clone (where I: Copy, S: Clone) +impl<'t, Ty, I, S, const M: usize> NodeHandle, I, S, param::NoFeatures, M> +where + Ty: TypeHint, +{ + /// Creates a deep clone of the node, cloning it and performing a deep clone of all of its + /// children + pub fn deep_clone(&self) -> NodeHandle + where + I: Copy, + S: Clone, + { + let this_leaf = self.leaf(); + + if this_leaf.has_holes() { + panic_invalid_state(); + } + + // Copy all of the slices + let mut vals: GradualInitArray> = GradualInitArray::new(); + for i in 0..this_leaf.len() as usize { + // SAFETY: `i < this_leaf.len` guarantees it's within bounds and initialized. Within + // bounds means we won't have written off the end of `vals` as well. + unsafe { + let val = this_leaf.vals.get_unchecked(i).assume_init_ref().clone(); + vals.push_unchecked(val); + } + } + + let new_leaf: Leaf = Leaf { + parent: None, + idx_in_parent: MaybeUninit::uninit(), + holes: [None; 2], + len: this_leaf.len, + strong_count: (), + keys: this_leaf.keys, + vals: vals.into_uninit(), + // Note: We can leave `refs` uninitialized because `param::NoFeatures` has + // `OptionRefId = ()`, so the total size of `refs` is zero. 
+ refs: KeyArray::new(), + total_size: this_leaf.subtree_size(), + }; + + match self.typed_ref() { + Type::Leaf(_) => NodeHandle { + ptr: alloc_aligned(new_leaf).cast::>(), + height: self.height, + borrow: PhantomData, + }, + Type::Internal(this) => { + let mut child_ptrs: GradualInitArray< + ChildArray, M>, + > = GradualInitArray::new(); + + for i in 0..=this_leaf.len { + // SAFETY: `i <= this_leaf.len` guarantees it's within bounds and initialized. + // Within bounds means we won't write off the end of `child_ptrs` as well. + unsafe { + let mut child = NodeBox::new(this.child(i).deep_clone()); + child.as_mut().with_mut(|leaf| { + let _ = leaf.idx_in_parent.write(i); + }); + child_ptrs.push_unchecked(child); + } + } + + let new_internal = Internal { + leaf: new_leaf, + child_ptrs: child_ptrs.into_uninit().map_array_with_index(|i, p| { + if i as u8 <= this_leaf.len { + // SAFETY: the bound `i <= this_leaf.len` means `p` is initialized + unsafe { MaybeUninit::new(p.assume_init().take().ptr) } + } else { + MaybeUninit::uninit() + } + }), + }; + + // Before we return, we have to overwrite the parent pointers in all of this node's + // children. 
+ let mut handle: NodeHandle = + NodeHandle { + ptr: alloc_aligned(new_internal) + .cast::>(), + // SAFETY: `self.height` is known to be non-zero because we already found + // that this is an internal node + height: unsafe { NonZeroU8::new_unchecked(self.height.as_u8()) }, + borrow: PhantomData, + }; + + let new_ptr = handle.ptr; + + unsafe { + handle.as_mut().with_internal(|internal| { + for i in 0..=this_leaf.len { + let p = internal + .child_ptrs + .get_mut_unchecked(i as usize) + .assume_init_mut(); + let leaf_mut = p.cast::>().as_mut(); + leaf_mut.parent = Some(new_ptr); + } + }); + } + + NodeHandle { + ptr: handle.ptr, + height: self.height, + borrow: PhantomData, + } + } + } + } +} + /// An owned, extracted key-value pair from a node pub struct Key, const M: usize> { pub pos: I, diff --git a/src/tree/tests/manual.rs b/src/tree/tests/manual.rs index 9fcc8b3..18f7d72 100644 --- a/src/tree/tests/manual.rs +++ b/src/tree/tests/manual.rs @@ -104,3 +104,19 @@ fn basic_slice_ref() { assert_eq!(r0.range(), 0..2); assert_eq!(&*r0.borrow_slice(), &Constant('a')); } + +#[test] +fn basic_deep_clone() { + let mut tree: RleTree, NoFeatures> = RleTree::new_empty(); + tree.insert(0, Constant('a'), 4); + tree.insert(0, Constant('b'), 2); + tree.insert(6, Constant('c'), 3); + tree.insert(6, Constant('d'), 3); + tree.insert(6, Constant('e'), 3); + tree.insert(6, Constant('f'), 3); + tree.insert(6, Constant('g'), 3); + tree.validate(); + + let copied_tree = tree.clone(); + copied_tree.validate(); +}