diff --git a/src/collections/raw_vec.rs b/src/collections/raw_vec.rs
new file mode 100644
index 0000000000000000000000000000000000000000..89ca6c2ac19bfc99e5ec1dad7609213e0dd2df70
--- /dev/null
+++ b/src/collections/raw_vec.rs
@@ -0,0 +1,128 @@
+use std::{mem, ptr, cmp};
+use std::alloc::{Layout, alloc, dealloc, handle_alloc_error};
+
+#[derive(Debug)]
+enum AllocError {
+    CapacityOverflow,
+}
+
+/// Generic raw vector. It has a base pointer, a capacity and a length. Basic
+/// operations are supported, but the user of the structure is responsible for
+/// ensuring that no illegal mutable access occurs.
+/// A lot of the logic is simply stolen from the std lib. The destructor will
+/// free the backing memory, but will not run any element destructors.
+pub struct RawVec<T> {
+    base: *mut T,
+    cap: usize,
+    len: usize,
+}
+
+impl<T> RawVec<T> {
+    const T_ALIGNMENT: usize = mem::align_of::<T>();
+    const T_SIZE: usize = mem::size_of::<T>();
+
+    const GROWTH_RATE: usize = 2;
+
+    pub fn new() -> Self {
+        Self {
+            base: ptr::null_mut(),
+            cap: 0,
+            len: 0,
+        }
+    }
+
+    pub fn with_capacity(capacity: usize) -> Self {
+        // Could be done a bit more efficiently
+        let mut result = Self::new();
+        result.ensure_space(capacity).expect("capacity overflow");
+        return result;
+    }
+
+    pub unsafe fn get(&self, idx: usize) -> *const T {
+        debug_assert!(idx < self.len);
+        return self.base.add(idx);
+    }
+
+    pub unsafe fn get_mut(&self, idx: usize) -> *mut T {
+        debug_assert!(idx < self.len);
+        return self.base.add(idx);
+    }
+
+    pub fn push(&mut self, item: T) {
+        self.ensure_space(1).expect("capacity overflow");
+        unsafe {
+            let target = self.base.add(self.len);
+            std::ptr::write(target, item);
+            self.len += 1;
+        }
+    }
+
+    pub fn len(&self) -> usize {
+        return self.len;
+    }
+
+    fn ensure_space(&mut self, additional: usize) -> Result<(), AllocError> {
+        debug_assert!(Self::T_SIZE != 0);
+        debug_assert!(self.cap >= self.len);
+        if self.cap - self.len < additional {
+            // Need to resize. Note that due to all checked conditions we have
+            // that new_cap >= 1.
+            debug_assert!(additional > 0);
+            let new_cap = self.len.checked_add(additional).unwrap();
+            let new_cap = cmp::max(new_cap, self.cap * Self::GROWTH_RATE);
+
+            let layout = Layout::array::<T>(new_cap)
+                .map_err(|_| AllocError::CapacityOverflow)?;
+            debug_assert_eq!(new_cap * Self::T_SIZE, layout.size());
+
+            unsafe {
+                // Allocate new storage, transfer bits, deallocate old store
+                let new_base = alloc(layout);
+                if new_base.is_null() {
+                    handle_alloc_error(layout);
+                }
+
+                if self.cap > 0 {
+                    let old_base = self.base as *mut u8;
+                    let (old_size, old_layout) = self.current_layout();
+
+                    // Copy the old contents into the new allocation (src, dst, count).
+                    ptr::copy_nonoverlapping(old_base, new_base, old_size);
+                    dealloc(old_base, old_layout);
+                }
+
+                self.base = new_base as *mut T;
+                self.cap = new_cap;
+            }
+        } // else: still enough space
+
+        return Ok(());
+    }
+
+    #[inline]
+    fn current_layout(&self) -> (usize, Layout) {
+        debug_assert!(Self::T_SIZE > 0);
+        let old_size = self.cap * Self::T_SIZE;
+        unsafe {
+            return (
+                old_size,
+                Layout::from_size_align_unchecked(old_size, Self::T_ALIGNMENT)
+            );
+        }
+    }
+}
+
+impl<T> Drop for RawVec<T> {
+    fn drop(&mut self) {
+        if self.cap > 0 {
+            debug_assert!(!self.base.is_null());
+            let (_, layout) = self.current_layout();
+            unsafe {
+                dealloc(self.base as *mut u8, layout);
+                if cfg!(debug_assertions) {
+                    self.base = ptr::null_mut();
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
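
As a minimal usage sketch (not part of the patch): the snippet below assumes the new file is wired up as crate::collections::raw_vec via the usual mod declarations, and the String element type plus the manual drop_in_place loop are illustrative choices only, since RawVec's Drop frees the backing allocation but never runs element destructors.

    use crate::collections::raw_vec::RawVec; // hypothetical module path matching the file location

    fn raw_vec_usage_sketch() {
        let mut v: RawVec<String> = RawVec::with_capacity(2);
        v.push(String::from("a"));
        v.push(String::from("b"));
        v.push(String::from("c")); // third push grows the backing storage

        unsafe {
            // Raw access: the caller guarantees in-bounds indices and no
            // conflicting mutable access.
            assert_eq!(*v.get(0), "a");
            (*v.get_mut(2)).push('!');
            assert_eq!(*v.get(2), "c!");

            // Drop only frees the allocation, so owned element types have to
            // be destroyed by hand before the RawVec goes out of scope.
            for i in 0..v.len() {
                std::ptr::drop_in_place(v.get_mut(i));
            }
        }
    }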