diff --git a/src/runtime2/store/component.rs b/src/runtime2/store/component.rs
index 52f8ace8bd4d3b0161ee35d3ce417d5e111d811c..8076e2cd65bf6b646ba448ee27f53f227f7e2ce9 100644
--- a/src/runtime2/store/component.rs
+++ b/src/runtime2/store/component.rs
@@ -61,7 +61,7 @@ struct Inner {
     index_mask: usize,
 }
 
-type InnerRead<'a, T> = UnfairSeLockSharedGuard<'a, Inner>;
+type InnerShared<'a, T> = UnfairSeLockSharedGuard<'a, Inner>;
 
 impl ComponentStore {
     pub fn new(initial_size: usize) -> Self {
@@ -133,22 +133,22 @@ impl ComponentStore {
     }
 
     #[inline]
-    fn pop_freelist_index<'a>(&'a self, mut read_lock: InnerRead<'a, T>) -> (InnerRead<'a, T>, u32) {
+    fn pop_freelist_index<'a>(&'a self, mut shared_lock: InnerShared<'a, T>) -> (InnerShared<'a, T>, u32) {
         'attempt_read: loop {
             // Load indices and check for reallocation condition
-            let current_size = read_lock.size;
+            let current_size = shared_lock.size;
             let mut read_index = self.read_head.load(Ordering::Relaxed);
             let limit_index = self.limit_head.load(Ordering::Acquire);
 
             if read_index == limit_index {
-                read_lock = self.reallocate(current_size, read_lock);
+                shared_lock = self.reallocate(current_size, shared_lock);
                 continue 'attempt_read;
             }
 
             loop {
-                let preemptive_read = read_lock.freelist[read_index & read_lock.index_mask];
+                let preemptive_read = shared_lock.freelist[read_index & shared_lock.index_mask];
                 if let Err(actual_read_index) = self.read_head.compare_exchange(
-                    read_index, (read_index + 1) & read_lock.compare_mask,
+                    read_index, (read_index + 1) & shared_lock.compare_mask,
                     Ordering::AcqRel, Ordering::Acquire
                 ) {
                     // We need to try again
@@ -157,13 +157,13 @@ impl ComponentStore {
                 }
 
                 // If here then we performed the read
-                return (read_lock, preemptive_read);
+                return (shared_lock, preemptive_read);
             }
         }
     }
 
     #[inline]
-    fn initialize_at_index(&self, read_lock: InnerRead, index: u32, value: T) {
+    fn initialize_at_index(&self, read_lock: InnerShared, index: u32, value: T) {
         let mut target_ptr = read_lock.data[index as usize];
 
         unsafe {
@@ -179,7 +179,7 @@ impl ComponentStore {
     }
 
     #[inline]
-    fn push_freelist_index(&self, read_lock: &InnerRead, index_to_put_back: u32) {
+    fn push_freelist_index(&self, read_lock: &InnerShared, index_to_put_back: u32) {
         // Acquire an index in the freelist to which we can write
         let mut cur_write_index = self.write_head.load(Ordering::Relaxed);
         let mut new_write_index = (cur_write_index + 1) & read_lock.compare_mask;
@@ -208,14 +208,14 @@ impl ComponentStore {
     }
 
     #[inline]
-    fn destruct_at_index(&self, read_lock: &InnerRead, index: u32) {
+    fn destruct_at_index(&self, read_lock: &InnerShared, index: u32) {
         let target_ptr = read_lock.data[index as usize];
         unsafe{ ptr::drop_in_place(target_ptr); }
     }
 
     // NOTE: Bit of a mess, and could have a cleanup with better logic for the
     // resizing. Maybe even a different indexing scheme...
-    fn reallocate(&self, old_size: usize, inner: InnerRead) -> InnerRead {
+    fn reallocate(&self, old_size: usize, inner: InnerShared) -> InnerShared {
         drop(inner);
         {
             // After dropping read lock, acquire write lock
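
Note (commentary, not part of the patch): the functions renamed here implement a lock-free MPMC ring buffer of free slot indices, guarded by a shared/exclusive lock so that `reallocate` can grow the storage. Below is a minimal standalone sketch of just that freelist scheme, under stated simplifying assumptions: the names `FreelistRing`, `pop`, and `push` are hypothetical and not part of this crate; the sketch uses a fixed power-of-two capacity with wrapping head counters instead of the real code's `compare_mask`, and it returns `None` where the real code would drop the shared guard and reallocate.

use std::sync::atomic::{AtomicU32, Ordering};

// Hypothetical, simplified model of the ComponentStore freelist.
struct FreelistRing {
    slots: Vec<AtomicU32>, // ring storage holding the free slot indices
    read_head: AtomicU32,  // next ring position to pop from
    limit_head: AtomicU32, // first ring position not yet fully published
    write_head: AtomicU32, // next ring position to push to
    index_mask: u32,       // capacity - 1, for cheap modulo indexing
}

impl FreelistRing {
    fn new(capacity: u32) -> Self {
        assert!(capacity.is_power_of_two());
        Self {
            // Initially every slot index 0..capacity is free.
            slots: (0..capacity).map(AtomicU32::new).collect(),
            read_head: AtomicU32::new(0),
            limit_head: AtomicU32::new(capacity),
            write_head: AtomicU32::new(capacity),
            index_mask: capacity - 1,
        }
    }

    // Mirrors pop_freelist_index: speculatively read the entry, then claim
    // it by CAS-ing the read head forward. Losing the race returns the
    // winner's view of the head, so we retry from there.
    fn pop(&self) -> Option<u32> {
        let mut read = self.read_head.load(Ordering::Relaxed);
        loop {
            let limit = self.limit_head.load(Ordering::Acquire);
            if read == limit {
                return None; // empty; the real code reallocates here
            }
            let value = self.slots[(read & self.index_mask) as usize]
                .load(Ordering::Relaxed);
            match self.read_head.compare_exchange(
                read, read.wrapping_add(1),
                Ordering::AcqRel, Ordering::Acquire,
            ) {
                Ok(_) => return Some(value),
                Err(actual) => read = actual,
            }
        }
    }

    // Mirrors push_freelist_index: claim a write position, store the value,
    // then advance limit_head past it. The final CAS only succeeds once all
    // earlier writers have published their slots, which keeps pops from
    // ever observing a half-written entry.
    fn push(&self, index: u32) {
        let mut write = self.write_head.load(Ordering::Relaxed);
        while let Err(actual) = self.write_head.compare_exchange(
            write, write.wrapping_add(1),
            Ordering::AcqRel, Ordering::Acquire,
        ) {
            write = actual;
        }
        self.slots[(write & self.index_mask) as usize]
            .store(index, Ordering::Relaxed);
        while self.limit_head.compare_exchange(
            write, write.wrapping_add(1),
            Ordering::AcqRel, Ordering::Relaxed,
        ).is_err() {
            std::hint::spin_loop();
        }
    }
}

fn main() {
    let ring = FreelistRing::new(8);
    let slot = ring.pop().expect("freshly created ring has free slots");
    // ... construct a component in `slot`, later destroy it ...
    ring.push(slot);
}

The separate `limit_head` exists because claiming a write position and publishing the value are two distinct steps; readers compare against `limit_head`, not `write_head`, so they never pop past the last fully published entry. That separation is also why the rename in this diff helps: the guard held across these operations is a shared (not read-only) lock on the storage, and `InnerShared` says so.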