CSY/reowolf/src/protocol/eval/store.rs @ 3845071002b0 (6.5 KiB)
Commit: reorganization of evaluator, WIP on hooking up to old system
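//! Value storage for the evaluator: a non-shrinking stack of `Value`s plus a
//! simulated heap of regions for values that occupy more than one stack slot.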
use std::collections::VecDeque;
use super::value::{Value, ValueId, HeapPos};
pub(crate) struct HeapAllocation {
    pub values: Vec<Value>,
}

pub(crate) struct Store {
    // The stack where variables/parameters are stored. Note that this is a
    // non-shrinking stack, so it may be filled with garbage.
    pub(crate) stack: Vec<Value>,
    // The index in the stack of the `PrevStackBoundary` value that records
    // the previous stack boundary, so we can pop the frame after function
    // calls.
    pub(crate) cur_stack_boundary: usize,
    // A rather ridiculous simulated heap, but this allows us to "allocate"
    // things that occupy more than one stack slot.
    pub(crate) heap_regions: Vec<HeapAllocation>,
    pub(crate) free_regions: VecDeque<HeapPos>,
}

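// How a `ValueId` maps onto the storage above (see the accessors below):
//
//   ValueId::Stack(pos)          -> stack[cur_stack_boundary + 1 + pos]
//   ValueId::Heap(heap, region)  -> heap_regions[heap].values[region]
//
// Stack positions are therefore relative to the current frame, which starts
// one slot past the `PrevStackBoundary` marker.
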
impl Store {
    pub(crate) fn new() -> Self {
        let mut store = Self{
            stack: Vec::with_capacity(64),
            cur_stack_boundary: 0,
            heap_regions: Vec::new(),
            free_regions: VecDeque::new(),
        };

        // The bottom of the stack holds the boundary of the (nonexistent)
        // previous frame.
        store.stack.push(Value::PrevStackBoundary(-1));
        store
    }

    /// Resizes(!) the stack so it can hold the required number of values; it
    /// does not merely reserve capacity, and it never shrinks the stack.
    /// Newly added slots are initialized to `Unassigned`. The specified stack
    /// index is exclusive.
    pub(crate) fn reserve_stack(&mut self, unique_stack_idx: usize) {
        let new_size = self.cur_stack_boundary + unique_stack_idx + 1;
        if new_size > self.stack.len() {
            self.stack.resize(new_size, Value::Unassigned);
        }
    }

    /// Clears stack values at and above the specified index, releasing their
    /// heap allocations where applicable. The specified index itself is also
    /// cleared (so specifying 0 destroys all values in the current frame).
    pub(crate) fn clear_stack(&mut self, unique_stack_idx: usize) {
        let new_size = self.cur_stack_boundary + unique_stack_idx + 1;
        for idx in new_size..self.stack.len() {
            // Release any heap region first, then reset the slot. Going
            // through the heap position directly avoids holding a borrow of
            // the stack slot across the `&mut self` call.
            if let Some(heap_pos) = self.stack[idx].get_heap_pos() {
                self.drop_heap_pos(heap_pos);
            }
            self.stack[idx] = Value::Unassigned;
        }
    }

    /// Reads the value at an address. The value is always (deeply) copied, so
    /// if it ends up not being written anywhere, `drop_value` should be
    /// called on it.
    pub(crate) fn read_copy(&mut self, address: ValueId) -> Value {
        match address {
            ValueId::Stack(pos) => {
                let cur_pos = self.cur_stack_boundary + 1 + pos as usize;
                // Take a shallow copy of the slot first so the borrow of the
                // stack ends before the `&mut self` call below.
                let value = self.stack[cur_pos].clone();
                return self.clone_value(&value);
            },
            ValueId::Heap(heap_pos, region_idx) => {
                let value = self.heap_regions[heap_pos as usize].values[region_idx as usize].clone();
                return self.clone_value(&value);
            }
        }
    }

    /// Returns an immutable reference to the value pointed to by an address.
    pub(crate) fn read_ref(&self, address: ValueId) -> &Value {
        match address {
            ValueId::Stack(pos) => {
                let cur_pos = self.cur_stack_boundary + 1 + pos as usize;
                return &self.stack[cur_pos];
            },
            ValueId::Heap(heap_pos, region_idx) => {
                return &self.heap_regions[heap_pos as usize].values[region_idx as usize];
            }
        }
    }

    /// Returns a mutable reference to the value pointed to by an address.
    pub(crate) fn read_mut_ref(&mut self, address: ValueId) -> &mut Value {
        match address {
            ValueId::Stack(pos) => {
                let cur_pos = self.cur_stack_boundary + 1 + pos as usize;
                return &mut self.stack[cur_pos];
            },
            ValueId::Heap(heap_pos, region_idx) => {
                return &mut self.heap_regions[heap_pos as usize].values[region_idx as usize];
            }
        }
    }

    /// Writes a value to an address, releasing the heap allocation of the
    /// value that was previously stored there (if any).
    pub(crate) fn write(&mut self, address: ValueId, value: Value) {
        match address {
            ValueId::Stack(pos) => {
                let cur_pos = self.cur_stack_boundary + 1 + pos as usize;
                if let Some(old_heap_pos) = self.stack[cur_pos].get_heap_pos() {
                    self.drop_heap_pos(old_heap_pos);
                }
                self.stack[cur_pos] = value;
            },
            ValueId::Heap(heap_pos, region_idx) => {
                let heap_pos = heap_pos as usize;
                let region_idx = region_idx as usize;
                if let Some(old_heap_pos) = self.heap_regions[heap_pos].values[region_idx].get_heap_pos() {
                    self.drop_heap_pos(old_heap_pos);
                }
                self.heap_regions[heap_pos].values[region_idx] = value;
            }
        }
    }

    /// Clones a value. For heap-backed values this performs a deep copy into
    /// a freshly allocated heap region.
    fn clone_value(&mut self, value: &Value) -> Value {
        // Quickly check if the value does not live on the heap
        let source_heap_pos = value.get_heap_pos();
        if source_heap_pos.is_none() {
            // We can do a trivial copy
            return value.clone();
        }

        // Value does live on the heap, copy its region element by element
        let source_heap_pos = source_heap_pos.unwrap() as usize;
        let target_heap_pos = self.alloc_heap();
        let target_heap_pos_usize = target_heap_pos as usize;

        let num_values = self.heap_regions[source_heap_pos].values.len();
        for value_idx in 0..num_values {
            // Shallow-copy the element first so the borrow of `heap_regions`
            // does not overlap the recursive `&mut self` call.
            let to_clone = self.heap_regions[source_heap_pos].values[value_idx].clone();
            let cloned = self.clone_value(&to_clone);
            self.heap_regions[target_heap_pos_usize].values.push(cloned);
        }

        match value {
            Value::Message(_) => Value::Message(target_heap_pos),
            Value::Array(_) => Value::Array(target_heap_pos),
            Value::Union(tag, _) => Value::Union(*tag, target_heap_pos),
            Value::Struct(_) => Value::Struct(target_heap_pos),
            _ => unreachable!("performed clone_value on heap, but {:?} is not a heap value", value),
        }
    }

    /// Releases the heap allocation backing a value, if it has one.
    pub(crate) fn drop_value(&mut self, value: &Value) {
        if let Some(heap_pos) = value.get_heap_pos() {
            self.drop_heap_pos(heap_pos);
        }
    }

    /// Releases a heap region, recursively releasing any heap regions that
    /// its values refer to, and marks it as free for reuse.
    pub(crate) fn drop_heap_pos(&mut self, heap_pos: HeapPos) {
        let num_values = self.heap_regions[heap_pos as usize].values.len();
        for value_idx in 0..num_values {
            if let Some(other_heap_pos) = self.heap_regions[heap_pos as usize].values[value_idx].get_heap_pos() {
                self.drop_heap_pos(other_heap_pos);
            }
        }
        self.heap_regions[heap_pos as usize].values.clear();
        self.free_regions.push_back(heap_pos);
    }

    /// Returns the index of an empty heap region that is ready for use,
    /// reusing a previously freed region when one is available.
    pub(crate) fn alloc_heap(&mut self) -> HeapPos {
        match self.free_regions.pop_back() {
            Some(idx) => idx,
            None => {
                let idx = self.heap_regions.len() as HeapPos;
                self.heap_regions.push(HeapAllocation{ values: Vec::new() });
                idx
            }
        }
    }
}
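
// Illustrative usage sketch of the store. It assumes that `ValueId::Stack`
// takes a plain integer index and that `HeapPos` is a plain integer type, as
// the casts above suggest; adjust to the actual definitions in
// `super::value`.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn stack_and_heap_roundtrip() {
        let mut store = Store::new();

        // Make room for two stack slots (indices 0 and 1) in the current frame.
        store.reserve_stack(2);

        // Allocate a heap region, give it one element, and store a
        // heap-backed value in stack slot 0.
        let heap_pos = store.alloc_heap();
        store.heap_regions[heap_pos as usize].values.push(Value::Unassigned);
        store.write(ValueId::Stack(0), Value::Array(heap_pos));

        // Reading by copy deep-clones the heap region, so the copy refers to
        // a different region than the original.
        let copy = store.read_copy(ValueId::Stack(0));
        match &copy {
            Value::Array(copied_pos) => assert_ne!(*copied_pos, heap_pos),
            _ => panic!("expected an array value"),
        }

        // The copy is never written anywhere, so release it explicitly.
        store.drop_value(&copy);

        // Clearing the frame from index 0 releases the original region too.
        store.clear_stack(0);
        assert_eq!(store.free_regions.len(), 2);
    }
}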