Changeset - 012b61623f5a
src/collections/scoped_buffer.rs
 
/// scoped_buffer.rs
 
///
 
/// Solves the common pattern where we perform some kind of recursive procedure

/// while using a temporary buffer: at the start of, or during, the procedure we

/// push items into the buffer, and at the end we take out what we have put in.
 
///
 
/// It is unsafe because we're using pointers to circumvent borrowing rules in
 
/// the name of code cleanliness. The correctness of use is checked in debug
 
/// mode.
 

	
 
/// The buffer itself. This struct should be the shared buffer. The type `T` is
 
/// intentionally `Copy` such that it can be copied out and the underlying
 
/// container can be truncated.
 
pub(crate) struct ScopedBuffer<T: Sized + Copy> {
 
    pub inner: Vec<T>,
 
}
 

	
 
/// A section of the buffer. Keeps track of where we started the section. When
 
/// done with the section one must call `into_vec` or `forget` to remove the
 
/// section from the underlying buffer.
 
pub(crate) struct ScopedSection<T: Sized + Copy> {
 
    inner: *mut Vec<T>,
 
    start_size: u32,
 
    cur_size: u32, // only updated and checked in debug builds
 
}
 

	
 
impl<T: Sized + Copy> ScopedBuffer<T> {
 
    pub(crate) fn new_reserved(capacity: usize) -> Self {
 
        Self{ inner: Vec::with_capacity(capacity) }
 
    }
 

	
 
    pub(crate) fn start_section(&mut self) -> ScopedSection<T> {
 
        let start_size = self.inner.len() as u32;
 
        ScopedSection{
 
            inner: &mut self.inner,
 
            start_size,
 
            cur_size: start_size
 
        }
 
    }
 

	
 
    pub(crate) fn start_section_initialized(&mut self, initialize_with: &[T]) -> ScopedSection<T> {
 
        let start_size = self.inner.len() as u32;
 
        let data_size = initialize_with.len() as u32;
 
        self.inner.extend_from_slice(initialize_with);
 
        ScopedSection{
 
            inner: &mut self.inner,
 
            start_size,
 
            cur_size: start_size + data_size,
 
        }
 
    }
 
}
 

	
 
#[cfg(debug_assertions)]
 
impl<T: Sized + Copy> Drop for ScopedBuffer<T> {
 
    fn drop(&mut self) {
 
        // Make sure that everyone cleaned up the buffer neatly
 
        debug_assert!(self.inner.is_empty(), "dropped non-empty scoped buffer");
 
    }
 
}
 

	
 
impl<T: Sized + Copy> ScopedSection<T> {
 
    #[inline]
 
    pub(crate) fn push(&mut self, value: T) {
 
        let vec = unsafe{&mut *self.inner};
 
        debug_assert_eq!(vec.len(), self.cur_size as usize, "trying to push onto section, but size is larger than expected");
 
        vec.push(value);
 
        if cfg!(debug_assertions) { self.cur_size += 1; }
 
    }
 

	
 
    pub(crate) fn len(&self) -> usize {
 
        let vec = unsafe{&mut *self.inner};
 
        debug_assert_eq!(vec.len(), self.cur_size as usize, "trying to get section length, but size is larger than expected");
 
        return vec.len() - self.start_size as usize;
 
    }
 

	
 
    #[inline]
 
    pub(crate) fn forget(self) {
 
        let vec = unsafe{&mut *self.inner};
 
        debug_assert_eq!(vec.len(), self.cur_size as usize, "trying to forget section, but size is larger than expected");
 
        vec.truncate(self.start_size as usize);
 
    }
 

	
 
    #[inline]
 
    pub(crate) fn into_vec(self) -> Vec<T> {
 
        let vec = unsafe{&mut *self.inner};
 
        debug_assert_eq!(vec.len(), self.cur_size as usize, "trying to turn section into vec, but size is larger than expected");
 
        let section = Vec::from(&vec[self.start_size as usize..]);
 
        vec.truncate(self.start_size as usize);
 
        section
 
    }
 
}
 

	
 
impl<T: Sized + Copy> std::ops::Index<usize> for ScopedSection<T> {
 
    type Output = T;
 

	
 
    fn index(&self, idx: usize) -> &Self::Output {
 
        let vec = unsafe{&*self.inner};
 
        return &vec[self.start_size as usize + idx];
 
    }
 
}
 

	
 
#[cfg(debug_assertions)]
 
impl<T: Sized + Copy> Drop for ScopedSection<T> {
 
    fn drop(&mut self) {
 
        // Make sure that the data was actually taken out of the scoped section
 
        let vec = unsafe{&*self.inner};
 
        debug_assert_eq!(vec.len(), self.start_size as usize);
 
    }
 
}
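
// A minimal usage sketch (as a test): push into a section, take the contents
// out with `into_vec`, and observe that the shared buffer is left empty again,
// which is exactly what the debug-mode `Drop` implementations above check.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn section_roundtrip_leaves_buffer_empty() {
        let mut buffer = ScopedBuffer::new_reserved(4);

        let mut section = buffer.start_section();
        section.push(1);
        section.push(2);
        assert_eq!(section.len(), 2);

        let taken = section.into_vec();
        assert_eq!(taken, vec![1, 2]);
        assert!(buffer.inner.is_empty()); // the buffer may now be dropped safely
    }
}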
 
src/collections/string_pool.rs
 
use std::ptr::null_mut;
 
use std::collections::hash_map::DefaultHasher;
 
use std::hash::{Hash, Hasher};
 
use std::marker::PhantomData;
 

	
 
const SLAB_SIZE: usize = u16::MAX as usize;
 

	
 
#[derive(Clone)]
 
pub struct StringRef<'a> {
 
    data: *const u8,
 
    length: usize,
 
    _phantom: PhantomData<&'a [u8]>,
 
}
 

	
 
impl<'a> StringRef<'a> {
 
    /// `new` constructs a new StringRef whose data is not owned by the
 
    /// `StringPool`, hence cannot have a `'static` lifetime.
 
    pub(crate) fn new(data: &'a [u8]) -> StringRef<'a> {
 
        // This is an internal (compiler) function, so we only debug_assert that

        // the string is valid ASCII. Most commonly the input will come from the
 
        // code's source file, which is checked for ASCII-ness anyway.
 
        debug_assert!(data.is_ascii());
 
        let length = data.len();
 
        let data = data.as_ptr();
 
        StringRef{ data, length, _phantom: PhantomData }
 
    }
 

	
 
    pub fn as_str(&self) -> &'a str {
 
        unsafe {
 
            let slice = std::slice::from_raw_parts::<'a, u8>(self.data, self.length);
 
            std::str::from_utf8_unchecked(slice)
 
        }
 
    }
 

	
 
    pub fn as_bytes(&self) -> &'a [u8] {
 
        unsafe {
 
            std::slice::from_raw_parts::<'a, u8>(self.data, self.length)
 
        }
 
    }
 
}
 

	
 
impl PartialEq for StringRef<'_> {
 
    fn eq(&self, other: &StringRef) -> bool {
 
        self.as_str() == other.as_str()
 
    }
 
}
 

	
 
impl Eq for StringRef<'_> {}
 

	
 
impl Hash for StringRef<'_> {
 
    fn hash<H: Hasher>(&self, state: &mut H) {
 
        unsafe{
 
            state.write(self.as_bytes());
 
        }
 
    }
 
}
 

	
 
struct StringPoolSlab {
 
    prev: *mut StringPoolSlab,
 
    data: Vec<u8>,
 
    remaining: usize,
 
}
 

	
 
impl StringPoolSlab {
 
    fn new(prev: *mut StringPoolSlab) -> Self {
 
        Self{ prev, data: Vec::with_capacity(SLAB_SIZE), remaining: SLAB_SIZE }
 
    }
 
}
 

	
 
/// StringPool is an ever-growing pool of strings. Strings have a maximum size
 
/// equal to the slab size. The slabs are essentially a linked list to maintain
 
/// pointer-stability of the strings themselves.
 
/// All `StringRef` instances are invalidated when the string pool is dropped
 
pub(crate) struct StringPool {
 
    last: *mut StringPoolSlab,
 
}
 

	
 
impl StringPool {
 
    pub(crate) fn new() -> Self {
 
        // To have some stability we just turn a box into a raw ptr.
 
        let initial_slab = Box::new(StringPoolSlab::new(null_mut()));
 
        let initial_slab = Box::into_raw(initial_slab);
 
        StringPool{
 
            last: initial_slab,
 
        }
 
    }
 

	
 
    /// Interns a string to the `StringPool`, returning a reference to it. The
 
    /// pointer owned by `StringRef` is `'static` as the `StringPool` doesn't
 
    /// reallocate/deallocate until dropped (which only happens at the end of
 
    /// the program.)
 
    pub(crate) fn intern(&mut self, data: &[u8]) -> StringRef<'static> {
 
        // TODO: Large string allocations, if ever needed.
 
        let data_len = data.len();
 
        assert!(data_len <= SLAB_SIZE, "string is too large for slab");
 
        debug_assert!(std::str::from_utf8(data).is_ok(), "string to intern is not valid UTF-8");
 
        
 
        let mut last = unsafe{&mut *self.last};
 
        if data.len() > last.remaining {
 
            // Doesn't fit: allocate new slab
 
            self.alloc_new_slab();
 
            last = unsafe{&mut *self.last};
 
        }
 

	
 
        // Must fit now, compute hash and put in buffer
 
        debug_assert!(data_len <= last.remaining);
 
        let range_start = last.data.len();
 
        last.data.extend_from_slice(data);
 
        last.remaining -= data_len;
 
        debug_assert_eq!(range_start + data_len, last.data.len());
 

	
 
        unsafe {
 
            let start = last.data.as_ptr().offset(range_start as isize);
 
            StringRef{ data: start, length: data_len, _phantom: PhantomData }
 
        }
 
    }
 

	
 
    fn alloc_new_slab(&mut self) {
 
        let new_slab = Box::new(StringPoolSlab::new(self.last));
 
        let new_slab = Box::into_raw(new_slab);
 
        self.last = new_slab;
 
    }
 
}
 

	
 
impl Drop for StringPool {
 
    fn drop(&mut self) {
 
        let mut new_slab = self.last;
 
        while !new_slab.is_null() {
 
            let cur_slab = new_slab;
 
            unsafe {
 
                new_slab = (*cur_slab).prev;
 
                drop(Box::from_raw(cur_slab)); // take ownership back and deallocate the slab
 
            }
 
        }
 
    }
 
}
 

	
 
#[cfg(test)]
 
mod tests {
 
    use super::*;
 

	
 
    #[test]
 
    fn test_string_just_fits() {
 
        let large = "0".repeat(SLAB_SIZE);
 
        let mut pool = StringPool::new();
 
        let interned = pool.intern(large.as_bytes());
 
        assert_eq!(interned.as_str(), large);
 
    }
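
    // A small sketch: `StringRef` equality and hashing go by the referenced
    // bytes, so two separately interned copies of the same string compare
    // equal and hash identically.
    #[test]
    fn test_interned_refs_compare_by_content() {
        let mut pool = StringPool::new();
        let a = pool.intern(b"hello");
        let b = pool.intern(b"hello");
        assert_eq!(a.as_str(), "hello");
        assert!(a == b);

        let mut set = std::collections::HashSet::new();
        set.insert(a);
        assert!(set.contains(&b));
    }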
 

	
 
    #[test]
 
    #[should_panic]
 
    fn test_string_too_large() {
 
        let large = "0".repeat(SLAB_SIZE + 1);
 
        let mut pool = StringPool::new();
 
        let _interned = pool.intern(large.as_bytes());
 
    }
 

	
 
    #[test]
 
    fn test_lots_of_small_allocations() {
 
        const NUM_PER_SLAB: usize = 32;
 
        const NUM_SLABS: usize = 4;
 

	
 
        let to_intern = "0".repeat(SLAB_SIZE / NUM_PER_SLAB);
 
        let mut pool = StringPool::new();
 

	
 
        let mut last_slab = pool.last;
 
        let mut all_refs = Vec::new();
 

	
 
        // Fill up first slab
 
        for _alloc_idx in 0..NUM_PER_SLAB {
 
            let interned = pool.intern(to_intern.as_bytes());
 
            all_refs.push(interned);
 
            assert!(std::ptr::eq(last_slab, pool.last));
 
        }
 

	
 
        for _slab_idx in 0..NUM_SLABS-1 {
 
            for alloc_idx in 0..NUM_PER_SLAB {
 
                let interned = pool.intern(to_intern.as_bytes());
 
                all_refs.push(interned);
 

	
 
                if alloc_idx == 0 {
 
                    // First allocation produces a new slab
 
                    assert!(!std::ptr::eq(last_slab, pool.last));
 
                    last_slab = pool.last;
 
                } else {
 
                    assert!(std::ptr::eq(last_slab, pool.last));
 
                }
 
            }
 
        }
 

	
 
        // All strings are still correct
 
        for string_ref in all_refs {
 
            assert_eq!(string_ref.as_str(), to_intern);
 
        }
 
    }
 
}
 
src/ffi/pseudo_socket_api.rs
 
use super::*;
 

	
 
use core::ops::DerefMut;
 
use libc::{sockaddr, socklen_t};
 
use std::{
 
    collections::HashMap,
 
    ffi::c_void,
 
    net::SocketAddr,
 
    os::raw::c_int,
 
    sync::{Mutex, RwLock},
 
};
 
///////////////////////////////////////////////////////////////////
 

	
 
struct FdAllocator {
 
    next: Option<c_int>,
 
    freed: Vec<c_int>,
 
}
 
enum ConnectorComplexPhased {
 
    Setup { local: Option<SocketAddr>, peer: Option<SocketAddr> },
 
    Communication { putter: PortId, getter: PortId },
 
}
 
struct ConnectorComplex {
 
    // invariant: the setup/communication phase of `.connector` and the `.phased` variant are kept in lockstep.
 
    connector: Connector,
 
    phased: ConnectorComplexPhased,
 
}
 
#[derive(Default)]
 
struct CcMap {
 
    fd_to_cc: HashMap<c_int, Mutex<ConnectorComplex>>,
 
    fd_allocator: FdAllocator,
 
}
 
///////////////////////////////////////////////////////////////////
 
unsafe fn payload_from_raw(bytes_ptr: *const c_void, bytes_len: usize) -> Payload {
 
    let bytes = std::slice::from_raw_parts(bytes_ptr as *const u8, bytes_len);
 
    Payload::from(bytes)
 
}
 
unsafe fn libc_to_std_sockaddr(addr: *const sockaddr, addr_len: socklen_t) -> Option<SocketAddr> {
 
    os_socketaddr::OsSocketAddr::from_raw_parts(addr as _, addr_len as usize).into_addr()
 
}
 
impl Default for FdAllocator {
 
    fn default() -> Self {
 
        // negative FDs aren't used s.t. they are available for error signalling
 
        Self { next: Some(0), freed: vec![] }
 
    }
 
}
 
impl FdAllocator {
 
    fn alloc(&mut self) -> c_int {
 
        if let Some(fd) = self.freed.pop() {
 
            return fd;
 
        }
 
        if let Some(fd) = self.next {
 
            self.next = fd.checked_add(1);
 
            return fd;
 
        }
 
        panic!("No more Connector FDs to allocate!")
 
    }
 
    fn free(&mut self, fd: c_int) {
 
        self.freed.push(fd);
 
    }
 
}
 
lazy_static::lazy_static! {
 
    static ref CC_MAP: RwLock<CcMap> = Default::default();
 
    static ref TRIVIAL_PD: Arc<ProtocolDescription> = {
 
        Arc::new(ProtocolDescription::parse(b"").unwrap())
 
    };
 
}
 
impl ConnectorComplex {
 
    fn try_become_connected(&mut self) {
 
        match self.phased {
 
            ConnectorComplexPhased::Setup { local: Some(local), peer: Some(peer) } => {
 
                // complete setup
 
                let [putter, getter] =
 
                    self.connector.new_udp_mediator_component(local, peer).unwrap();
 
                self.connector.connect(None).unwrap();
 
                self.phased = ConnectorComplexPhased::Communication { putter, getter }
 
            }
 
            _ => {} // setup incomplete
 
        }
 
    }
 
}
 
/////////////////////////////////
 
#[no_mangle]
 
pub extern "C" fn rw_socket(_domain: c_int, _type: c_int, _protocol: c_int) -> c_int {
 
    // get writer lock
 
    let mut w = if let Ok(w) = CC_MAP.write() { w } else { return RW_LOCK_POISONED };
 

	
 
    let fd = w.fd_allocator.alloc();
 
    let cc = ConnectorComplex {
 
        connector: Connector::new(
 
            Box::new(crate::DummyLogger),
 
            TRIVIAL_PD.clone(),
 
            Connector::random_id(),
 
        ),
 
        phased: ConnectorComplexPhased::Setup { local: None, peer: None },
 
    };
 
    w.fd_to_cc.insert(fd, Mutex::new(cc));
 
    fd
 
}
 
#[no_mangle]
 
pub extern "C" fn rw_close(fd: c_int, _how: c_int) -> c_int {
 
    // ignoring HOW
 
    // get writer lock
 
    let mut w = if let Ok(w) = CC_MAP.write() { w } else { return RW_LOCK_POISONED };
 
    if w.fd_to_cc.remove(&fd).is_some() {
 
        w.fd_allocator.free(fd);
 
        RW_OK
 
    } else {
 
        RW_CLOSE_FAIL
 
    }
 
}
 
#[no_mangle]
 
pub unsafe extern "C" fn rw_bind(fd: c_int, addr: *const sockaddr, addr_len: socklen_t) -> c_int {
 
    // assuming _domain is AF_INET and _type is SOCK_DGRAM
 
    let addr = match libc_to_std_sockaddr(addr, addr_len) {
 
        Some(addr) => addr,
 
        _ => return RW_BAD_SOCKADDR,
 
    };
 
    // get outer reader, inner writer locks
 
    let r = if let Ok(r) = CC_MAP.read() { r } else { return RW_LOCK_POISONED };
 
    let cc = if let Some(cc) = r.fd_to_cc.get(&fd) { cc } else { return RW_BAD_FD };
 
    let mut cc = if let Ok(cc) = cc.lock() { cc } else { return RW_LOCK_POISONED };
 
    match &mut cc.phased {
 
        ConnectorComplexPhased::Communication { .. } => RW_WRONG_STATE,
 
        ConnectorComplexPhased::Setup { local, .. } => {
 
            *local = Some(addr);
 
            cc.try_become_connected();
 
            RW_OK
 
        }
 
    }
 
}
 
#[no_mangle]
 
pub unsafe extern "C" fn rw_connect(
 
    fd: c_int,
 
    addr: *const sockaddr,
 
    addr_len: socklen_t,
 
) -> c_int {
 
    let addr = match libc_to_std_sockaddr(addr, addr_len) {
 
        Some(addr) => addr,
 
        _ => return RW_BAD_SOCKADDR,
 
    };
 
    // assuming _domain is AF_INET and _type is SOCK_DGRAM
 
    // get outer reader, inner writer locks
 
    let r = if let Ok(r) = CC_MAP.read() { r } else { return RW_LOCK_POISONED };
 
    let cc = if let Some(cc) = r.fd_to_cc.get(&fd) { cc } else { return RW_BAD_FD };
 
    let mut cc = if let Ok(cc) = cc.lock() { cc } else { return RW_LOCK_POISONED };
 
    match &mut cc.phased {
 
        ConnectorComplexPhased::Communication { .. } => RW_WRONG_STATE,
 
        ConnectorComplexPhased::Setup { peer, .. } => {
 
            *peer = Some(addr);
 
            cc.try_become_connected();
 
            RW_OK
 
        }
 
    }
 
}
 
#[no_mangle]
 
pub unsafe extern "C" fn rw_send(
 
    fd: c_int,
 
    bytes_ptr: *const c_void,
 
    bytes_len: usize,
 
    _flags: c_int,
 
) -> isize {
 
    // ignoring flags
 
    // get outer reader, inner writer locks
 
    let r = if let Ok(r) = CC_MAP.read() { r } else { return RW_LOCK_POISONED as isize };
 
    let cc = if let Some(cc) = r.fd_to_cc.get(&fd) { cc } else { return RW_BAD_FD as isize };
 
    let mut cc = if let Ok(cc) = cc.lock() { cc } else { return RW_LOCK_POISONED as isize };
 
    let ConnectorComplex { connector, phased } = cc.deref_mut();
 
    match phased {
 
        ConnectorComplexPhased::Setup { .. } => RW_WRONG_STATE as isize,
 
        ConnectorComplexPhased::Communication { putter, .. } => {
 
            let payload = payload_from_raw(bytes_ptr, bytes_len);
 
            connector.put(*putter, payload).unwrap();
 
            connector.sync(None).unwrap();
 
            bytes_len as isize
 
        }
 
    }
 
}
 
#[no_mangle]
 
pub unsafe extern "C" fn rw_recv(
 
    fd: c_int,
 
    bytes_ptr: *mut c_void,
 
    bytes_len: usize,
 
    _flags: c_int,
 
) -> isize {
 
    // ignoring flags
 
    // get outer reader, inner writer locks
 
    let r = if let Ok(r) = CC_MAP.read() { r } else { return RW_LOCK_POISONED as isize };
 
    let cc = if let Some(cc) = r.fd_to_cc.get(&fd) { cc } else { return RW_BAD_FD as isize };
 
    let mut cc = if let Ok(cc) = cc.lock() { cc } else { return RW_LOCK_POISONED as isize };
 
    let ConnectorComplex { connector, phased } = cc.deref_mut();
 
    match phased {
 
        ConnectorComplexPhased::Setup { .. } => RW_WRONG_STATE as isize,
 
        ConnectorComplexPhased::Communication { getter, .. } => {
 
            connector.get(*getter).unwrap();
 
            connector.sync(None).unwrap();
 
            let slice = connector.gotten(*getter).unwrap().as_slice();
 
            if !bytes_ptr.is_null() {
 
                let cpy_msg_bytes = slice.len().min(bytes_len);
 
                std::ptr::copy_nonoverlapping(slice.as_ptr(), bytes_ptr as *mut u8, cpy_msg_bytes);
 
            }
 
            slice.len() as isize
 
        }
 
    }
 
}
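
// A minimal sketch (as a test) of the FdAllocator above: fds are handed out in
// increasing order and freed fds are recycled before a new number is used.
#[cfg(test)]
mod tests {
    use super::FdAllocator;

    #[test]
    fn fd_allocator_recycles_freed_fds() {
        let mut alloc = FdAllocator::default();
        assert_eq!(alloc.alloc(), 0);
        assert_eq!(alloc.alloc(), 1);
        alloc.free(0);
        assert_eq!(alloc.alloc(), 0); // the freed fd is reused first
        assert_eq!(alloc.alloc(), 2); // then counting continues
    }
}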
src/lib.rs
 
#[macro_use]
 
mod macros;
 

	
 
mod common;
 
mod protocol;
 
mod runtime;
 
mod collections;
 

	
 
pub use common::{ConnectorId, EndpointPolarity, Payload, Polarity, PortId};
 
pub use protocol::ProtocolDescription;
 
pub use runtime::{error, Connector, DummyLogger, FileLogger, VecLogger};
 

	
 
// TODO: Remove when not benchmarking
 
pub use protocol::input_source::InputSource;
 
pub use protocol::ast::Heap;
 
pub use protocol::lexer::Lexer;
 

	
 
#[cfg(feature = "ffi")]
 
pub mod ffi;
src/protocol/ast.rs
 
// TODO: @cleanup, rigorous cleanup of dead code and silly object-oriented
 
//  trait impls where I deem them unfit.
 

	
 
use std::fmt;
 
use std::fmt::{Debug, Display, Formatter};
 
use std::ops::{Index, IndexMut};
 

	
 
use super::arena::{Arena, Id};
 
use crate::collections::StringRef;
 
use crate::protocol::input_source::InputSpan;
 

	
 
/// Helper macro that defines a type alias for an AST element ID. In this case
 
/// only used to alias the `Id<T>` types.
 
macro_rules! define_aliased_ast_id {
 
    // Variant where we just defined the alias, without any indexing
 
    ($name:ident, $parent:ty) => {
 
        pub type $name = $parent;
 
    };
 
    // Variant where we define the type, and the Index and IndexMut traits
 
    (
 
        $name:ident, $parent:ty, 
 
        index($indexed_type:ty, $indexed_arena:ident)
 
    ) => {
 
        define_aliased_ast_id!($name, $parent);
 
        impl Index<$name> for Heap {
 
            type Output = $indexed_type;
 
            fn index(&self, index: $name) -> &Self::Output {
 
                &self.$indexed_arena[index]
 
            }
 
        }
 

	
 
        impl IndexMut<$name> for Heap {
 
            fn index_mut(&mut self, index: $name) -> &mut Self::Output {
 
                &mut self.$indexed_arena[index]
 
            }
 
        }
 
    };
 
    // Variant where we define type, Index(Mut) traits and an allocation function
 
    (
 
        $name:ident, $parent:ty,
 
        index($indexed_type:ty, $indexed_arena:ident),
 
        alloc($fn_name:ident)
 
    ) => {
 
        define_aliased_ast_id!($name, $parent, index($indexed_type, $indexed_arena));
 
        impl Heap {
 
            pub fn $fn_name(&mut self, f: impl FnOnce($name) -> $indexed_type) -> $name {
 
                self.$indexed_arena.alloc_with_id(|id| f(id))
 
            }
 
        }
 
    };
 
}
 

	
 
/// Helper macro that defines a wrapper type for a particular variant of an AST
 
/// element ID. Only used to define single-wrapping IDs.
 
macro_rules! define_new_ast_id {
 
    // Variant where we just defined the new type, without any indexing
 
    ($name:ident, $parent:ty) => {
 
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 
        pub struct $name (pub(crate) $parent);
 

	
 
        impl $name {
 
            pub(crate) fn new_invalid() -> Self     { Self(<$parent>::new_invalid()) }
 
            pub(crate) fn is_invalid(&self) -> bool { self.0.is_invalid() }
 
            pub fn upcast(self) -> $parent          { self.0 }
 
        }
 
    };
 
    // Variant where we define the type, and the Index and IndexMut traits
 
    (
 
        $name:ident, $parent:ty, 
 
        index($indexed_type:ty, $wrapper_type:path, $indexed_arena:ident)
 
    ) => {
 
        define_new_ast_id!($name, $parent);
 
        impl Index<$name> for Heap {
 
            type Output = $indexed_type;
 
            fn index(&self, index: $name) -> &Self::Output {
 
                if let $wrapper_type(v) = &self.$indexed_arena[index.0] {
 
                    v
 
                } else {
 
                    unreachable!()
 
                }
 
            }
 
        }
 

	
 
        impl IndexMut<$name> for Heap {
 
            fn index_mut(&mut self, index: $name) -> &mut Self::Output {
 
                if let $wrapper_type(v) = &mut self.$indexed_arena[index.0] {
 
                    v
 
                } else {
 
                    unreachable!()
 
                }
 
            }
 
        }
 
    };
 
    // Variant where we define the type, the Index and IndexMut traits, and an allocation function
 
    (
 
        $name:ident, $parent:ty, 
 
        index($indexed_type:ty, $wrapper_type:path, $indexed_arena:ident),
 
        alloc($fn_name:ident)
 
    ) => {
 
        define_new_ast_id!($name, $parent, index($indexed_type, $wrapper_type, $indexed_arena));
 
        impl Heap {
 
            pub fn $fn_name(&mut self, f: impl FnOnce($name) -> $indexed_type) -> $name {
 
                $name(
 
                    self.$indexed_arena.alloc_with_id(|id| {
 
                        $wrapper_type(f($name(id)))
 
                    })
 
                )
 
            }
 
        }
 
    }
 
}
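
// As a rough sketch, an invocation such as
//     define_new_ast_id!(LocalId, VariableId, index(Local, Variable::Local, variables), alloc(alloc_local));
// expands (approximately) to:
//     pub struct LocalId(pub(crate) VariableId);       // wrapper ID with new_invalid/is_invalid/upcast
//     impl Index<LocalId> for Heap { ... }              // unwraps the Variable::Local variant, or panics
//     impl IndexMut<LocalId> for Heap { ... }
//     impl Heap {
//         pub fn alloc_local(&mut self, f: impl FnOnce(LocalId) -> Local) -> LocalId { ... }
//     }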
 

	
 
define_aliased_ast_id!(RootId, Id<Root>, index(Root, protocol_descriptions), alloc(alloc_protocol_description));
 
define_aliased_ast_id!(PragmaId, Id<Pragma>, index(Pragma, pragmas), alloc(alloc_pragma));
 
define_aliased_ast_id!(ImportId, Id<Import>, index(Import, imports), alloc(alloc_import));
 
define_aliased_ast_id!(ParserTypeId, Id<ParserType>, index(ParserType, parser_types), alloc(alloc_parser_type));
 

	
 
define_aliased_ast_id!(VariableId, Id<Variable>, index(Variable, variables));
 
define_new_ast_id!(ParameterId, VariableId, index(Parameter, Variable::Parameter, variables), alloc(alloc_parameter));
 
define_new_ast_id!(LocalId, VariableId, index(Local, Variable::Local, variables), alloc(alloc_local));
 

	
 
define_aliased_ast_id!(DefinitionId, Id<Definition>, index(Definition, definitions));
 
define_new_ast_id!(StructDefinitionId, DefinitionId, index(StructDefinition, Definition::Struct, definitions), alloc(alloc_struct_definition));
 
define_new_ast_id!(EnumDefinitionId, DefinitionId, index(EnumDefinition, Definition::Enum, definitions), alloc(alloc_enum_definition));
 
define_new_ast_id!(UnionDefinitionId, DefinitionId, index(UnionDefinition, Definition::Union, definitions), alloc(alloc_union_definition));
 
define_new_ast_id!(ComponentDefinitionId, DefinitionId, index(ComponentDefinition, Definition::Component, definitions), alloc(alloc_component_definition));
 
define_new_ast_id!(FunctionDefinitionId, DefinitionId, index(FunctionDefinition, Definition::Function, definitions), alloc(alloc_function_definition));
 

	
 
define_aliased_ast_id!(StatementId, Id<Statement>, index(Statement, statements));
 
define_new_ast_id!(BlockStatementId, StatementId, index(BlockStatement, Statement::Block, statements), alloc(alloc_block_statement));
 
define_new_ast_id!(LocalStatementId, StatementId, index(LocalStatement, Statement::Local, statements), alloc(alloc_local_statement));
 
define_new_ast_id!(MemoryStatementId, LocalStatementId);
 
define_new_ast_id!(ChannelStatementId, LocalStatementId);
 
define_new_ast_id!(LabeledStatementId, StatementId, index(LabeledStatement, Statement::Labeled, statements), alloc(alloc_labeled_statement));
 
define_new_ast_id!(IfStatementId, StatementId, index(IfStatement, Statement::If, statements), alloc(alloc_if_statement));
 
define_new_ast_id!(EndIfStatementId, StatementId, index(EndIfStatement, Statement::EndIf, statements), alloc(alloc_end_if_statement));
 
define_new_ast_id!(WhileStatementId, StatementId, index(WhileStatement, Statement::While, statements), alloc(alloc_while_statement));
 
define_new_ast_id!(EndWhileStatementId, StatementId, index(EndWhileStatement, Statement::EndWhile, statements), alloc(alloc_end_while_statement));
 
define_new_ast_id!(BreakStatementId, StatementId, index(BreakStatement, Statement::Break, statements), alloc(alloc_break_statement));
 
define_new_ast_id!(ContinueStatementId, StatementId, index(ContinueStatement, Statement::Continue, statements), alloc(alloc_continue_statement));
 
define_new_ast_id!(SynchronousStatementId, StatementId, index(SynchronousStatement, Statement::Synchronous, statements), alloc(alloc_synchronous_statement));
 
define_new_ast_id!(EndSynchronousStatementId, StatementId, index(EndSynchronousStatement, Statement::EndSynchronous, statements), alloc(alloc_end_synchronous_statement));
 
define_new_ast_id!(ReturnStatementId, StatementId, index(ReturnStatement, Statement::Return, statements), alloc(alloc_return_statement));
 
define_new_ast_id!(GotoStatementId, StatementId, index(GotoStatement, Statement::Goto, statements), alloc(alloc_goto_statement));
 
define_new_ast_id!(NewStatementId, StatementId, index(NewStatement, Statement::New, statements), alloc(alloc_new_statement));
 
define_new_ast_id!(ExpressionStatementId, StatementId, index(ExpressionStatement, Statement::Expression, statements), alloc(alloc_expression_statement));
 

	
 
define_aliased_ast_id!(ExpressionId, Id<Expression>, index(Expression, expressions));
 
define_new_ast_id!(AssignmentExpressionId, ExpressionId, index(AssignmentExpression, Expression::Assignment, expressions), alloc(alloc_assignment_expression));
 
define_new_ast_id!(BindingExpressionId, ExpressionId, index(BindingExpression, Expression::Binding, expressions), alloc(alloc_binding_expression));
 
define_new_ast_id!(ConditionalExpressionId, ExpressionId, index(ConditionalExpression, Expression::Conditional, expressions), alloc(alloc_conditional_expression));
 
define_new_ast_id!(BinaryExpressionId, ExpressionId, index(BinaryExpression, Expression::Binary, expressions), alloc(alloc_binary_expression));
 
define_new_ast_id!(UnaryExpressionId, ExpressionId, index(UnaryExpression, Expression::Unary, expressions), alloc(alloc_unary_expression));
 
define_new_ast_id!(IndexingExpressionId, ExpressionId, index(IndexingExpression, Expression::Indexing, expressions), alloc(alloc_indexing_expression));
 
define_new_ast_id!(SlicingExpressionId, ExpressionId, index(SlicingExpression, Expression::Slicing, expressions), alloc(alloc_slicing_expression));
 
define_new_ast_id!(SelectExpressionId, ExpressionId, index(SelectExpression, Expression::Select, expressions), alloc(alloc_select_expression));
 
define_new_ast_id!(LiteralExpressionId, ExpressionId, index(LiteralExpression, Expression::Literal, expressions), alloc(alloc_literal_expression));
 
define_new_ast_id!(CallExpressionId, ExpressionId, index(CallExpression, Expression::Call, expressions), alloc(alloc_call_expression));
 
define_new_ast_id!(VariableExpressionId, ExpressionId, index(VariableExpression, Expression::Variable, expressions), alloc(alloc_variable_expression));
 

	
 
// TODO: @cleanup - pub qualifiers can be removed once done
 
#[derive(Debug)]
 
pub struct Heap {
 
    // Root arena, contains the entry point for different modules. Each root
 
    // contains lists of IDs that correspond to the other arenas.
 
    pub(crate) protocol_descriptions: Arena<Root>,
 
    // Contents of a file, these are the elements the `Root` elements refer to
 
    pragmas: Arena<Pragma>,
 
    pub(crate) imports: Arena<Import>,
 
    pub(crate) parser_types: Arena<ParserType>,
 
    pub(crate) variables: Arena<Variable>,
 
    pub(crate) definitions: Arena<Definition>,
 
    pub(crate) statements: Arena<Statement>,
 
    pub(crate) expressions: Arena<Expression>,
 
}
 

	
 
impl Heap {
 
    pub fn new() -> Heap {
 
        Heap {
 
            // string_alloc: StringAllocator::new(),
 
            protocol_descriptions: Arena::new(),
 
            pragmas: Arena::new(),
 
            imports: Arena::new(),
 
            parser_types: Arena::new(),
 
            variables: Arena::new(),
 
            definitions: Arena::new(),
 
            statements: Arena::new(),
 
            expressions: Arena::new(),
 
        }
 
    }
 
    pub fn alloc_memory_statement(
 
        &mut self,
 
        f: impl FnOnce(MemoryStatementId) -> MemoryStatement,
 
    ) -> MemoryStatementId {
 
        MemoryStatementId(LocalStatementId(self.statements.alloc_with_id(|id| {
 
            Statement::Local(LocalStatement::Memory(
 
                f(MemoryStatementId(LocalStatementId(id)))
 
            ))
 
        })))
 
    }
 
    pub fn alloc_channel_statement(
 
        &mut self,
 
        f: impl FnOnce(ChannelStatementId) -> ChannelStatement,
 
    ) -> ChannelStatementId {
 
        ChannelStatementId(LocalStatementId(self.statements.alloc_with_id(|id| {
 
            Statement::Local(LocalStatement::Channel(
 
                f(ChannelStatementId(LocalStatementId(id)))
 
            ))
 
        })))
 
    }
 
}
 

	
 
impl Index<MemoryStatementId> for Heap {
 
    type Output = MemoryStatement;
 
    fn index(&self, index: MemoryStatementId) -> &Self::Output {
 
        self.statements[index.0.0].as_memory()
 
    }
 
}
 

	
 
impl Index<ChannelStatementId> for Heap {
 
    type Output = ChannelStatement;
 
    fn index(&self, index: ChannelStatementId) -> &Self::Output {
 
        self.statements[index.0.0].as_channel()
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct Root {
 
    pub this: RootId,
 
    // Phase 1: parser
 
    // pub position: InputPosition,
 
    pub pragmas: Vec<PragmaId>,
 
    pub imports: Vec<ImportId>,
 
    pub definitions: Vec<DefinitionId>,
 
}
 

	
 
impl Root {
 
    // TODO: @Cleanup
 
    pub fn get_definition_ident(&self, h: &Heap, id: &[u8]) -> Option<DefinitionId> {
 
        for &def in self.definitions.iter() {
 
            if h[def].identifier().value.as_bytes() == id {
 
                return Some(def);
 
            }
 
        }
 
        None
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Pragma {
 
    Version(PragmaVersion),
 
    Module(PragmaModule),
 
}
 

	
 
impl Pragma {
 
    pub(crate) fn as_module(&self) -> &PragmaModule {
 
        match self {
 
            Pragma::Module(pragma) => pragma,
 
            _ => unreachable!("Tried to obtain {:?} as PragmaModule", self),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct PragmaVersion {
 
    pub this: PragmaId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of full pragma
 
    pub version: u64,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct PragmaModule {
 
    pub this: PragmaId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of full pragma
 
    pub value: Identifier,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Import {
 
    Module(ImportModule),
 
    Symbols(ImportSymbols)
 
}
 

	
 
impl Import {
 
    pub(crate) fn span(&self) -> InputSpan {
 
        match self {
 
            Import::Module(v) => v.span,
 
            Import::Symbols(v) => v.span,
 
        }
 
    }
 

	
 
    pub(crate) fn as_module(&self) -> &ImportModule {
 
        match self {
 
            Import::Module(m) => m,
 
            _ => unreachable!("Unable to cast 'Import' to 'ImportModule'")
 
        }
 
    }
 
    pub(crate) fn as_symbols(&self) -> &ImportSymbols {
 
        match self {
 
            Import::Symbols(m) => m,
 
            _ => unreachable!("Unable to cast 'Import' to 'ImportSymbols'")
 
        }
 
    }
 
    pub(crate) fn as_symbols_mut(&mut self) -> &mut ImportSymbols {
 
        match self {
 
            Import::Symbols(m) => m,
 
            _ => unreachable!("Unable to cast 'Import' to 'ImportSymbols'")
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ImportModule {
 
    pub this: ImportId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub module: Identifier,
 
    pub alias: Identifier,
 
    pub module_id: RootId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct AliasedSymbol {
 
    pub name: Identifier,
 
    pub alias: Option<Identifier>,
 
    pub definition_id: DefinitionId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ImportSymbols {
 
    pub this: ImportId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub module: Identifier,
 
    pub module_id: RootId,
 
    pub symbols: Vec<AliasedSymbol>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct Identifier {
 
    pub span: InputSpan,
 
    pub value: StringRef<'static>,
 
}
 

	
 
impl PartialEq for Identifier {
 
    fn eq(&self, other: &Self) -> bool {
 
        return self.value == other.value
 
    }
 
}
 

	
 
impl Display for Identifier {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.value.as_str())
 
    }
 
}
 

	
 
#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)]
 
pub enum ParserTypeVariant {
 
    // Basic builtin
 
    Message,
 
    Bool,
 
    UInt8, UInt16, UInt32, UInt64,
 
    SInt8, SInt16, SInt32, SInt64,
 
    Character, String,
 
    // Literals (need to get concrete builtin type during typechecking)
 
    IntegerLiteral,
 
    // Marker for inference
 
    Inferred,
 
    // Builtins expecting one subsequent type
 
    Array,
 
    Input,
 
    Output,
 
    // User-defined types
 
    PolymorphicArgument(DefinitionId, usize), // usize = index into polymorphic variables
 
    Definition(DefinitionId, usize), // usize = number of following subtypes
 
}
 

	
 
impl ParserTypeVariant {
 
    pub(crate) fn num_embedded(&self) -> usize {
 
        match self {
 
            x if *x <= ParserTypeVariant::Inferred => 0,
 
            x if *x <= ParserTypeVariant::Output => 1,
 
            ParserTypeVariant::PolymorphicArgument(_, _) => 0,
 
            ParserTypeVariant::Definition(_, num) => *num,
 
            _ => { debug_assert!(false); 0 },
 
        }
 
    }
 
}
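
// For example: `Array`, `Input` and `Output` each expect one embedded type,
// `Definition(id, n)` expects `n`, and everything up to and including
// `Inferred` expects none. The `<=` guards rely on the declaration order of
// the variants through the derived `PartialOrd`/`Ord`.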
 

	
 
pub struct ParserTypeElement {
 
    // TODO: @cleanup, do we ever need the span of a user-defined type after
 
    //  constructing it?
 
    pub full_span: InputSpan, // full span of type, including any polymorphic arguments
 
    pub variant: ParserTypeVariant,
 
}
 

	
 
/// ParserType is a specification of a type during the parsing phase and initial
 
/// linker/validator phase of the compilation process. These types may be
 
/// (partially) inferred or represent literals (e.g. an integer whose byte size is
 
/// not yet determined).
 
#[derive(Debug, Clone)]
 
pub struct ParserType {
 
    pub elements: Vec<ParserTypeElement>
 
}
 

	
 
/// Specifies whether the symbolic type points to an actual user-defined type,
 
/// or whether it points to a polymorphic argument within the definition (e.g.
 
/// a defined variable `T var` within a function `int func<T>()`
 
#[derive(Debug, Clone)]
 
pub enum SymbolicParserTypeVariant {
 
    Definition(DefinitionId),
 
    // TODO: figure out if I need the DefinitionId here
 
    PolyArg(DefinitionId, usize), // index of polyarg in the definition
 
}
 

	
 
/// ConcreteType is the representation of a type after resolving symbolic types
 
/// and performing type inference
 
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
 
pub enum ConcreteTypePart {
 
    // Markers for the use of polymorphic types within a procedure's body that
 
    // refer to polymorphic variables on the procedure's definition. Different
 
    // from markers in the `InferenceType`, these will not contain nested types.
 
    Marker(usize),
 
    // Special types (cannot be explicitly constructed by the programmer)
 
    Void,
 
    // Builtin types without nested types
 
    Message,
 
    Bool,
 
    UInt8, UInt16, UInt32, UInt64,

    SInt8, SInt16, SInt32, SInt64,

    Character, String,
 
    // Builtin types with one nested type
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // User defined type with any number of nested types
 
    Instance(DefinitionId, usize),
 
}
 

	
 
#[derive(Debug, Clone, Eq, PartialEq)]
 
pub struct ConcreteType {
 
    pub(crate) parts: Vec<ConcreteTypePart>
 
}
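
// As a sketch of the intended encoding (a type tree flattened in prefix order,
// hypothetical identifiers): an array of unsigned bytes would be stored as
// `parts = [Array, UInt8]`, and a user-defined `Pair<bool, u32>` as
// `parts = [Instance(pair_definition_id, 2), Bool, UInt32]`.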
 

	
 
impl Default for ConcreteType {
 
    fn default() -> Self {
 
        Self{ parts: Vec::new() }
 
    }
 
}
 

	
 
impl ConcreteType {
 
    pub(crate) fn has_marker(&self) -> bool {
 
        self.parts
 
            .iter()
 
            .any(|p| {
 
                if let ConcreteTypePart::Marker(_) = p { true } else { false }
 
            })
 
    }
 
}
 

	
 
// TODO: Remove at some point
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum PrimitiveType {
 
    Unassigned,
 
    Input,
 
    Output,
 
    Message,
 
    Boolean,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub struct Type {
 
    pub primitive: PrimitiveType,
 
    pub array: bool,
 
}
 

	
 
#[allow(dead_code)]
 
impl Type {
 
    pub const UNASSIGNED: Type = Type { primitive: PrimitiveType::Unassigned, array: false };
 

	
 
    pub const INPUT: Type = Type { primitive: PrimitiveType::Input, array: false };
 
    pub const OUTPUT: Type = Type { primitive: PrimitiveType::Output, array: false };
 
    pub const MESSAGE: Type = Type { primitive: PrimitiveType::Message, array: false };
 
    pub const BOOLEAN: Type = Type { primitive: PrimitiveType::Boolean, array: false };
 
    pub const BYTE: Type = Type { primitive: PrimitiveType::Byte, array: false };
 
    pub const SHORT: Type = Type { primitive: PrimitiveType::Short, array: false };
 
    pub const INT: Type = Type { primitive: PrimitiveType::Int, array: false };
 
    pub const LONG: Type = Type { primitive: PrimitiveType::Long, array: false };
 

	
 
    pub const INPUT_ARRAY: Type = Type { primitive: PrimitiveType::Input, array: true };
 
    pub const OUTPUT_ARRAY: Type = Type { primitive: PrimitiveType::Output, array: true };
 
    pub const MESSAGE_ARRAY: Type = Type { primitive: PrimitiveType::Message, array: true };
 
    pub const BOOLEAN_ARRAY: Type = Type { primitive: PrimitiveType::Boolean, array: true };
 
    pub const BYTE_ARRAY: Type = Type { primitive: PrimitiveType::Byte, array: true };
 
    pub const SHORT_ARRAY: Type = Type { primitive: PrimitiveType::Short, array: true };
 
    pub const INT_ARRAY: Type = Type { primitive: PrimitiveType::Int, array: true };
 
    pub const LONG_ARRAY: Type = Type { primitive: PrimitiveType::Long, array: true };
 
}
 

	
 
impl Display for Type {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        match &self.primitive {
 
            PrimitiveType::Unassigned => {
 
                write!(f, "unassigned")?;
 
            }
 
            PrimitiveType::Input => {
 
                write!(f, "in")?;
 
            }
 
            PrimitiveType::Output => {
 
                write!(f, "out")?;
 
            }
 
            PrimitiveType::Message => {
 
                write!(f, "msg")?;
 
            }
 
            PrimitiveType::Boolean => {
 
                write!(f, "boolean")?;
 
            }
 
            PrimitiveType::Byte => {
 
                write!(f, "byte")?;
 
            }
 
            PrimitiveType::Short => {
 
                write!(f, "short")?;
 
            }
 
            PrimitiveType::Int => {
 
                write!(f, "int")?;
 
            }
 
            PrimitiveType::Long => {
 
                write!(f, "long")?;
 
            }
 
        }
 
        if self.array {
 
            write!(f, "[]")
 
        } else {
 
            Ok(())
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Field {
 
    Length,
 
    Symbolic(FieldSymbolic),
 
}
 
impl Field {
 
    pub fn is_length(&self) -> bool {
 
        match self {
 
            Field::Length => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    pub fn as_symbolic(&self) -> &FieldSymbolic {
 
        match self {
 
            Field::Symbolic(v) => v,
 
            _ => unreachable!("attempted to get Field::Symbolic from {:?}", self)
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct FieldSymbolic {
 
    // Phase 1: Parser
 
    pub(crate) identifier: Identifier,
 
    // Phase 3: Typing
 
    pub(crate) definition: Option<DefinitionId>,
 
    pub(crate) field_idx: usize,
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub enum Scope {
 
    Definition(DefinitionId),
 
    Regular(BlockStatementId),
 
    Synchronous((SynchronousStatementId, BlockStatementId)),
 
}
 

	
 
impl Scope {
 
    pub fn is_block(&self) -> bool {
 
        match &self {
 
            Scope::Definition(_) => false,
 
            Scope::Regular(_) => true,
 
            Scope::Synchronous(_) => true,
 
        }
 
    }
 
    pub fn to_block(&self) -> BlockStatementId {
 
        match &self {
 
            Scope::Regular(id) => *id,
 
            Scope::Synchronous((_, id)) => *id,
 
            _ => panic!("unable to get BlockStatement from Scope")
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Variable {
 
    Parameter(Parameter),
 
    Local(Local),
 
}
 

	
 
impl Variable {
 
    pub fn identifier(&self) -> &Identifier {
 
        match self {
 
            Variable::Parameter(var) => &var.identifier,
 
            Variable::Local(var) => &var.identifier,
 
        }
 
    }
 
    pub fn is_parameter(&self) -> bool {
 
        match self {
 
            Variable::Parameter(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub fn as_parameter(&self) -> &Parameter {
 
        match self {
 
            Variable::Parameter(result) => result,
 
            _ => panic!("Unable to cast `Variable` to `Parameter`"),
 
        }
 
    }
 
    pub fn as_local(&self) -> &Local {
 
        match self {
 
            Variable::Local(result) => result,
 
            _ => panic!("Unable to cast `Variable` to `Local`"),
 
        }
 
    }
 
    pub fn as_local_mut(&mut self) -> &mut Local {
 
        match self {
 
            Variable::Local(result) => result,
 
            _ => panic!("Unable to cast 'Variable' to 'Local'"),
 
        }
 
    }
 
}
 

	
 
/// TODO: Remove distinction between parameter/local and add an enum to indicate
 
///     the distinction between the two
 
#[derive(Debug, Clone)]
 
pub struct Parameter {
 
    pub this: ParameterId,
 
    // Phase 2: parser
 
    pub span: InputSpan,
 
    pub parser_type: ParserType,
 
    pub identifier: Identifier,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct Local {
 
    pub this: LocalId,
 
    // Phase 1: parser
 
    pub identifier: Identifier,
 
    pub parser_type: ParserType,
 
    // Phase 2: linker
 
    pub relative_pos_in_block: u32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Definition {
 
    Struct(StructDefinition),
 
    Enum(EnumDefinition),
 
    Union(UnionDefinition),
 
    Component(ComponentDefinition),
 
    Function(FunctionDefinition),
 
}
 

	
 
impl Definition {
 
    pub fn is_struct(&self) -> bool {
 
        match self {
 
            Definition::Struct(_) => true,
 
            _ => false
 
        }
 
    }
 
    pub(crate) fn as_struct(&self) -> &StructDefinition {
 
        match self {
 
            Definition::Struct(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'StructDefinition'"),
 
        }
 
    }
 
    pub(crate) fn as_struct_mut(&mut self) -> &mut StructDefinition {
 
        match self {
 
            Definition::Struct(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'StructDefinition'"),
 
        }
 
    }
 
    pub fn is_enum(&self) -> bool {
 
        match self {
 
            Definition::Enum(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub(crate) fn as_enum(&self) -> &EnumDefinition {
 
        match self {
 
            Definition::Enum(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'EnumDefinition'"),
 
        }
 
    }
 
    pub(crate) fn as_enum_mut(&mut self) -> &mut EnumDefinition {
 
        match self {
 
            Definition::Enum(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'EnumDefinition'"),
 
        }
 
    }
 
    pub fn is_union(&self) -> bool {
 
        match self {
 
            Definition::Union(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub(crate) fn as_union(&self) -> &UnionDefinition {
 
        match self {
 
            Definition::Union(result) => result, 
 
            _ => panic!("Unable to cast 'Definition' to 'UnionDefinition'"),
 
        }
 
    }
 
    pub(crate) fn as_union_mut(&mut self) -> &mut UnionDefinition {
 
        match self {
 
            Definition::Union(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'UnionDefinition'"),
 
        }
 
    }
 
    pub fn is_component(&self) -> bool {
 
        match self {
 
            Definition::Component(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub(crate) fn as_component(&self) -> &ComponentDefinition {
 
        match self {
 
            Definition::Component(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `Component`"),
 
        }
 
    }
 
    pub(crate) fn as_component_mut(&mut self) -> &mut ComponentDefinition {
 
        match self {
 
            Definition::Component(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `Component`"),
 
        }
 
    }
 
    pub fn is_function(&self) -> bool {
 
        match self {
 
            Definition::Function(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub(crate) fn as_function(&self) -> &FunctionDefinition {
 
        match self {
 
            Definition::Function(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `Function`"),
 
        }
 
    }
 
    pub(crate) fn as_function_mut(&mut self) -> &mut FunctionDefinition {
 
        match self {
 
            Definition::Function(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `Function`"),
 
        }
 
    }
 
    pub fn defined_in(&self) -> RootId {
 
        match self {
 
            Definition::Struct(def) => def.defined_in,
 
            Definition::Enum(def) => def.defined_in,
 
            Definition::Union(def) => def.defined_in,
 
            Definition::Component(def) => def.defined_in,
 
            Definition::Function(def) => def.defined_in,
 
        }
 
    }
 
    pub fn identifier(&self) -> &Identifier {
 
        match self {
 
            Definition::Struct(def) => &def.identifier,
 
            Definition::Enum(def) => &def.identifier,
 
            Definition::Union(def) => &def.identifier,
 
            Definition::Component(def) => &def.identifier,
 
            Definition::Function(def) => &def.identifier,
 
        }
 
    }
 
    pub fn parameters(&self) -> &Vec<ParameterId> {
 
        // TODO: Fix this
 
        static EMPTY_VEC: Vec<ParameterId> = Vec::new();
 
        match self {
 
            Definition::Component(com) => &com.parameters,
 
            Definition::Function(fun) => &fun.parameters,
 
            _ => &EMPTY_VEC,
 
        }
 
    }
 
    pub fn body(&self) -> BlockStatementId {
 
        match self {
 
            Definition::Component(com) => com.body,
 
            Definition::Function(fun) => fun.body,
 
            _ => panic!("cannot retrieve body (for EnumDefinition/UnionDefinition or StructDefinition)")
 
        }
 
    }
 
    pub fn poly_vars(&self) -> &Vec<Identifier> {
 
        match self {
 
            Definition::Struct(def) => &def.poly_vars,
 
            Definition::Enum(def) => &def.poly_vars,
 
            Definition::Union(def) => &def.poly_vars,
 
            Definition::Component(def) => &def.poly_vars,
 
            Definition::Function(def) => &def.poly_vars,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct StructFieldDefinition {
 
    pub span: InputSpan,
 
    pub field: Identifier,
 
    pub parser_type: ParserType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct StructDefinition {
 
    pub this: StructDefinitionId,
 
    pub defined_in: RootId,
 
    // Phase 1: symbol scanning
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Phase 2: parsing
 
    pub fields: Vec<StructFieldDefinition>
 
}
 

	
 
impl StructDefinition {
 
    pub(crate) fn new_empty(
 
        this: StructDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ this, defined_in, span, identifier, poly_vars, fields: Vec::new() }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum EnumVariantValue {
 
    None,
 
    Integer(i64),
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EnumVariantDefinition {
 
    pub identifier: Identifier,
 
    pub value: EnumVariantValue,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EnumDefinition {
 
    pub this: EnumDefinitionId,
 
    pub defined_in: RootId,
 
    // Phase 1: symbol scanning
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Phase 2: parsing
 
    pub variants: Vec<EnumVariantDefinition>,
 
}
 

	
 
impl EnumDefinition {
 
    pub(crate) fn new_empty(
 
        this: EnumDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ this, defined_in, span, identifier, poly_vars, variants: Vec::new() }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum UnionVariantValue {
 
    None,
 
    Embedded(Vec<ParserType>),
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct UnionVariantDefinition {
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub value: UnionVariantValue,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct UnionDefinition {
 
    pub this: UnionDefinitionId,
 
    pub defined_in: RootId,
 
    // Phase 1: symbol scanning
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Phase 2: parsing
 
    pub variants: Vec<UnionVariantDefinition>,
 
}
 

	
 
impl UnionDefinition {
 
    pub(crate) fn new_empty(
 
        this: UnionDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ this, defined_in, span, identifier, poly_vars, variants: Vec::new() }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub enum ComponentVariant {
 
    Primitive,
 
    Composite,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ComponentDefinition {
 
    pub this: ComponentDefinitionId,
 
    pub defined_in: RootId,
 
    // Phase 1: symbol scanning
 
    pub span: InputSpan,
 
    pub variant: ComponentVariant,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Phase 2: parsing
 
    pub parameters: Vec<ParameterId>,
 
    pub body: BlockStatementId,
 
}
 

	
 
impl ComponentDefinition {
 
    pub(crate) fn new_empty(
 
        this: ComponentDefinitionId, defined_in: RootId, span: InputSpan,
 
        variant: ComponentVariant, identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ 
 
            this, defined_in, span, variant, identifier, poly_vars,
 
            parameters: Vec::new(), 
 
            body: BlockStatementId::new_invalid()
 
        }
 
    }
 
}
 

	
 
// Note that we will have function definitions for builtin functions as well. In
 
// that case the span, the identifier span and the body are all invalid.
 
#[derive(Debug, Clone)]
 
pub struct FunctionDefinition {
 
    pub this: FunctionDefinitionId,
 
    pub defined_in: RootId,
 
    // Phase 1: symbol scanning
 
    pub builtin: bool,
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Phase 2: parsing
 
    pub return_types: Vec<ParserType>,
 
    pub parameters: Vec<ParameterId>,
 
    pub body: BlockStatementId,
 
}
 

	
 
impl FunctionDefinition {
 
    pub(crate) fn new_empty(
 
        this: FunctionDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self {
 
            this, defined_in,
 
            builtin: false,
 
            span, identifier, poly_vars,
 
            return_types: Vec::new(),
 
            parameters: Vec::new(),
 
            body: BlockStatementId::new_invalid(),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Statement {
 
    Block(BlockStatement),
 
    Local(LocalStatement),
 
    Labeled(LabeledStatement),
 
    If(IfStatement),
 
    EndIf(EndIfStatement),
 
    While(WhileStatement),
 
    EndWhile(EndWhileStatement),
 
    Break(BreakStatement),
 
    Continue(ContinueStatement),
 
    Synchronous(SynchronousStatement),
 
    EndSynchronous(EndSynchronousStatement),
 
    Return(ReturnStatement),
 
    Goto(GotoStatement),
 
    New(NewStatement),
 
    Expression(ExpressionStatement),
 
}
 

	
 
impl Statement {
 
    pub fn is_block(&self) -> bool {
 
        match self {
 
            Statement::Block(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub fn as_block(&self) -> &BlockStatement {
 
        match self {
 
            Statement::Block(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `BlockStatement`"),
 
        }
 
    }
 
    pub fn as_block_mut(&mut self) -> &mut BlockStatement {
 
        match self {
 
            Statement::Block(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `BlockStatement`"),
 
        }
 
    }
 
    pub fn as_local(&self) -> &LocalStatement {
 
        match self {
 
            Statement::Local(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `LocalStatement`"),
 
        }
 
    }
 
    pub fn as_memory(&self) -> &MemoryStatement {
 
        self.as_local().as_memory()
 
    }
 
    pub fn as_channel(&self) -> &ChannelStatement {
 
        self.as_local().as_channel()
 
    }
 
    pub fn as_labeled(&self) -> &LabeledStatement {
 
        match self {
 
            Statement::Labeled(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `LabeledStatement`"),
 
        }
 
    }
 
    pub fn as_labeled_mut(&mut self) -> &mut LabeledStatement {
 
        match self {
 
            Statement::Labeled(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `LabeledStatement`"),
 
        }
 
    }
 
    pub fn as_if(&self) -> &IfStatement {
 
        match self {
 
            Statement::If(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `IfStatement`"),
 
        }
 
    }
 
    pub fn as_if_mut(&mut self) -> &mut IfStatement {
 
        match self {
 
            Statement::If(result) => result,
 
            _ => panic!("Unable to cast 'Statement' to 'IfStatement'"),
 
        }
 
    }
 
    pub fn as_end_if(&self) -> &EndIfStatement {
 
        match self {
 
            Statement::EndIf(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `EndIfStatement`"),
 
        }
 
    }
 
    pub fn is_while(&self) -> bool {
 
        match self {
 
            Statement::While(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub fn as_while(&self) -> &WhileStatement {
 
        match self {
 
            Statement::While(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `WhileStatement`"),
 
        }
 
    }
 
    pub fn as_while_mut(&mut self) -> &mut WhileStatement {
 
        match self {
 
            Statement::While(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `WhileStatement`"),
 
        }
 
    }
 
    pub fn as_end_while(&self) -> &EndWhileStatement {
 
        match self {
 
            Statement::EndWhile(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `EndWhileStatement`"),
 
        }
 
    }
 
    pub fn as_break(&self) -> &BreakStatement {
 
        match self {
 
            Statement::Break(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `BreakStatement`"),
 
        }
 
    }
 
    pub fn as_break_mut(&mut self) -> &mut BreakStatement {
 
        match self {
 
            Statement::Break(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `BreakStatement`"),
 
        }
 
    }
 
    pub fn as_continue(&self) -> &ContinueStatement {
 
        match self {
 
            Statement::Continue(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `ContinueStatement`"),
 
        }
 
    }
 
    pub fn as_continue_mut(&mut self) -> &mut ContinueStatement {
 
        match self {
 
            Statement::Continue(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `ContinueStatement`"),
 
        }
 
    }
 
    pub fn as_synchronous(&self) -> &SynchronousStatement {
 
        match self {
 
            Statement::Synchronous(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `SynchronousStatement`"),
 
        }
 
    }
 
    pub fn as_synchronous_mut(&mut self) -> &mut SynchronousStatement {
 
        match self {
 
            Statement::Synchronous(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `SynchronousStatement`"),
 
        }
 
    }
 
    pub fn as_end_synchronous(&self) -> &EndSynchronousStatement {
 
        match self {
 
            Statement::EndSynchronous(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `EndSynchronousStatement`"),
 
        }
 
    }
 
    pub fn as_return(&self) -> &ReturnStatement {
 
        match self {
 
            Statement::Return(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `ReturnStatement`"),
 
        }
 
    }
 
    pub fn as_goto(&self) -> &GotoStatement {
 
        match self {
 
            Statement::Goto(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `GotoStatement`"),
 
        }
 
    }
 
    pub fn as_goto_mut(&mut self) -> &mut GotoStatement {
 
        match self {
 
            Statement::Goto(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `GotoStatement`"),
 
        }
 
    }
 
    pub fn as_new(&self) -> &NewStatement {
 
        match self {
 
            Statement::New(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `NewStatement`"),
 
        }
 
    }
 
    pub fn as_expression(&self) -> &ExpressionStatement {
 
        match self {
 
            Statement::Expression(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `ExpressionStatement`"),
 
        }
 
    }
 
    pub fn span(&self) -> InputSpan {
 
        match self {
 
            Statement::Block(v) => v.span,
 
            Statement::Local(v) => v.span(),
 
            Statement::Labeled(v) => v.label.span,
 
            Statement::If(v) => v.span,
 
            Statement::While(v) => v.span,
 
            Statement::Break(v) => v.span,
 
            Statement::Continue(v) => v.span,
 
            Statement::Synchronous(v) => v.span,
 
            Statement::Return(v) => v.span,
 
            Statement::Goto(v) => v.span,
 
            Statement::New(v) => v.span,
 
            Statement::Expression(v) => v.span,
 
            Statement::EndIf(_) | Statement::EndWhile(_) | Statement::EndSynchronous(_) => unreachable!(),
 
        }
 
    }
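    // Sets the control-flow successor of this statement. Presumably only the
    // statement kinds that carry a `next` field are linked this way; the other
    // kinds either terminate a control-flow path (e.g. `Return`, `Goto`) or
    // have their successor recorded on their `End*` counterpart, hence the
    // `unreachable!()` arms below.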
 
    pub fn link_next(&mut self, next: StatementId) {
 
        match self {
 
            Statement::Block(_) => todo!(),
 
            Statement::Local(stmt) => match stmt {
 
                LocalStatement::Channel(stmt) => stmt.next = Some(next),
 
                LocalStatement::Memory(stmt) => stmt.next = Some(next),
 
            },
 
            Statement::EndIf(stmt) => stmt.next = Some(next),
 
            Statement::EndWhile(stmt) => stmt.next = Some(next),
 
            Statement::EndSynchronous(stmt) => stmt.next = Some(next),
 
            Statement::New(stmt) => stmt.next = Some(next),
 
            Statement::Expression(stmt) => stmt.next = Some(next),
 
            Statement::Return(_)
 
            | Statement::Break(_)
 
            | Statement::Continue(_)
 
            | Statement::Synchronous(_)
 
            | Statement::Goto(_)
 
            | Statement::While(_)
 
            | Statement::Labeled(_)
 
            | Statement::If(_) => unreachable!(),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BlockStatement {
 
    pub this: BlockStatementId,
 
    // Phase 1: parser
 
    pub is_implicit: bool,
 
    pub span: InputSpan, // of the complete block
 
    pub statements: Vec<StatementId>,
 
    // Phase 2: linker
 
    pub parent_scope: Option<Scope>,
 
    pub relative_pos_in_parent: u32,
 
    pub locals: Vec<LocalId>,
 
    pub labels: Vec<LabeledStatementId>,
 
}
 

	
 
impl BlockStatement {
 
    pub fn parent_block(&self, h: &Heap) -> Option<BlockStatementId> {
 
        let parent = self.parent_scope.unwrap();
 
        match parent {
 
            Scope::Definition(_) => {
 
                // If the parent scope is a definition, then there is no
 
                // parent block.
 
                None
 
            }
 
            Scope::Synchronous((parent, _)) => {
 
                // It is always the case that, when this function is called,
                // the parent of a synchronous statement is a block statement:
                // nested synchronous statements are flagged as illegal, and
                // that check happens before the variable-resolving pass that
                // creates the parent_scope references in the first place.
 
                Some(h[parent].parent_scope(h).unwrap().to_block())
 
            }
 
            Scope::Regular(parent) => {
 
                // A variable scope is either a definition, sync, or block.
 
                Some(parent)
 
            }
 
        }
 
    }
 
    pub fn first(&self) -> StatementId {
 
        // It is an invariant (guaranteed by the lexer) that block statements have at least one stmt
 
        *self.statements.first().unwrap()
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum LocalStatement {
 
    Memory(MemoryStatement),
 
    Channel(ChannelStatement),
 
}
 

	
 
impl LocalStatement {
 
    pub fn this(&self) -> LocalStatementId {
 
        match self {
 
            LocalStatement::Memory(stmt) => stmt.this.upcast(),
 
            LocalStatement::Channel(stmt) => stmt.this.upcast(),
 
        }
 
    }
 
    pub fn as_memory(&self) -> &MemoryStatement {
 
        match self {
 
            LocalStatement::Memory(result) => result,
 
            _ => panic!("Unable to cast `LocalStatement` to `MemoryStatement`"),
 
        }
 
    }
 
    pub fn as_channel(&self) -> &ChannelStatement {
 
        match self {
 
            LocalStatement::Channel(result) => result,
 
            _ => panic!("Unable to cast `LocalStatement` to `ChannelStatement`"),
 
        }
 
    }
 
    pub fn span(&self) -> InputSpan {
 
        match self {
 
            LocalStatement::Channel(v) => v.span,
 
            LocalStatement::Memory(v) => v.span,
 
        }
 
    }
 
    pub fn next(&self) -> Option<StatementId> {
 
        match self {
 
            LocalStatement::Memory(stmt) => stmt.next,
 
            LocalStatement::Channel(stmt) => stmt.next,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct MemoryStatement {
 
    pub this: MemoryStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub variable: LocalId,
 
    // Phase 2: linker
 
    pub next: Option<StatementId>,
 
}
 

	
 
/// ChannelStatement is the declaration of an input and output port associated
 
/// with the same channel. Note that the polarity of the ports is from the
 
/// point of view of the component. So an output port is something that a
 
/// component uses to send data over (i.e. it is the "input end" of the
 
/// channel), and vice versa.
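/// Presumably this means a component `put`s values on the port bound to
/// `from` (its output port) and `get`s values from the port bound to `to`
/// (its input port).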
 
#[derive(Debug, Clone)]
 
pub struct ChannelStatement {
 
    pub this: ChannelStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "channel" keyword
 
    pub from: LocalId, // output
 
    pub to: LocalId,   // input
 
    // Phase 2: linker
 
    pub relative_pos_in_block: u32,
 
    pub next: Option<StatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LabeledStatement {
 
    pub this: LabeledStatementId,
 
    // Phase 1: parser
 
    pub label: Identifier,
 
    pub body: StatementId,
 
    // Phase 2: linker
 
    pub relative_pos_in_block: u32,
 
    pub in_sync: Option<SynchronousStatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct IfStatement {
 
    pub this: IfStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "if" keyword
 
    pub test: ExpressionId,
 
    pub true_body: BlockStatementId,
 
    pub false_body: Option<BlockStatementId>,
 
    // Phase 2: linker
 
    pub end_if: Option<EndIfStatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndIfStatement {
 
    pub this: EndIfStatementId,
 
    pub start_if: IfStatementId,
 
    // Phase 2: linker
 
    pub next: Option<StatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct WhileStatement {
 
    pub this: WhileStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "while" keyword
 
    pub test: ExpressionId,
 
    pub body: BlockStatementId,
 
    // Phase 2: linker
 
    pub end_while: Option<EndWhileStatementId>,
 
    pub in_sync: Option<SynchronousStatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndWhileStatement {
 
    pub this: EndWhileStatementId,
 
    pub start_while: WhileStatementId,
 
    // Phase 2: linker
 
    pub next: Option<StatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BreakStatement {
 
    pub this: BreakStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "break" keyword
 
    pub label: Option<Identifier>,
 
    // Phase 2: linker
 
    pub target: Option<EndWhileStatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ContinueStatement {
 
    pub this: ContinueStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "continue" keyword
 
    pub label: Option<Identifier>,
 
    // Phase 2: linker
 
    pub target: Option<WhileStatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SynchronousStatement {
 
    pub this: SynchronousStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "sync" keyword
 
    pub body: BlockStatementId,
 
    // Phase 2: linker
 
    pub end_sync: Option<EndSynchronousStatementId>,
 
    pub parent_scope: Option<Scope>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndSynchronousStatement {
 
    pub this: EndSynchronousStatementId,
 
    pub start_sync: SynchronousStatementId,
 
    // Phase 2: linker
 
    pub next: Option<StatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ReturnStatement {
 
    pub this: ReturnStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "return" keyword
 
    pub expressions: Vec<ExpressionId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct GotoStatement {
 
    pub this: GotoStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "goto" keyword
 
    pub label: Identifier,
 
    // Phase 2: linker
 
    pub target: Option<LabeledStatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct NewStatement {
 
    pub this: NewStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "new" keyword
 
    pub expression: CallExpressionId,
 
    // Phase 2: linker
 
    pub next: Option<StatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ExpressionStatement {
 
    pub this: ExpressionStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub expression: ExpressionId,
 
    // Phase 2: linker
 
    pub next: Option<StatementId>,
 
}
 

	
 
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
 
pub enum ExpressionParent {
 
    None, // only set during initial parsing
 
    If(IfStatementId),
 
    While(WhileStatementId),
 
    Return(ReturnStatementId),
 
    New(NewStatementId),
 
    ExpressionStmt(ExpressionStatementId),
 
    Expression(ExpressionId, u32) // index within expression (e.g. LHS or RHS of the expression)
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Expression {
 
    Assignment(AssignmentExpression),
 
    Binding(BindingExpression),
 
    Conditional(ConditionalExpression),
 
    Binary(BinaryExpression),
 
    Unary(UnaryExpression),
 
    Indexing(IndexingExpression),
 
    Slicing(SlicingExpression),
 
    Select(SelectExpression),
 
    Literal(LiteralExpression),
 
    Call(CallExpression),
 
    Variable(VariableExpression),
 
}
 

	
 
impl Expression {
 
    pub fn as_assignment(&self) -> &AssignmentExpression {
 
        match self {
 
            Expression::Assignment(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `AssignmentExpression`"),
 
        }
 
    }
 
    pub fn as_conditional(&self) -> &ConditionalExpression {
 
        match self {
 
            Expression::Conditional(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `ConditionalExpression`"),
 
        }
 
    }
 
    pub fn as_binary(&self) -> &BinaryExpression {
 
        match self {
 
            Expression::Binary(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `BinaryExpression`"),
 
        }
 
    }
 
    pub fn as_unary(&self) -> &UnaryExpression {
 
        match self {
 
            Expression::Unary(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `UnaryExpression`"),
 
        }
 
    }
 
    pub fn as_indexing(&self) -> &IndexingExpression {
 
        match self {
 
            Expression::Indexing(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `IndexingExpression`"),
 
        }
 
    }
 
    pub fn as_slicing(&self) -> &SlicingExpression {
 
        match self {
 
            Expression::Slicing(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `SlicingExpression`"),
 
        }
 
    }
 
    pub fn as_select(&self) -> &SelectExpression {
 
        match self {
 
            Expression::Select(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `SelectExpression`"),
 
        }
 
    }
 
    pub fn as_array(&self) -> &ArrayExpression {
 
        match self {
 
            Expression::Array(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `ArrayExpression`"),
 
        }
 
    }
 
    pub fn as_constant(&self) -> &LiteralExpression {
 
        match self {
 
            Expression::Literal(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `ConstantExpression`"),
 
        }
 
    }
 
    pub fn as_call(&self) -> &CallExpression {
 
        match self {
 
            Expression::Call(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `CallExpression`"),
 
        }
 
    }
 
    pub fn as_call_mut(&mut self) -> &mut CallExpression {
 
        match self {
 
            Expression::Call(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `CallExpression`"),
 
        }
 
    }
 
    pub fn as_variable(&self) -> &VariableExpression {
 
        match self {
 
            Expression::Variable(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `VariableExpression`"),
 
        }
 
    }
 
    pub fn as_variable_mut(&mut self) -> &mut VariableExpression {
 
        match self {
 
            Expression::Variable(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `VariableExpression`"),
 
        }
 
    }
 
    pub fn span(&self) -> InputSpan {
 
        match self {
 
            Expression::Assignment(expr) => expr.span,
 
            Expression::Binding(expr) => expr.span,
 
            Expression::Conditional(expr) => expr.span,
 
            Expression::Binary(expr) => expr.span,
 
            Expression::Unary(expr) => expr.span,
 
            Expression::Indexing(expr) => expr.span,
 
            Expression::Slicing(expr) => expr.span,
 
            Expression::Select(expr) => expr.span,
 
            Expression::Literal(expr) => expr.span,
 
            Expression::Call(expr) => expr.span,
 
            Expression::Variable(expr) => expr.identifier.span,
 
        }
 
    }
 
    // TODO: @cleanup
 
    pub fn parent(&self) -> &ExpressionParent {
 
        match self {
 
            Expression::Assignment(expr) => &expr.parent,
 
            Expression::Binding(expr) => &expr.parent,
 
            Expression::Conditional(expr) => &expr.parent,
 
            Expression::Binary(expr) => &expr.parent,
 
            Expression::Unary(expr) => &expr.parent,
 
            Expression::Indexing(expr) => &expr.parent,
 
            Expression::Slicing(expr) => &expr.parent,
 
            Expression::Select(expr) => &expr.parent,
 
            Expression::Literal(expr) => &expr.parent,
 
            Expression::Call(expr) => &expr.parent,
 
            Expression::Variable(expr) => &expr.parent,
 
        }
 
    }
 
    // TODO: @cleanup
 
    pub fn parent_expr_id(&self) -> Option<ExpressionId> {
 
        if let ExpressionParent::Expression(id, _) = self.parent() {
 
            Some(*id)
 
        } else {
 
            None
 
        }
 
    }
 
    // TODO: @cleanup
 
    pub fn set_parent(&mut self, parent: ExpressionParent) {
 
        match self {
 
            Expression::Assignment(expr) => expr.parent = parent,
 
            Expression::Binding(expr) => expr.parent = parent,
 
            Expression::Conditional(expr) => expr.parent = parent,
 
            Expression::Binary(expr) => expr.parent = parent,
 
            Expression::Unary(expr) => expr.parent = parent,
 
            Expression::Indexing(expr) => expr.parent = parent,
 
            Expression::Slicing(expr) => expr.parent = parent,
 
            Expression::Select(expr) => expr.parent = parent,
 
            Expression::Literal(expr) => expr.parent = parent,
 
            Expression::Call(expr) => expr.parent = parent,
 
            Expression::Variable(expr) => expr.parent = parent,
 
        }
 
    }
 
    pub fn get_type(&self) -> &ConcreteType {
 
        match self {
 
            Expression::Assignment(expr) => &expr.concrete_type,
 
            Expression::Binding(expr) => &expr.concrete_type,
 
            Expression::Conditional(expr) => &expr.concrete_type,
 
            Expression::Binary(expr) => &expr.concrete_type,
 
            Expression::Unary(expr) => &expr.concrete_type,
 
            Expression::Indexing(expr) => &expr.concrete_type,
 
            Expression::Slicing(expr) => &expr.concrete_type,
 
            Expression::Select(expr) => &expr.concrete_type,
 
            Expression::Literal(expr) => &expr.concrete_type,
 
            Expression::Call(expr) => &expr.concrete_type,
 
            Expression::Variable(expr) => &expr.concrete_type,
 
        }
 
    }
 

	
 
    // TODO: @cleanup
 
    pub fn get_type_mut(&mut self) -> &mut ConcreteType {
 
        match self {
 
            Expression::Assignment(expr) => &mut expr.concrete_type,
 
            Expression::Binding(expr) => &mut expr.concrete_type,
 
            Expression::Conditional(expr) => &mut expr.concrete_type,
 
            Expression::Binary(expr) => &mut expr.concrete_type,
 
            Expression::Unary(expr) => &mut expr.concrete_type,
 
            Expression::Indexing(expr) => &mut expr.concrete_type,
 
            Expression::Slicing(expr) => &mut expr.concrete_type,
 
            Expression::Select(expr) => &mut expr.concrete_type,
 
            Expression::Literal(expr) => &mut expr.concrete_type,
 
            Expression::Call(expr) => &mut expr.concrete_type,
 
            Expression::Variable(expr) => &mut expr.concrete_type,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum AssignmentOperator {
 
    Set,
 
    Multiplied,
 
    Divided,
 
    Remained,
 
    Added,
 
    Subtracted,
 
    ShiftedLeft,
 
    ShiftedRight,
 
    BitwiseAnded,
 
    BitwiseXored,
 
    BitwiseOred,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct AssignmentExpression {
 
    pub this: AssignmentExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the operator
 
    pub left: ExpressionId,
 
    pub operation: AssignmentOperator,
 
    pub right: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BindingExpression {
 
    pub this: BindingExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub left: LiteralExpressionId,
 
    pub right: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ConditionalExpression {
 
    pub this: ConditionalExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of question mark operator
 
    pub test: ExpressionId,
 
    pub true_expression: ExpressionId,
 
    pub false_expression: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum BinaryOperator {
 
    Concatenate,
 
    LogicalOr,
 
    LogicalAnd,
 
    BitwiseOr,
 
    BitwiseXor,
 
    BitwiseAnd,
 
    Equality,
 
    Inequality,
 
    LessThan,
 
    GreaterThan,
 
    LessThanEqual,
 
    GreaterThanEqual,
 
    ShiftLeft,
 
    ShiftRight,
 
    Add,
 
    Subtract,
 
    Multiply,
 
    Divide,
 
    Remainder,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BinaryExpression {
 
    pub this: BinaryExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the operator
 
    pub left: ExpressionId,
 
    pub operation: BinaryOperator,
 
    pub right: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum UnaryOperation {
 
    Positive,
 
    Negative,
 
    BitwiseNot,
 
    LogicalNot,
 
    PreIncrement,
 
    PreDecrement,
 
    PostIncrement,
 
    PostDecrement,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct UnaryExpression {
 
    pub this: UnaryExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the operator
 
    pub operation: UnaryOperation,
 
    pub expression: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct IndexingExpression {
 
    pub this: IndexingExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub subject: ExpressionId,
 
    pub index: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SlicingExpression {
 
    pub this: SlicingExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // from '[' to ']';
 
    pub subject: ExpressionId,
 
    pub from_index: ExpressionId,
 
    pub to_index: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SelectExpression {
 
    pub this: SelectExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the '.'
 
    pub subject: ExpressionId,
 
    pub field: Field,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct CallExpression {
 
    pub this: CallExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub parser_type: ParserType, // of the function call
 
    pub method: Method,
 
    pub arguments: Vec<ExpressionId>,
 
    pub definition: DefinitionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum Method {
 
    // Builtin
 
    Get,
 
    Put,
 
    Fires,
 
    Create,
 
    Length,
 
    Assert,
 
    UserFunction,
 
    UserComponent,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct MethodSymbolic {
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) definition: DefinitionId
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralExpression {
 
    pub this: LiteralExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub value: Literal,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Literal {
 
    Null, // message
 
    True,
 
    False,
 
    Character(char),
 
    String(StringRef<'static>),
 
    Integer(LiteralInteger),
 
    Struct(LiteralStruct),
 
    Enum(LiteralEnum),
 
    Union(LiteralUnion),
 
    Array(Vec<ExpressionId>),
 
}
 

	
 
impl Literal {
 
    pub(crate) fn as_struct(&self) -> &LiteralStruct {
 
        if let Literal::Struct(literal) = self{
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Struct", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_struct_mut(&mut self) -> &mut LiteralStruct {
 
        if let Literal::Struct(literal) = self{
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Struct", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_enum(&self) -> &LiteralEnum {
 
        if let Literal::Enum(literal) = self {
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Enum", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_union(&self) -> &LiteralUnion {
 
        if let Literal::Union(literal) = self {
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Union", self)
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralInteger {
 
    pub(crate) unsigned_value: u64,
 
    pub(crate) negated: bool, // for constant expression evaluation, TODO: @Int
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralStructField {
 
    // Phase 1: parser
 
    pub(crate) identifier: Identifier,
 
    pub(crate) value: ExpressionId,
 
    // Phase 2: linker
 
    pub(crate) field_idx: usize, // in struct definition
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralStruct {
 
    // Phase 1: parser
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) fields: Vec<LiteralStructField>,
 
    pub(crate) definition: DefinitionId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralEnum {
 
    // Phase 1: parser
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) variant: Identifier,
 
    pub(crate) definition: DefinitionId,
 
    // Phase 2: linker
 
    pub(crate) variant_idx: usize, // as present in the type table
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralUnion {
 
    // Phase 1: parser
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) variant: Identifier,
 
    pub(crate) values: Vec<ExpressionId>,
 
    pub(crate) definition: DefinitionId,
 
    // Phase 2: linker
 
    pub(crate) variant_idx: usize, // as present in type table
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct VariableExpression {
 
    pub this: VariableExpressionId,
 
    // Phase 1: parser
 
    pub identifier: Identifier,
 
    // Phase 2: linker
 
    pub declaration: Option<VariableId>,
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 
\ No newline at end of file
src/protocol/ast_printer.rs
Show inline comments
 
use std::fmt::{Debug, Display, Write};
 
use std::io::Write as IOWrite;
 

	
 
use super::ast::*;
 
use super::token_parsing::*;
 

	
 
const INDENT: usize = 2;
 

	
 
const PREFIX_EMPTY: &'static str = "    ";
 
const PREFIX_ROOT_ID: &'static str = "Root";
 
const PREFIX_PRAGMA_ID: &'static str = "Prag";
 
const PREFIX_IMPORT_ID: &'static str = "Imp ";
 
const PREFIX_TYPE_ANNOT_ID: &'static str = "TyAn";
 
const PREFIX_VARIABLE_ID: &'static str = "Var ";
 
const PREFIX_PARAMETER_ID: &'static str = "Par ";
 
const PREFIX_LOCAL_ID: &'static str = "Loc ";
 
const PREFIX_DEFINITION_ID: &'static str = "Def ";
 
const PREFIX_STRUCT_ID: &'static str = "DefS";
 
const PREFIX_ENUM_ID: &'static str = "DefE";
 
const PREFIX_UNION_ID: &'static str = "DefU";
 
const PREFIX_COMPONENT_ID: &'static str = "DefC";
 
const PREFIX_FUNCTION_ID: &'static str = "DefF";
 
const PREFIX_STMT_ID: &'static str = "Stmt";
 
const PREFIX_BLOCK_STMT_ID: &'static str = "SBl ";
 
const PREFIX_LOCAL_STMT_ID: &'static str = "SLoc";
 
const PREFIX_MEM_STMT_ID: &'static str = "SMem";
 
const PREFIX_CHANNEL_STMT_ID: &'static str = "SCha";
 
const PREFIX_SKIP_STMT_ID: &'static str = "SSki";
 
const PREFIX_LABELED_STMT_ID: &'static str = "SLab";
 
const PREFIX_IF_STMT_ID: &'static str = "SIf ";
 
const PREFIX_ENDIF_STMT_ID: &'static str = "SEIf";
 
const PREFIX_WHILE_STMT_ID: &'static str = "SWhi";
 
const PREFIX_ENDWHILE_STMT_ID: &'static str = "SEWh";
 
const PREFIX_BREAK_STMT_ID: &'static str = "SBre";
 
const PREFIX_CONTINUE_STMT_ID: &'static str = "SCon";
 
const PREFIX_SYNC_STMT_ID: &'static str = "SSyn";
 
const PREFIX_ENDSYNC_STMT_ID: &'static str = "SESy";
 
const PREFIX_RETURN_STMT_ID: &'static str = "SRet";
 
const PREFIX_ASSERT_STMT_ID: &'static str = "SAsr";
 
const PREFIX_GOTO_STMT_ID: &'static str = "SGot";
 
const PREFIX_NEW_STMT_ID: &'static str = "SNew";
 
const PREFIX_PUT_STMT_ID: &'static str = "SPut";
 
const PREFIX_EXPR_STMT_ID: &'static str = "SExp";
 
const PREFIX_ASSIGNMENT_EXPR_ID: &'static str = "EAsi";
 
const PREFIX_BINDING_EXPR_ID: &'static str = "EBnd";
 
const PREFIX_CONDITIONAL_EXPR_ID: &'static str = "ECnd";
 
const PREFIX_BINARY_EXPR_ID: &'static str = "EBin";
 
const PREFIX_UNARY_EXPR_ID: &'static str = "EUna";
 
const PREFIX_INDEXING_EXPR_ID: &'static str = "EIdx";
 
const PREFIX_SLICING_EXPR_ID: &'static str = "ESli";
 
const PREFIX_SELECT_EXPR_ID: &'static str = "ESel";
 
const PREFIX_ARRAY_EXPR_ID: &'static str = "EArr";
 
const PREFIX_CONST_EXPR_ID: &'static str = "ECns";
 
const PREFIX_LITERAL_EXPR_ID: &'static str = "ELit";
 
const PREFIX_CALL_EXPR_ID: &'static str = "ECll";
 
const PREFIX_VARIABLE_EXPR_ID: &'static str = "EVar";
 

	
 
struct KV<'a> {
 
    buffer: &'a mut String,
 
    prefix: Option<(&'static str, i32)>,
 
    indent: usize,
 
    temp_key: &'a mut String,
 
    temp_val: &'a mut String,
 
}
 

	
 
impl<'a> KV<'a> {
 
    fn new(buffer: &'a mut String, temp_key: &'a mut String, temp_val: &'a mut String, indent: usize) -> Self {
 
        temp_key.clear();
 
        temp_val.clear();
 
        KV{
 
            buffer,
 
            prefix: None,
 
            indent,
 
            temp_key,
 
            temp_val
 
        }
 
    }
 

	
 
    fn with_id(mut self, prefix: &'static str, id: i32) -> Self {
 
        self.prefix = Some((prefix, id));
 
        self
 
    }
 

	
 
    fn with_s_key(self, key: &str) -> Self {
 
        self.temp_key.push_str(key);
 
        self
 
    }
 

	
 
    fn with_d_key<D: Display>(self, key: &D) -> Self {
 
        self.temp_key.push_str(&key.to_string());
 
        self
 
    }
 

	
 
    fn with_s_val(self, val: &str) -> Self {
 
        self.temp_val.push_str(val);
 
        self
 
    }
 

	
 
    fn with_disp_val<D: Display>(self, val: &D) -> Self {
 
        self.temp_val.push_str(&format!("{}", val));
 
        self
 
    }
 

	
 
    fn with_debug_val<D: Debug>(self, val: &D) -> Self {
 
        self.temp_val.push_str(&format!("{:?}", val));
 
        self
 
    }
 

	
 
    fn with_identifier_val(self, val: &Identifier) -> Self {
 
        self.temp_val.push_str(val.value.as_str());
 
        self
 
    }
 

	
 
    fn with_opt_disp_val<D: Display>(self, val: Option<&D>) -> Self {
 
        match val {
 
            Some(v) => { self.temp_val.push_str(&format!("Some({})", v)); },
 
            None => { self.temp_val.push_str("None"); }
 
        }
 
        self
 
    }
 

	
 
    fn with_opt_identifier_val(self, val: Option<&Identifier>) -> Self {
 
        match val {
 
            Some(v) => {
 
                self.temp_val.push_str("Some(");
 
                self.temp_val.push_str(v.value.as_str());
 
                self.temp_val.push(')');
 
            },
 
            None => {
 
                self.temp_val.push_str("None");
 
            }
 
        }
 
        self
 
    }
 

	
 
    fn with_custom_val<F: Fn(&mut String)>(mut self, val_fn: F) -> Self {
 
        val_fn(&mut self.temp_val);
 
        self
 
    }
 
}
 

	
 
impl<'a> Drop for KV<'a> {
 
    fn drop(&mut self) {
 
        // Prefix and indent
 
        if let Some((prefix, id)) = &self.prefix {
 
            self.buffer.push_str(&format!("{}[{:04}]", prefix, id));
 
        } else {
 
            self.buffer.push_str("           ");
 
        }
 

	
 
        for _ in 0..self.indent * INDENT {
 
            self.buffer.push(' ');
 
        }
 

	
 
        // Leading dash
 
        self.buffer.push_str("- ");
 

	
 
        // Key and value
 
        self.buffer.push_str(self.temp_key);
 
        if self.temp_val.is_empty() {
 
            self.buffer.push(':');
 
        } else {
 
            self.buffer.push_str(": ");
 
            self.buffer.push_str(&self.temp_val);
 
        }
 
        self.buffer.push('\n');
 
    }
 
}
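// Usage sketch (assumed from the call sites below): `KV` is a write-on-drop
// builder. A statement such as
//   self.kv(0).with_id(PREFIX_ROOT_ID, 0).with_s_key("Module");
// configures the prefix, key and value, and when the temporary `KV` is
// dropped at the end of that statement the formatted line, roughly
//   Root[0000]- Module:
// is appended to the shared output buffer.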
 

	
 
pub(crate) struct ASTWriter {
 
    cur_definition: Option<DefinitionId>,
 
    buffer: String,
 
    temp1: String,
 
    temp2: String,
 
}
 

	
 
impl ASTWriter {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            cur_definition: None,
 
            buffer: String::with_capacity(4096),
 
            temp1: String::with_capacity(256),
 
            temp2: String::with_capacity(256),
 
        }
 
    }
 
    pub(crate) fn write_ast<W: IOWrite>(&mut self, w: &mut W, heap: &Heap) {
 
        for root_id in heap.protocol_descriptions.iter().map(|v| v.this) {
 
            self.write_module(heap, root_id);
 
            w.write_all(self.buffer.as_bytes()).expect("flush buffer");
 
            self.buffer.clear();
 
        }
 
    }
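    // Example usage (sketch): dump the AST of every parsed module to stdout.
    //
    //   let mut writer = ASTWriter::new();
    //   writer.write_ast(&mut std::io::stdout(), &heap);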
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level module writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_module(&mut self, heap: &Heap, root_id: RootId) {
 
        self.kv(0).with_id(PREFIX_ROOT_ID, root_id.index)
 
            .with_s_key("Module");
 

	
 
        let root = &heap[root_id];
 
        self.kv(1).with_s_key("Pragmas");
 
        for pragma_id in &root.pragmas {
 
            self.write_pragma(heap, *pragma_id, 2);
 
        }
 

	
 
        self.kv(1).with_s_key("Imports");
 
        for import_id in &root.imports {
 
            self.write_import(heap, *import_id, 2);
 
        }
 

	
 
        self.kv(1).with_s_key("Definitions");
 
        for def_id in &root.definitions {
 
            self.write_definition(heap, *def_id, 2);
 
        }
 
    }
 

	
 
    fn write_pragma(&mut self, heap: &Heap, pragma_id: PragmaId, indent: usize) {
 
        match &heap[pragma_id] {
 
            Pragma::Version(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaVersion")
 
                    .with_disp_val(&pragma.version);
 
            },
 
            Pragma::Module(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaModule")
 
                    .with_identifier_val(&pragma.value);
 
            }
 
        }
 
    }
 

	
 
    fn write_import(&mut self, heap: &Heap, import_id: ImportId, indent: usize) {
 
        let import = &heap[import_id];
 
        let indent2 = indent + 1;
 

	
 
        match import {
 
            Import::Module(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportModule");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&import.module);
 
                self.kv(indent2).with_s_key("Alias").with_ascii_val(&import.alias.value);
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&import.module);
 
                self.kv(indent2).with_s_key("Alias").with_identifier_val(&import.alias);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(import.module_id.as_ref().map(|v| &v.index));
 
            },
 
            Import::Symbols(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportSymbol");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&import.module);
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&import.module);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(import.module_id.as_ref().map(|v| &v.index));
 

	
 
                self.kv(indent2).with_s_key("Symbols");
 

	
 
                let indent3 = indent2 + 1;
 
                let indent4 = indent3 + 1;
 
                for symbol in &import.symbols {
 
                    self.kv(indent3).with_s_key("AliasedSymbol");
 
                    self.kv(indent4).with_s_key("Name").with_ascii_val(&symbol.name.value);
 
                    self.kv(indent4).with_s_key("Alias").with_ascii_val(&symbol.alias.value);
 
                    self.kv(indent4).with_s_key("Name").with_identifier_val(&symbol.name);
 
                    self.kv(indent4).with_s_key("Alias").with_opt_identifier_val(symbol.alias.as_ref());
 
                    self.kv(indent4).with_s_key("Definition")
 
                        .with_opt_disp_val(symbol.definition_id.as_ref().map(|v| &v.index));
 
                }
 
            }
 
        }
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level definition writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_definition(&mut self, heap: &Heap, def_id: DefinitionId, indent: usize) {
 
        self.cur_definition = Some(def_id);
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 
        let indent4 = indent3 + 1;
 

	
 
        match &heap[def_id] {
 
            Definition::Struct(def) => {
 
                self.kv(indent).with_id(PREFIX_STRUCT_ID, def.this.0.index)
 
                    .with_s_key("DefinitionStruct");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&def.identifier);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_ascii_val(&poly_var_id.value);
 
                    self.kv(indent3).with_s_key("PolyVar").with_identifier_val(&poly_var_id);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Fields");
 
                for field in &def.fields {
 
                    self.kv(indent3).with_s_key("Field");
 
                    self.kv(indent4).with_s_key("Name")
 
                        .with_identifier_val(&field.field);
 
                    self.kv(indent4).with_s_key("Type")
 
                        .with_custom_val(|s| write_parser_type(s, heap, &field.parser_type));
 
                }
 
            },
 
            Definition::Enum(def) => {
 
                self.kv(indent).with_id(PREFIX_ENUM_ID, def.this.0.index)
 
                    .with_s_key("DefinitionEnum");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&def.identifier);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_ascii_val(&poly_var_id.value);
 
                    self.kv(indent3).with_s_key("PolyVar").with_identifier_val(&poly_var_id);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Variants");
 
                for variant in &def.variants {
 
                    self.kv(indent3).with_s_key("Variant");
 
                    self.kv(indent4).with_s_key("Name")
 
                        .with_identifier_val(&variant.identifier);
 
                    let variant_value = self.kv(indent4).with_s_key("Value");
 
                    match &variant.value {
 
                        EnumVariantValue::None => variant_value.with_s_val("None"),
 
                        EnumVariantValue::Integer(value) => variant_value.with_disp_val(value),
 
                    };
 
                }
 
            },
 
            Definition::Union(def) => {
 
                self.kv(indent).with_id(PREFIX_UNION_ID, def.this.0.index)
 
                    .with_s_key("DefinitionUnion");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&def.identifier);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_ascii_val(&poly_var_id.value);
 
                    self.kv(indent3).with_s_key("PolyVar").with_identifier_val(&poly_var_id);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Variants");
 
                for variant in &def.variants {
 
                    self.kv(indent3).with_s_key("Variant");
 
                    self.kv(indent4).with_s_key("Name")
 
                        .with_identifier_val(&variant.identifier);
 
                        
 
                    match &variant.value {
 
                        UnionVariantValue::None => {
 
                            self.kv(indent4).with_s_key("Value").with_s_val("None");
 
                        }
 
                        UnionVariantValue::Embedded(embedded) => {
 
                            self.kv(indent4).with_s_key("Values");
 
                            for embedded in embedded {
 
                                self.kv(indent4+1).with_s_key("Value")
 
                                    .with_custom_val(|v| write_parser_type(v, heap, embedded));
 
                            }
 
                        }
 
                    }
 
                }
 
            }
 
            Definition::Function(def) => {
 
                self.kv(indent).with_id(PREFIX_FUNCTION_ID, def.this.0.index)
 
                    .with_s_key("DefinitionFunction");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&def.identifier);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_ascii_val(&poly_var_id.value);
 
                    self.kv(indent3).with_s_key("PolyVar").with_identifier_val(&poly_var_id);
 
                }
 

	
 
                self.kv(indent2).with_s_key("ReturnParserType").with_custom_val(|s| write_parser_type(s, heap, &heap[def.return_type]));
 
                self.kv(indent2).with_s_key("ReturnParserTypes");
 
                for return_type in &def.return_types {
 
                    self.kv(indent3).with_s_key("ReturnParserType")
 
                        .with_custom_val(|s| write_parser_type(s, heap, return_type));
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parameters");
 
                for param_id in &def.parameters {
 
                    self.write_parameter(heap, *param_id, indent3);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, def.body.upcast(), indent3);
 
            },
 
            Definition::Component(def) => {
 
                self.kv(indent).with_id(PREFIX_COMPONENT_ID,def.this.0.index)
 
                    .with_s_key("DefinitionComponent");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&def.identifier);
 
                self.kv(indent2).with_s_key("Variant").with_debug_val(&def.variant);
 

	
 
                self.kv(indent2).with_s_key("PolymorphicVariables");
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_ascii_val(&poly_var_id.value);
 
                    self.kv(indent3).with_s_key("PolyVar").with_identifier_val(&poly_var_id);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parameters");
 
                for param_id in &def.parameters {
 
                    self.write_parameter(heap, *param_id, indent3)
 
                }
 

	
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, def.body.upcast(), indent3);
 
            }
 
        }
 
    }
 

	
 
    fn write_parameter(&mut self, heap: &Heap, param_id: ParameterId, indent: usize) {
 
        let indent2 = indent + 1;
 
        let param = &heap[param_id];
 

	
 
        self.kv(indent).with_id(PREFIX_PARAMETER_ID, param_id.0.index)
 
            .with_s_key("Parameter");
 
        self.kv(indent2).with_s_key("Name").with_ascii_val(&param.identifier.value);
 
        self.kv(indent2).with_s_key("ParserType").with_custom_val(|w| write_parser_type(w, heap, &heap[param.parser_type]));
 
    }
 

	
 
    fn write_poly_args(&mut self, heap: &Heap, poly_args: &[ParserTypeId], indent: usize) {
 
        if poly_args.is_empty() {
 
            return
 
        }
 

	
 
        let indent2 = indent + 1;
 
        self.kv(indent).with_s_key("PolymorphicArguments");
 
        for poly_arg in poly_args {
 
            self.kv(indent2).with_s_key("Argument")
 
                .with_custom_val(|v| write_parser_type(v, heap, &heap[*poly_arg]));
 
        }
 
        self.kv(indent2).with_s_key("Name").with_identifier_val(&param.identifier);
 
        self.kv(indent2).with_s_key("ParserType").with_custom_val(|w| write_parser_type(w, heap, &param.parser_type));
 
    }
 

	
 
    fn write_stmt(&mut self, heap: &Heap, stmt_id: StatementId, indent: usize) {
 
        let stmt = &heap[stmt_id];
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 

	
 
        match stmt {
 
            Statement::Block(stmt) => {
 
                self.kv(indent).with_id(PREFIX_BLOCK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Block");
 

	
 
                for stmt_id in &stmt.statements {
 
                    self.write_stmt(heap, *stmt_id, indent2);
 
                }
 
            },
 
            Statement::Local(stmt) => {
 
                match stmt {
 
                    LocalStatement::Channel(stmt) => {
 
                        self.kv(indent).with_id(PREFIX_CHANNEL_STMT_ID, stmt.this.0.0.index)
 
                            .with_s_key("LocalChannel");
 

	
 
                        self.kv(indent2).with_s_key("From");
 
                        self.write_local(heap, stmt.from, indent3);
 
                        self.kv(indent2).with_s_key("To");
 
                        self.write_local(heap, stmt.to, indent3);
 
                        self.kv(indent2).with_s_key("Next")
 
                            .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
                    },
 
                    LocalStatement::Memory(stmt) => {
 
                        self.kv(indent).with_id(PREFIX_MEM_STMT_ID, stmt.this.0.0.index)
 
                            .with_s_key("LocalMemory");
 

	
 
                        self.kv(indent2).with_s_key("Variable");
 
                        self.write_local(heap, stmt.variable, indent3);
 
                        self.kv(indent2).with_s_key("Next")
 
                            .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
                    }
 
                }
 
            },
 
            Statement::Skip(stmt) => {
 
                self.kv(indent).with_id(PREFIX_SKIP_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Skip");
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Labeled(stmt) => {
 
                self.kv(indent).with_id(PREFIX_LABELED_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Labeled");
 

	
 
                self.kv(indent2).with_s_key("Label").with_ascii_val(&stmt.label.value);
 
                self.kv(indent2).with_s_key("Label").with_identifier_val(&stmt.label);
 
                self.kv(indent2).with_s_key("Statement");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::If(stmt) => {
 
                self.kv(indent).with_id(PREFIX_IF_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("If");
 

	
 
                self.kv(indent2).with_s_key("EndIf")
 
                    .with_opt_disp_val(stmt.end_if.as_ref().map(|v| &v.0.index));
 

	
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, stmt.test, indent3);
 

	
 
                self.kv(indent2).with_s_key("TrueBody");
 
                self.write_stmt(heap, stmt.true_body.upcast(), indent3);
 

	
 
                if let Some(false_body) = stmt.false_body {
 
                    self.kv(indent2).with_s_key("FalseBody");
 
                    self.write_stmt(heap, false_body.upcast(), indent3);
 
                }
 
            },
 
            Statement::EndIf(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDIF_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndIf");
 
                self.kv(indent2).with_s_key("StartIf").with_disp_val(&stmt.start_if.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::While(stmt) => {
 
                self.kv(indent).with_id(PREFIX_WHILE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("While");
 

	
 
                self.kv(indent2).with_s_key("EndWhile")
 
                    .with_opt_disp_val(stmt.end_while.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("InSync")
 
                    .with_opt_disp_val(stmt.in_sync.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, stmt.test, indent3);
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, stmt.body.upcast(), indent3);
 
            },
 
            Statement::EndWhile(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDWHILE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndWhile");
 
                self.kv(indent2).with_s_key("StartWhile").with_disp_val(&stmt.start_while.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Break(stmt) => {
 
                self.kv(indent).with_id(PREFIX_BREAK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Break");
 
                self.kv(indent2).with_s_key("Label")
 
                    .with_opt_identifier_val(stmt.label.as_ref());
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::Continue(stmt) => {
 
                self.kv(indent).with_id(PREFIX_CONTINUE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Continue");
 
                self.kv(indent2).with_s_key("Label")
 
                    .with_opt_identifier_val(stmt.label.as_ref());
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::Synchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_SYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Synchronous");
 
                self.kv(indent2).with_s_key("EndSync")
 
                    .with_opt_disp_val(stmt.end_sync.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, stmt.body.upcast(), indent3);
 
            },
 
            Statement::EndSynchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDSYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndSynchronous");
 
                self.kv(indent2).with_s_key("StartSync").with_disp_val(&stmt.start_sync.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Return(stmt) => {
 
                self.kv(indent).with_id(PREFIX_RETURN_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Return");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression, indent3);
 
            },
 
            Statement::Assert(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ASSERT_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Assert");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression, indent3);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Goto(stmt) => {
 
                self.kv(indent).with_id(PREFIX_GOTO_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Goto");
 
                self.kv(indent2).with_s_key("Label").with_ascii_val(&stmt.label.value);
 
                self.kv(indent2).with_s_key("Label").with_identifier_val(&stmt.label);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::New(stmt) => {
 
                self.kv(indent).with_id(PREFIX_NEW_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("New");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression.upcast(), indent3);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Expression(stmt) => {
 
                self.kv(indent).with_id(PREFIX_EXPR_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("ExpressionStatement");
 
                self.write_expr(heap, stmt.expression, indent2);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            }
 
        }
 
    }
 

	
 
    fn write_expr(&mut self, heap: &Heap, expr_id: ExpressionId, indent: usize) {
 
        let expr = &heap[expr_id];
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 
        let def_id = self.cur_definition.unwrap();
 

	
 
        match expr {
 
            Expression::Assignment(expr) => {
 
                self.kv(indent).with_id(PREFIX_ASSIGNMENT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("AssignmentExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Binding(expr) => {
 
                self.kv(indent).with_id(PREFIX_BINARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("BindingExpr");
 
                self.kv(indent2).with_s_key("LeftExpression");
 
                self.write_expr(heap, expr.left.upcast(), indent3);
 
                self.kv(indent2).with_s_key("RightExpression");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Conditional(expr) => {
 
                self.kv(indent).with_id(PREFIX_CONDITIONAL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ConditionalExpr");
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, expr.test, indent3);
 
                self.kv(indent2).with_s_key("TrueExpression");
 
                self.write_expr(heap, expr.true_expression, indent3);
 
                self.kv(indent2).with_s_key("FalseExpression");
 
                self.write_expr(heap, expr.false_expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Binary(expr) => {
 
                self.kv(indent).with_id(PREFIX_BINARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("BinaryExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Unary(expr) => {
 
                self.kv(indent).with_id(PREFIX_UNARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("UnaryExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Argument");
 
                self.write_expr(heap, expr.expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Indexing(expr) => {
 
                self.kv(indent).with_id(PREFIX_INDEXING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("IndexingExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("Index");
 
                self.write_expr(heap, expr.index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Slicing(expr) => {
 
                self.kv(indent).with_id(PREFIX_SLICING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SlicingExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("FromIndex");
 
                self.write_expr(heap, expr.from_index, indent3);
 
                self.kv(indent2).with_s_key("ToIndex");
 
                self.write_expr(heap, expr.to_index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Select(expr) => {
 
                self.kv(indent).with_id(PREFIX_SELECT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SelectExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 

	
 
                match &expr.field {
 
                    Field::Length => {
 
                        self.kv(indent2).with_s_key("Field").with_s_val("length");
 
                    },
 
                    Field::Symbolic(field) => {
 
                        self.kv(indent2).with_s_key("Field").with_ascii_val(&field.identifier.value);
 
                        self.kv(indent2).with_s_key("Field").with_identifier_val(&field.identifier);
 
                        self.kv(indent3).with_s_key("Definition").with_opt_disp_val(field.definition.as_ref().map(|v| &v.index));
 
                        self.kv(indent3).with_s_key("Index").with_disp_val(&field.field_idx);
 
                    }
 
                }
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Array(expr) => {
 
                self.kv(indent).with_id(PREFIX_ARRAY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ArrayExpr");
 
                self.kv(indent2).with_s_key("Elements");
 
                for expr_id in &expr.elements {
 
                    self.write_expr(heap, *expr_id, indent3);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Literal(expr) => {
 
                self.kv(indent).with_id(PREFIX_LITERAL_EXPR_ID, expr.this.0.index)
                    .with_s_key("LiteralExpr");
 

	
 
                let val = self.kv(indent2).with_s_key("Value");
 
                match &expr.value {
 
                    Literal::Null => { val.with_s_val("null"); },
 
                    Literal::True => { val.with_s_val("true"); },
 
                    Literal::False => { val.with_s_val("false"); },
 
                    Literal::Character(data) => { val.with_disp_val(data); },
                    Literal::String(data) => { val.with_disp_val(data.as_str()); },
                    Literal::Integer(data) => { val.with_debug_val(data); },
 
                    Literal::Struct(data) => {
 
                        val.with_s_val("Struct");
 
                        let indent4 = indent3 + 1;
 

	
 
                        self.write_poly_args(heap, &data.poly_args2, indent3);
 
                        self.kv(indent3).with_s_key("ParserType")
 
                            .with_custom_val(|t| write_parser_type(t, heap, &data.parser_type));
 
                        self.kv(indent3).with_s_key("Definition").with_custom_val(|s| {
 
                            write_option(s, data.definition.as_ref().map(|v| &v.index));
 
                        });
 

	
 
                        for field in &data.fields {
 
                            self.kv(indent3).with_s_key("Field");
 
                            self.kv(indent4).with_s_key("Name").with_ascii_val(&field.identifier.value);
 
                            self.kv(indent4).with_s_key("Name").with_identifier_val(&field.identifier);
 
                            self.kv(indent4).with_s_key("Index").with_disp_val(&field.field_idx);
 
                            self.kv(indent4).with_s_key("ParserType");
 
                            self.write_expr(heap, field.value, indent4 + 1);
 
                        }
 
                    },
 
                    Literal::Enum(data) => {
 
                        val.with_s_val("Enum");
 

	
 
                        self.write_poly_args(heap, &data.poly_args2, indent3);
 
                        self.kv(indent3).with_s_key("ParserType")
 
                            .with_custom_val(|t| write_parser_type(t, heap, &data.parser_type));
 
                        self.kv(indent3).with_s_key("Definition").with_custom_val(|s| {
 
                            write_option(s, data.definition.as_ref().map(|v| &v.index))
 
                        });
 
                        self.kv(indent3).with_s_key("VariantIdx").with_disp_val(&data.variant_idx);
 
                    },
 
                    Literal::Union(data) => {
 
                        val.with_s_val("Union");
 
                        let indent4 = indent3 + 1;
 
                        self.write_poly_args(heap, &data.poly_args2, indent3);
 

	
 
                        self.kv(indent3).with_s_key("ParserType")
 
                            .with_custom_val(|t| write_parser_type(t, heap, &data.parser_type));
 
                        self.kv(indent3).with_s_key("Definition").with_custom_val(|s| {
 
                            write_option(s, data.definition.as_ref().map(|v| &v.index));
 
                        });
 
                        self.kv(indent3).with_s_key("VariantIdx").with_disp_val(&data.variant_idx);
 

	
 
                        for value in &data.values {
 
                            self.kv(indent3).with_s_key("Value");
 
                            self.write_expr(heap, *value, indent4);
 
                        }
 
                    }
 
                    Literal::Array(data) => {
 
                        val.with_s_val("Array");
 
                        let indent4 = indent3 + 1;
 

	
 
                        self.kv(indent3).with_s_key("Elements");
 
                        for expr_id in data {
 
                            self.write_expr(heap, *expr_id, indent4);
 
                        }
 
                    }
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Call(expr) => {
 
                self.kv(indent).with_id(PREFIX_CALL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("CallExpr");
 

	
 
                // Method
 
                let method = self.kv(indent2).with_s_key("Method");
 
                match &expr.method {
 
                    Method::Get => { method.with_s_val("get"); },
 
                    Method::Put => { method.with_s_val("put"); },
 
                    Method::Fires => { method.with_s_val("fires"); },
 
                    Method::Create => { method.with_s_val("create"); },
 
                    Method::Symbolic(symbolic) => {
 
                        method.with_s_val("symbolic");
 
                        self.kv(indent3).with_s_key("Name").with_ascii_val(&symbolic.identifier.value);
 
                        self.kv(indent3).with_s_key("Definition")
 
                            .with_opt_disp_val(symbolic.definition.as_ref().map(|v| &v.index));
 
                    }
                }

                let definition = &heap[expr.definition];
 
                match definition {
 
                    Definition::Component(definition) => {
 
                        self.kv(indent2).with_s_key("BuiltIn").with_disp_val(&false);
 
                        self.kv(indent2).with_s_key("Variant").with_debug_val(&definition.variant);
 
                    },
 
                    Definition::Function(definition) => {
 
                        self.kv(indent2).with_s_key("BuiltIn").with_disp_val(&definition.builtin);
 
                        self.kv(indent2).with_s_key("Variant").with_s_val("Function");
 
                    },
 
                    _ => unreachable!()
 
                }
 

	
 
                self.write_poly_args(heap, &expr.poly_args, indent2);
 
                self.kv(indent2).with_s_key("MethodName").with_identifier_val(definition.identifier());
 
                self.kv(indent2).with_s_key("ParserType")
 
                    .with_custom_val(|t| write_parser_type(t, heap, &expr.parser_type));
 

	
 
                // Arguments
 
                self.kv(indent2).with_s_key("Arguments");
 
                for arg_id in &expr.arguments {
 
                    self.write_expr(heap, *arg_id, indent3);
 
                }
 

	
 
                // Parent
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Variable(expr) => {
 
                self.kv(indent).with_id(PREFIX_VARIABLE_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("VariableExpr");
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&expr.identifier.value);
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&expr.identifier);
 
                self.kv(indent2).with_s_key("Definition")
 
                    .with_opt_disp_val(expr.declaration.as_ref().map(|v| &v.index));
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            }
 
        }
 
    }
 

	
 
    fn write_local(&mut self, heap: &Heap, local_id: LocalId, indent: usize) {
 
        let local = &heap[local_id];
 
        let indent2 = indent + 1;
 

	
 
        self.kv(indent).with_id(PREFIX_LOCAL_ID, local_id.0.index)
 
            .with_s_key("Local");
 

	
 
        self.kv(indent2).with_s_key("Name").with_ascii_val(&local.identifier.value);
 
        self.kv(indent2).with_s_key("Name").with_identifier_val(&local.identifier);
 
        self.kv(indent2).with_s_key("ParserType")
 
            .with_custom_val(|w| write_parser_type(w, heap, &heap[local.parser_type]));
 
            .with_custom_val(|w| write_parser_type(w, heap, &local.parser_type));
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Printing Utilities
 
    //--------------------------------------------------------------------------
 

	
 
    fn kv(&mut self, indent: usize) -> KV {
 
        KV::new(&mut self.buffer, &mut self.temp1, &mut self.temp2, indent)
 
    }
 

	
 
    fn flush<W: IOWrite>(&mut self, w: &mut W) {
 
        w.write_all(self.buffer.as_bytes()).unwrap();
 
        self.buffer.clear()
 
    }
 
}
 

	
 
fn write_option<V: Display>(target: &mut String, value: Option<V>) {
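    // Formats an optional value into the cleared target string as "Some(..)" or "None".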
 
    target.clear();
 
    match &value {
 
        Some(v) => target.push_str(&format!("Some({})", v)),
 
        None => target.push_str("None")
 
    };
 
}
 

	
 
fn write_parser_type(target: &mut String, heap: &Heap, t: &ParserType) {
 
    use ParserTypeVariant as PTV;
 

	
 
    fn push_bytes(target: &mut String, msg: &[u8]) {
        target.push_str(&String::from_utf8_lossy(msg));
    }

    // Writes the element at `element_idx` (recursing into any embedded elements)
    // and returns the index of the last element that was written.
    fn write_element(target: &mut String, heap: &Heap, t: &ParserType, mut element_idx: usize) -> usize {
        let element = &t.elements[element_idx];
        match &element.variant {
            PTV::Message => { push_bytes(target, KW_TYPE_MESSAGE); },
            PTV::Bool => { push_bytes(target, KW_TYPE_BOOL); },
            PTV::UInt8 => { push_bytes(target, KW_TYPE_UINT8); },
            PTV::UInt16 => { push_bytes(target, KW_TYPE_UINT16); },
            PTV::UInt32 => { push_bytes(target, KW_TYPE_UINT32); },
            PTV::UInt64 => { push_bytes(target, KW_TYPE_UINT64); },
            PTV::SInt8 => { push_bytes(target, KW_TYPE_SINT8); },
            PTV::SInt16 => { push_bytes(target, KW_TYPE_SINT16); },
            PTV::SInt32 => { push_bytes(target, KW_TYPE_SINT32); },
            PTV::SInt64 => { push_bytes(target, KW_TYPE_SINT64); },
            PTV::Character => { push_bytes(target, KW_TYPE_CHAR); },
            PTV::String => { push_bytes(target, KW_TYPE_STRING); },
            PTV::IntegerLiteral => { target.push_str("int_literal"); },
            PTV::Inferred => { push_bytes(target, KW_TYPE_INFERRED); },
            PTV::Array => {
                element_idx = write_element(target, heap, t, element_idx + 1);
                target.push_str("[]");
            },
            PTV::Input => {
                push_bytes(target, KW_TYPE_IN_PORT);
                target.push('<');
                element_idx = write_element(target, heap, t, element_idx + 1);
                target.push('>');
            },
            PTV::Output => {
                push_bytes(target, KW_TYPE_OUT_PORT);
                target.push('<');
                element_idx = write_element(target, heap, t, element_idx + 1);
                target.push('>');
            },
            PTV::PolymorphicArgument(definition_id, arg_idx) => {
                let definition = &heap[*definition_id];
                let poly_var = &definition.poly_vars()[*arg_idx].value;
                target.push_str(poly_var.as_str());
            },
            PTV::Definition(definition_id, num_embedded) => {
                let definition = &heap[*definition_id];
                let definition_ident = definition.identifier().value.as_str();
                target.push_str(definition_ident);

                let num_embedded = *num_embedded;
                if num_embedded != 0 {
                    target.push('<');
                    for embedded_idx in 0..num_embedded {
                        if embedded_idx != 0 {
                            target.push(',');
                        }
                        element_idx = write_element(target, heap, t, element_idx + 1);
                    }
                    target.push('>');
                }
            }
        }

        element_idx
    }

    write_element(target, heap, t, 0);
 
}
 

	
 
fn write_concrete_type(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType) {
 
    use ConcreteTypePart as CTP;
 

	
 
    fn write_concrete_part(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType, mut idx: usize) -> usize {
 
        if idx >= t.parts.len() {
 
            return idx;
 
        }
 

	
 
        match &t.parts[idx] {
 
            CTP::Marker(marker) => {
 
                // Marker points to polymorphic variable index
 
                let definition = &heap[def_id];
 
                let poly_var_ident = &definition.poly_vars()[*marker];
                target.push_str(poly_var_ident.value.as_str());
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
            },
 
            CTP::Void => target.push_str("void"),
 
            CTP::Message => target.push_str("msg"),
 
            CTP::Bool => target.push_str("bool"),
 
            CTP::Byte => target.push_str("byte"),
 
            CTP::Short => target.push_str("short"),
 
            CTP::Int => target.push_str("int"),
 
            CTP::Long => target.push_str("long"),
 
            CTP::String => target.push_str("string"),
 
            CTP::Array => {
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[]");
 
            },
 
            CTP::Slice => {
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[..]");
 
            }
 
            CTP::Input => {
 
                target.push_str("in<");
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>');
 
            },
 
            CTP::Output => {
 
                target.push_str("out<");
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>')
 
            },
 
            CTP::Instance(definition_id, num_embedded) => {
 
                let identifier = heap[*definition_id].identifier();
 
                target.push_str(&String::from_utf8_lossy(&identifier.value));
 
                target.push_str(identifier.value.as_str());
 
                target.push('<');
 
                for idx_embedded in 0..*num_embedded {
 
                    if idx_embedded != 0 {
 
                        target.push_str(", ");
 
                    }
 
                    idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                }
 
                target.push('>');
 
            }
 
        }
 

	
 
        idx + 1
 
    }
 

	
 
    write_concrete_part(target, heap, def_id, t, 0);
 
}
 

	
 
fn write_expression_parent(target: &mut String, parent: &ExpressionParent) {
 
    use ExpressionParent as EP;
 

	
 
    *target = match parent {
 
        EP::None => String::from("None"),
 
        EP::If(id) => format!("IfStmt({})", id.0.index),
 
        EP::While(id) => format!("WhileStmt({})", id.0.index),
 
        EP::Return(id) => format!("ReturnStmt({})", id.0.index),
 
        EP::Assert(id) => format!("AssertStmt({})", id.0.index),
 
        EP::New(id) => format!("NewStmt({})", id.0.index),
 
        EP::ExpressionStmt(id) => format!("ExprStmt({})", id.0.index),
 
        EP::Expression(id, idx) => format!("Expr({}, {})", id.index, idx)
 
    };
 
}
 
\ No newline at end of file
src/protocol/eval.rs
Show inline comments
 
use std::collections::HashMap;
 
use std::fmt;
 
use std::fmt::{Debug, Display, Formatter};
 
use std::{i16, i32, i64, i8};
 

	
 
use crate::common::*;
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::EvalContext;
 

	
 
// const MAX_RECURSION: usize = 1024;
 

	
 
const BYTE_MIN: i64 = i8::MIN as i64;
 
const BYTE_MAX: i64 = i8::MAX as i64;
 
const SHORT_MIN: i64 = i16::MIN as i64;
 
const SHORT_MAX: i64 = i16::MAX as i64;
 
const INT_MIN: i64 = i32::MIN as i64;
 
const INT_MAX: i64 = i32::MAX as i64;
 

	
 
const MESSAGE_MAX_LENGTH: i64 = SHORT_MAX;
 

	
 
const ONE: Value = Value::Byte(ByteValue(1));
 

	
 
// TODO: All goes one day anyway, so dirty typechecking hack
 
trait ValueImpl {
 
    fn exact_type(&self) -> Type;
 
    fn is_type_compatible(&self, h: &Heap, t: &ParserType) -> bool {
 
        Self::is_type_compatible_hack(h, t)
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool;
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Value {
 
    Unassigned,
 
    Input(InputValue),
 
    Output(OutputValue),
 
    Message(MessageValue),
 
    Boolean(BooleanValue),
 
    Byte(ByteValue),
 
    Short(ShortValue),
 
    Int(IntValue),
 
    Long(LongValue),
 
    InputArray(InputArrayValue),
 
    OutputArray(OutputArrayValue),
 
    MessageArray(MessageArrayValue),
 
    BooleanArray(BooleanArrayValue),
 
    ByteArray(ByteArrayValue),
 
    ShortArray(ShortArrayValue),
 
    IntArray(IntArrayValue),
 
    LongArray(LongArrayValue),
 
}
 
impl Value {
 
    pub fn receive_message(buffer: &Payload) -> Value {
 
        Value::Message(MessageValue(Some(buffer.clone())))
 
    }
 
    fn create_message(length: Value) -> Value {
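        // Builds a message of the requested length; any length outside
        // [0, MESSAGE_MAX_LENGTH] produces the null message.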
 
        match length {
 
            Value::Byte(_) | Value::Short(_) | Value::Int(_) | Value::Long(_) => {
 
                let length: i64 = i64::from(length);
 
                if length < 0 || length > MESSAGE_MAX_LENGTH {
 
                    // Only messages within the expected length are allowed
 
                    Value::Message(MessageValue(None))
 
                } else {
 
                    Value::Message(MessageValue(Some(Payload::new(length as usize))))
 
                }
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn from_constant(constant: &Literal) -> Value {
 
        match constant {
 
            Literal::Null => Value::Message(MessageValue(None)),
 
            Literal::True => Value::Boolean(BooleanValue(true)),
 
            Literal::False => Value::Boolean(BooleanValue(false)),
 
            Literal::Integer(val) => {
 
                let mut integer_value = val.unsigned_value as i64; // TODO: @Int
                if val.negated { integer_value = -integer_value; };

                if integer_value >= BYTE_MIN && integer_value <= BYTE_MAX {
                    Value::Byte(ByteValue(integer_value as i8))
                } else if integer_value >= SHORT_MIN && integer_value <= SHORT_MAX {
                    Value::Short(ShortValue(integer_value as i16))
                } else if integer_value >= INT_MIN && integer_value <= INT_MAX {
                    Value::Int(IntValue(integer_value as i32))
                } else {
                    Value::Long(LongValue(integer_value))
 
                }
 
            }
 
            Literal::Character(_data) => unimplemented!(),
 
            Literal::String(_data) => unimplemented!(),
 
            Literal::Struct(_data) => unimplemented!(),
 
            Literal::Enum(_data) => unimplemented!(),
 
            Literal::Union(_data) => unimplemented!(),
 
            Literal::Array(_expressions) => unimplemented!(),
 
        }
 
    }
 
    fn set(&mut self, index: &Value, value: &Value) -> Option<Value> {
 
        // The index must be of integer type, and non-negative
 
        let the_index: usize;
 
        match index {
 
            Value::Byte(_) | Value::Short(_) | Value::Int(_) | Value::Long(_) => {
 
                let index = i64::from(index);
 
                if index < 0 || index >= MESSAGE_MAX_LENGTH {
 
                    // It is inconsistent to update out of bounds
 
                    return None;
 
                }
 
                the_index = index.try_into().unwrap();
 
            }
 
            _ => unreachable!(),
 
        }
 
        // The subject must be either a message or an array
 
        // And the value and the subject must be compatible
 
        match (self, value) {
 
            (Value::Message(MessageValue(None)), _) => {
 
                // It is inconsistent to update the null message
 
                None
 
            }
 
            (Value::Message(MessageValue(Some(payload))), Value::Byte(ByteValue(b))) => {
 
                if *b < 0 {
 
                    // It is inconsistent to update with a negative value
 
                    return None;
 
                }
 
                if let Some(slot) = payload.as_mut_vec().get_mut(the_index) {
 
                    *slot = (*b).try_into().unwrap();
 
                    Some(value.clone())
 
                } else {
 
                    // It is inconsistent to update out of bounds
 
                    None
 
                }
 
            }
 
            (Value::Message(MessageValue(Some(payload))), Value::Short(ShortValue(b))) => {
 
                if *b < 0 || *b > BYTE_MAX as i16 {
 
                    // It is inconsistent to update with a negative value or a too large value
 
                    return None;
 
                }
 
                if let Some(slot) = payload.as_mut_vec().get_mut(the_index) {
 
                    *slot = (*b).try_into().unwrap();
 
                    Some(value.clone())
 
                } else {
 
                    // It is inconsistent to update out of bounds
 
                    None
 
                }
 
            }
 
            (Value::InputArray(_), Value::Input(_)) => todo!(),
 
            (Value::OutputArray(_), Value::Output(_)) => todo!(),
 
            (Value::MessageArray(_), Value::Message(_)) => todo!(),
 
            (Value::BooleanArray(_), Value::Boolean(_)) => todo!(),
 
            (Value::ByteArray(_), Value::Byte(_)) => todo!(),
 
            (Value::ShortArray(_), Value::Short(_)) => todo!(),
 
            (Value::IntArray(_), Value::Int(_)) => todo!(),
 
            (Value::LongArray(_), Value::Long(_)) => todo!(),
 
            _ => unreachable!(),
 
        }
 
    }
 
    fn get(&self, index: &Value) -> Option<Value> {
 
        // The index must be of integer type, and non-negative
 
        let the_index: usize;
 
        match index {
 
            Value::Byte(_) | Value::Short(_) | Value::Int(_) | Value::Long(_) => {
 
                let index = i64::from(index);
 
                if index < 0 || index >= MESSAGE_MAX_LENGTH {
 
                    // It is inconsistent to update out of bounds
 
                    return None;
 
                }
 
                the_index = index.try_into().unwrap();
 
            }
 
            _ => unreachable!(),
 
        }
 
        // The subject must be either a message or an array
 
        match self {
 
            Value::Message(MessageValue(None)) => {
 
                // It is inconsistent to read from the null message
 
                None
 
            }
 
            Value::Message(MessageValue(Some(payload))) => {
 
                if let Some(slot) = payload.as_slice().get(the_index) {
 
                    Some(Value::Short(ShortValue((*slot).try_into().unwrap())))
 
                } else {
 
                    // It is inconsistent to update out of bounds
 
                    None
 
                }
 
            }
 
            _ => panic!("Can only get from port value"),
 
        }
 
    }
 
    fn length(&self) -> Option<Value> {
 
        // The subject must be either a message or an array
 
        match self {
 
            Value::Message(MessageValue(None)) => {
 
                // It is inconsistent to get length from the null message
 
                None
 
            }
 
            Value::Message(MessageValue(Some(buffer))) => {
 
                Some(Value::Int(IntValue((buffer.len()).try_into().unwrap())))
 
            }
 
            Value::InputArray(InputArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::OutputArray(OutputArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::MessageArray(MessageArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::BooleanArray(BooleanArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::ByteArray(ByteArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::ShortArray(ShortArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::IntArray(IntArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::LongArray(LongArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            _ => unreachable!(),
 
        }
 
    }
 
    fn plus(&self, other: &Value) -> Value {
 
        match (self, other) {
 
            (Value::Byte(ByteValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Byte(ByteValue(*s + *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s as i16 + *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 + *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 + *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Short(ShortValue(*s + *o as i16))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s + *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 + *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 + *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Int(IntValue(*s + *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Int(IntValue(*s + *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Int(IntValue(o))) => Value::Int(IntValue(*s + *o)),
 
            (Value::Int(IntValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 + *o))
 
            }
 
            (Value::Long(LongValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Long(LongValue(*s + *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Long(LongValue(*s + *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Long(LongValue(*s + *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s + *o))
 
            }
 

	
 
            (Value::Message(MessageValue(s)), Value::Message(MessageValue(o))) => {
 
                let payload = if let [Some(s), Some(o)] = [s, o] {
 
                    let mut payload = s.clone();
 
                    payload.concatenate_with(o);
 
                    Some(payload)
 
                } else {
 
                    None
 
                };
 
                Value::Message(MessageValue(payload))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn minus(&self, other: &Value) -> Value {
 
        match (self, other) {
 
            (Value::Byte(ByteValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Byte(ByteValue(*s - *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s as i16 - *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 - *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 - *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Short(ShortValue(*s - *o as i16))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s - *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 - *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 - *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Int(IntValue(*s - *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Int(IntValue(*s - *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Int(IntValue(o))) => Value::Int(IntValue(*s - *o)),
 
            (Value::Int(IntValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 - *o))
 
            }
 
            (Value::Long(LongValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Long(LongValue(*s - *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Long(LongValue(*s - *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Long(LongValue(*s - *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s - *o))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn modulus(&self, other: &Value) -> Value {
 
        match (self, other) {
 
            (Value::Byte(ByteValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Byte(ByteValue(*s % *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s as i16 % *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 % *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 % *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Short(ShortValue(*s % *o as i16))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s % *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 % *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 % *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Int(IntValue(*s % *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Int(IntValue(*s % *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Int(IntValue(o))) => Value::Int(IntValue(*s % *o)),
 
            (Value::Int(IntValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 % *o))
 
            }
 
            (Value::Long(LongValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Long(LongValue(*s % *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Long(LongValue(*s % *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Long(LongValue(*s % *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s % *o))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn eq(&self, other: &Value) -> Value {
 
        match (self, other) {
 
            (Value::Byte(ByteValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i16 == *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i32 == *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i64 == *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i16))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i32 == *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i64 == *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i64 == *o))
 
            }
 
            (Value::Long(LongValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o))
 
            }
 
            (Value::Message(MessageValue(s)), Value::Message(MessageValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn neq(&self, other: &Value) -> Value {
 
        match (self, other) {
 
            (Value::Byte(ByteValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i16 != *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i32 != *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i64 != *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o as i16))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i32 != *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i64 != *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Long(LongValue(o))) => {
 
@@ -532,1271 +536,1274 @@ impl Value {
 
            (PrimitiveType::Int, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i32::from(self) < i32::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) < i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i64::from(self) < i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i64::from(self) < i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i64::from(self) < i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) < i64::from(other)))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn lte(&self, other: &Value) -> Value {
 
        assert!(!self.exact_type().array);
 
        assert!(!other.exact_type().array);
 
        match (self.exact_type().primitive, other.exact_type().primitive) {
 
            (PrimitiveType::Byte, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i8::from(self) <= i8::from(other)))
 
            }
 
            (PrimitiveType::Byte, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i16::from(self) <= i16::from(other)))
 
            }
 
            (PrimitiveType::Byte, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i32::from(self) <= i32::from(other)))
 
            }
 
            (PrimitiveType::Byte, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) <= i64::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i16::from(self) <= i16::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i16::from(self) <= i16::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i32::from(self) <= i32::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) <= i64::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i32::from(self) <= i32::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i32::from(self) <= i32::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i32::from(self) <= i32::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) <= i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i64::from(self) <= i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i64::from(self) <= i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i64::from(self) <= i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) <= i64::from(other)))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn gt(&self, other: &Value) -> Value {
 
        assert!(!self.exact_type().array);
 
        assert!(!other.exact_type().array);
 
        match (self.exact_type().primitive, other.exact_type().primitive) {
 
            (PrimitiveType::Byte, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i8::from(self) > i8::from(other)))
 
            }
 
            (PrimitiveType::Byte, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i16::from(self) > i16::from(other)))
 
            }
 
            (PrimitiveType::Byte, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i32::from(self) > i32::from(other)))
 
            }
 
            (PrimitiveType::Byte, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) > i64::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i16::from(self) > i16::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i16::from(self) > i16::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i32::from(self) > i32::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) > i64::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i32::from(self) > i32::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i32::from(self) > i32::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i32::from(self) > i32::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) > i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i64::from(self) > i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i64::from(self) > i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i64::from(self) > i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) > i64::from(other)))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn gte(&self, other: &Value) -> Value {
 
        assert!(!self.exact_type().array);
 
        assert!(!other.exact_type().array);
 
        match (self.exact_type().primitive, other.exact_type().primitive) {
 
            (PrimitiveType::Byte, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i8::from(self) >= i8::from(other)))
 
            }
 
            (PrimitiveType::Byte, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i16::from(self) >= i16::from(other)))
 
            }
 
            (PrimitiveType::Byte, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i32::from(self) >= i32::from(other)))
 
            }
 
            (PrimitiveType::Byte, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) >= i64::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i16::from(self) >= i16::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i16::from(self) >= i16::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i32::from(self) >= i32::from(other)))
 
            }
 
            (PrimitiveType::Short, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) >= i64::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i32::from(self) >= i32::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i32::from(self) >= i32::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i32::from(self) >= i32::from(other)))
 
            }
 
            (PrimitiveType::Int, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) >= i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Byte) => {
 
                Value::Boolean(BooleanValue(i64::from(self) >= i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Short) => {
 
                Value::Boolean(BooleanValue(i64::from(self) >= i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Int) => {
 
                Value::Boolean(BooleanValue(i64::from(self) >= i64::from(other)))
 
            }
 
            (PrimitiveType::Long, PrimitiveType::Long) => {
 
                Value::Boolean(BooleanValue(i64::from(self) >= i64::from(other)))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn as_boolean(&self) -> &BooleanValue {
 
        match self {
 
            Value::Boolean(result) => result,
 
            _ => panic!("Unable to cast `Value` to `BooleanValue`"),
 
        }
 
    }
 
}
 

	
 
impl From<bool> for Value {
 
    fn from(b: bool) -> Self {
 
        Value::Boolean(BooleanValue(b))
 
    }
 
}
 
impl From<Value> for bool {
 
    fn from(val: Value) -> Self {
 
        match val {
 
            Value::Boolean(BooleanValue(b)) => b,
 
            _ => unimplemented!(),
 
        }
 
    }
 
}
 
impl From<&Value> for bool {
 
    fn from(val: &Value) -> Self {
 
        match val {
 
            Value::Boolean(BooleanValue(b)) => *b,
 
            _ => unimplemented!(),
 
        }
 
    }
 
}
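// Illustrative sketch (not part of this changeset): booleans round-trip
// through `Value` via the conversions above, which is how an evaluated test
// expression is read back as a plain `bool`.
#[cfg(test)]
mod boolean_roundtrip_example {
    use super::*;

    #[test]
    fn bool_round_trips_through_value() {
        let value = Value::from(true);
        assert!(bool::from(&value));
        assert!(!bool::from(Value::from(false)));
    }
}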
 

	
 
impl From<Value> for i8 {
 
    fn from(val: Value) -> Self {
 
        match val {
 
            Value::Byte(ByteValue(b)) => b,
 
            _ => unimplemented!(),
 
        }
 
    }
 
}
 
impl From<&Value> for i8 {
 
    fn from(val: &Value) -> Self {
 
        match val {
 
            Value::Byte(ByteValue(b)) => *b,
 
            _ => unimplemented!(),
 
        }
 
    }
 
}
 

	
 
impl From<Value> for i16 {
 
    fn from(val: Value) -> Self {
 
        match val {
 
            Value::Byte(ByteValue(b)) => i16::from(b),
 
            Value::Short(ShortValue(s)) => s,
 
            _ => unimplemented!(),
 
        }
 
    }
 
}
 
impl From<&Value> for i16 {
 
    fn from(val: &Value) -> Self {
 
        match val {
 
            Value::Byte(ByteValue(b)) => i16::from(*b),
 
            Value::Short(ShortValue(s)) => *s,
 
            _ => unimplemented!(),
 
        }
 
    }
 
}
 

	
 
impl From<Value> for i32 {
 
    fn from(val: Value) -> Self {
 
        match val {
 
            Value::Byte(ByteValue(b)) => i32::from(b),
 
            Value::Short(ShortValue(s)) => i32::from(s),
 
            Value::Int(IntValue(i)) => i,
 
            _ => unimplemented!(),
 
        }
 
    }
 
}
 
impl From<&Value> for i32 {
 
    fn from(val: &Value) -> Self {
 
        match val {
 
            Value::Byte(ByteValue(b)) => i32::from(*b),
 
            Value::Short(ShortValue(s)) => i32::from(*s),
 
            Value::Int(IntValue(i)) => *i,
 
            _ => unimplemented!(),
 
        }
 
    }
 
}
 

	
 
impl From<Value> for i64 {
 
    fn from(val: Value) -> Self {
 
        match val {
 
            Value::Byte(ByteValue(b)) => i64::from(b),
 
            Value::Short(ShortValue(s)) => i64::from(s),
 
            Value::Int(IntValue(i)) => i64::from(i),
 
            Value::Long(LongValue(l)) => l,
 
            _ => unimplemented!(),
 
        }
 
    }
 
}
 
impl From<&Value> for i64 {
 
    fn from(val: &Value) -> Self {
 
        match val {
 
            Value::Byte(ByteValue(b)) => i64::from(*b),
 
            Value::Short(ShortValue(s)) => i64::from(*s),
 
            Value::Int(IntValue(i)) => i64::from(*i),
 
            Value::Long(LongValue(l)) => *l,
 
            _ => unimplemented!(),
 
        }
 
    }
 
}
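// Illustrative sketch (not part of this changeset): the `From` impls above are
// the widening conversions used by the arithmetic and comparison helpers, so a
// Byte value can be read back at any wider integer width.
#[cfg(test)]
mod value_widening_example {
    use super::*;

    #[test]
    fn byte_value_widens_to_each_integer_width() {
        let value = Value::Byte(ByteValue(7));
        assert_eq!(7i16, i16::from(&value));
        assert_eq!(7i32, i32::from(&value));
        assert_eq!(7i64, i64::from(&value));
    }
}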
 

	
 
impl ValueImpl for Value {
 
    fn exact_type(&self) -> Type {
 
        match self {
 
            Value::Unassigned => Type::UNASSIGNED,
 
            Value::Input(val) => val.exact_type(),
 
            Value::Output(val) => val.exact_type(),
 
            Value::Message(val) => val.exact_type(),
 
            Value::Boolean(val) => val.exact_type(),
 
            Value::Byte(val) => val.exact_type(),
 
            Value::Short(val) => val.exact_type(),
 
            Value::Int(val) => val.exact_type(),
 
            Value::Long(val) => val.exact_type(),
 
            Value::InputArray(val) => val.exact_type(),
 
            Value::OutputArray(val) => val.exact_type(),
 
            Value::MessageArray(val) => val.exact_type(),
 
            Value::BooleanArray(val) => val.exact_type(),
 
            Value::ByteArray(val) => val.exact_type(),
 
            Value::ShortArray(val) => val.exact_type(),
 
            Value::IntArray(val) => val.exact_type(),
 
            Value::LongArray(val) => val.exact_type(),
 
        }
 
    }
 
    fn is_type_compatible(&self, h: &Heap, t: &ParserType) -> bool {
 
        match self {
 
            Value::Unassigned => true,
 
            Value::Input(_) => InputValue::is_type_compatible_hack(h, t),
 
            Value::Output(_) => OutputValue::is_type_compatible_hack(h, t),
 
            Value::Message(_) => MessageValue::is_type_compatible_hack(h, t),
 
            Value::Boolean(_) => BooleanValue::is_type_compatible_hack(h, t),
 
            Value::Byte(_) => ByteValue::is_type_compatible_hack(h, t),
 
            Value::Short(_) => ShortValue::is_type_compatible_hack(h, t),
 
            Value::Int(_) => IntValue::is_type_compatible_hack(h, t),
 
            Value::Long(_) => LongValue::is_type_compatible_hack(h, t),
 
            Value::InputArray(_) => InputArrayValue::is_type_compatible_hack(h, t),
 
            Value::OutputArray(_) => OutputArrayValue::is_type_compatible_hack(h, t),
 
            Value::MessageArray(_) => MessageArrayValue::is_type_compatible_hack(h, t),
 
            Value::BooleanArray(_) => BooleanArrayValue::is_type_compatible_hack(h, t),
 
            Value::ByteArray(_) => ByteArrayValue::is_type_compatible_hack(h, t),
 
            Value::ShortArray(_) => ShortArrayValue::is_type_compatible_hack(h, t),
 
            Value::IntArray(_) => IntArrayValue::is_type_compatible_hack(h, t),
 
            Value::LongArray(_) => LongArrayValue::is_type_compatible_hack(h, t),
 
        }
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, _t: &ParserType) -> bool { false }
 
}
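// Illustrative sketch (not part of this changeset): `exact_type` is what the
// comparison helpers rely on to reject array operands (they assert
// `!exact_type().array` before comparing). A test-only check of that invariant
// for a scalar value:
#[cfg(test)]
mod exact_type_example {
    use super::*;

    #[test]
    fn scalar_values_report_a_non_array_type() {
        let value = Value::Int(IntValue(3));
        assert!(!value.exact_type().array);
    }
}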
 

	
 
impl Display for Value {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        let disp: &dyn Display;
 
        match self {
 
            Value::Unassigned => disp = &Type::UNASSIGNED,
 
            Value::Input(val) => disp = val,
 
            Value::Output(val) => disp = val,
 
            Value::Message(val) => disp = val,
 
            Value::Boolean(val) => disp = val,
 
            Value::Byte(val) => disp = val,
 
            Value::Short(val) => disp = val,
 
            Value::Int(val) => disp = val,
 
            Value::Long(val) => disp = val,
 
            Value::InputArray(val) => disp = val,
 
            Value::OutputArray(val) => disp = val,
 
            Value::MessageArray(val) => disp = val,
 
            Value::BooleanArray(val) => disp = val,
 
            Value::ByteArray(val) => disp = val,
 
            Value::ShortArray(val) => disp = val,
 
            Value::IntArray(val) => disp = val,
 
            Value::LongArray(val) => disp = val,
 
        }
 
        disp.fmt(f)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct InputValue(pub PortId);
 

	
 
impl Display for InputValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "#in")
 
    }
 
}
 

	
 
impl ValueImpl for InputValue {
 
    fn exact_type(&self) -> Type {
 
        Type::INPUT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match &t.variant {
 
            Input | Inferred | Definition(_, _) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct OutputValue(pub PortId);
 

	
 
impl Display for OutputValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "#out")
 
    }
 
}
 

	
 
impl ValueImpl for OutputValue {
 
    fn exact_type(&self) -> Type {
 
        Type::OUTPUT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match &t.elements[0].variant {
            Output | Inferred | Definition(_, _) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct MessageValue(pub Option<Payload>);
 

	
 
impl Display for MessageValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        match &self.0 {
 
            None => write!(f, "null"),
 
            Some(payload) => {
 
                // format print up to 10 bytes
 
                let mut slice = payload.as_slice();
 
                if slice.len() > 10 {
 
                    slice = &slice[..10];
 
                }
 
                f.debug_list().entries(slice.iter().copied()).finish()
 
            }
 
        }
 
    }
 
}
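// Illustrative sketch (not part of this changeset): an unassigned message
// payload is rendered as "null" by the Display impl above; non-empty payloads
// are printed as a byte list capped at ten entries.
#[cfg(test)]
mod message_display_example {
    use super::*;

    #[test]
    fn empty_message_displays_as_null() {
        assert_eq!("null", format!("{}", MessageValue(None)));
    }
}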
 

	
 
impl ValueImpl for MessageValue {
 
    fn exact_type(&self) -> Type {
 
        Type::MESSAGE
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match &t.elements[0].variant {
            Message | Inferred | Definition(_, _) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BooleanValue(bool);
 

	
 
impl Display for BooleanValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for BooleanValue {
 
    fn exact_type(&self) -> Type {
 
        Type::BOOLEAN
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.elements[0].variant {
 
            Definition(_, _) | Inferred | Bool |
 
            UInt8 | UInt16 | UInt32 | UInt64 |
 
            SInt8 | SInt16 | SInt32 | SInt64 => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ByteValue(i8);
 

	
 
impl Display for ByteValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for ByteValue {
 
    fn exact_type(&self) -> Type {
 
        Type::BYTE
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.elements[0].variant {
 
            Definition(_, _) | Inferred |
 
            UInt8 | UInt16 | UInt32 | UInt64 |
 
            SInt8 | SInt16 | SInt32 | SInt64 => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ShortValue(i16);
 

	
 
impl Display for ShortValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for ShortValue {
 
    fn exact_type(&self) -> Type {
 
        Type::SHORT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.elements[0].variant {
 
            Definition(_, _) | Inferred |
 
            UInt16 | UInt32 | UInt64 |
 
            SInt16 | SInt32 | SInt64 => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct IntValue(i32);
 

	
 
impl Display for IntValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for IntValue {
 
    fn exact_type(&self) -> Type {
 
        Type::INT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.elements[0].variant {
 
            Definition(_, _) | Inferred |
 
            UInt32 | UInt64 |
 
            SInt32 | SInt64 => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LongValue(i64);
 

	
 
impl Display for LongValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for LongValue {
 
    fn exact_type(&self) -> Type {
 
        Type::LONG
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match &t.elements[0].variant {
 
            UInt64 | SInt64 | Inferred | Definition(_, _) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
 
fn get_array_inner(t: &ParserType) -> Option<ParserTypeVariant> {
 
    if t.elements[0].variant == ParserTypeVariant::Array {
 
        return Some(t.elements[1].variant.clone())
 
    } else {
 
        return None;
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct InputArrayValue(Vec<InputValue>);
 

	
 
impl Display for InputArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for InputArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::INPUT_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
        get_array_inner(t)
 
            .map(|v| InputValue::is_type_compatible_hack(h, &h[v]))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct OutputArrayValue(Vec<OutputValue>);
 

	
 
impl Display for OutputArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for OutputArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::OUTPUT_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
        get_array_inner(t)
 
            .map(|v| OutputValue::is_type_compatible_hack(h, &h[v]))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct MessageArrayValue(Vec<MessageValue>);
 

	
 
impl Display for MessageArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for MessageArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::MESSAGE_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
        get_array_inner(t)
 
            .map(|v| MessageValue::is_type_compatible_hack(h, &h[v]))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BooleanArrayValue(Vec<BooleanValue>);
 

	
 
impl Display for BooleanArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for BooleanArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::BOOLEAN_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
        get_array_inner(t)
 
            .map(|v| BooleanValue::is_type_compatible_hack(h, &h[v]))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ByteArrayValue(Vec<ByteValue>);
 

	
 
impl Display for ByteArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for ByteArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::BYTE_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
        get_array_inner(t)
 
            .map(|v| ByteValue::is_type_compatible_hack(h, &h[v]))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ShortArrayValue(Vec<ShortValue>);
 

	
 
impl Display for ShortArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for ShortArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::SHORT_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
        get_array_inner(t)
 
            .map(|v| ShortValue::is_type_compatible_hack(h, &h[v]))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct IntArrayValue(Vec<IntValue>);
 

	
 
impl Display for IntArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for IntArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::INT_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
        get_array_inner(t)
 
            .map(|v| IntValue::is_type_compatible_hack(h, &h[v]))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LongArrayValue(Vec<LongValue>);
 

	
 
impl Display for LongArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for LongArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::LONG_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
        get_array_inner(t)
 
            .map(|v| LongValue::is_type_compatible_hack(h, &h[v]))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
struct Store {
 
    map: HashMap<VariableId, Value>,
 
}
 
impl Store {
 
    fn new() -> Self {
 
        Store { map: HashMap::new() }
 
    }
 
    fn initialize(&mut self, h: &Heap, var: VariableId, value: Value) {
 
        // Ensure value is compatible with type of variable
 
        let parser_type = match &h[var] {
            Variable::Local(v) => &v.parser_type,
            Variable::Parameter(v) => &v.parser_type,
        };
        assert!(value.is_type_compatible(h, parser_type));
 
        // Overwrite mapping
 
        self.map.insert(var, value.clone());
 
    }
 
    fn update(
 
        &mut self,
 
        h: &Heap,
 
        ctx: &mut EvalContext,
 
        lexpr: ExpressionId,
 
        value: Value,
 
    ) -> EvalResult {
 
        match &h[lexpr] {
 
            Expression::Variable(var) => {
 
                let var = var.declaration.unwrap();
 
                // Ensure value is compatible with type of variable
 
                let parser_type = match &h[var] {
                    Variable::Local(v) => &v.parser_type,
                    Variable::Parameter(v) => &v.parser_type
                };
 
                assert!(value.is_type_compatible(h, parser_type));
 
                // Overwrite mapping
 
                self.map.insert(var, value.clone());
 
                Ok(value)
 
            }
 
            Expression::Indexing(indexing) => {
 
                // Evaluate index expression, which must be some integral type
 
                let index = self.eval(h, ctx, indexing.index)?;
 
                // Mutable reference to the subject
 
                let subject;
 
                match &h[indexing.subject] {
 
                    Expression::Variable(var) => {
 
                        let var = var.declaration.unwrap();
 
                        subject = self.map.get_mut(&var).unwrap();
 
                    }
 
                    _ => unreachable!(),
 
                }
 
                match subject.set(&index, &value) {
 
                    Some(value) => Ok(value),
 
                    None => Err(EvalContinuation::Inconsistent),
 
                }
 
            }
 
            _ => unimplemented!("{:?}", h[lexpr]),
 
        }
 
    }
 
    fn get(&mut self, h: &Heap, ctx: &mut EvalContext, rexpr: ExpressionId) -> EvalResult {
 
        match &h[rexpr] {
 
            Expression::Variable(var) => {
 
                let var_id = var.declaration.unwrap();
 
                let value = self
 
                    .map
 
                    .get(&var_id)
 
                    .expect(&format!("Uninitialized variable {:?}", var.identifier.value.as_str()));
 
                Ok(value.clone())
 
            }
 
            Expression::Indexing(indexing) => {
 
                // Evaluate index expression, which must be some integral type
 
                let index = self.eval(h, ctx, indexing.index)?;
 
                // Reference to subject
 
                let subject;
 
                match &h[indexing.subject] {
 
                    Expression::Variable(var) => {
 
                        let var = var.declaration.unwrap();
 
                        subject = self.map.get(&var).unwrap();
 
                    }
 
                    q => unreachable!("Reached {:?}", q),
 
                }
 
                match subject.get(&index) {
 
                    Some(value) => Ok(value),
 
                    None => Err(EvalContinuation::Inconsistent),
 
                }
 
            }
 
            Expression::Select(selecting) => {
 
                // Reference to subject
 
                let subject;
 
                match &h[selecting.subject] {
 
                    Expression::Variable(var) => {
 
                        let var = var.declaration.unwrap();
 
                        subject = self.map.get(&var).unwrap();
 
                    }
 
                    q => unreachable!("Reached {:?}", q),
 
                }
 
                match subject.length() {
 
                    Some(value) => Ok(value),
 
                    None => Err(EvalContinuation::Inconsistent),
 
                }
 
            }
 
            _ => unimplemented!("{:?}", h[rexpr]),
 
        }
 
    }
 
    fn eval(&mut self, h: &Heap, ctx: &mut EvalContext, expr: ExpressionId) -> EvalResult {
 
        match &h[expr] {
 
            Expression::Assignment(expr) => {
 
                let value = self.eval(h, ctx, expr.right)?;
 
                match expr.operation {
 
                    AssignmentOperator::Set => {
 
                        self.update(h, ctx, expr.left, value.clone())?;
 
                    }
 
                    AssignmentOperator::Added => {
 
                        let old = self.get(h, ctx, expr.left)?;
 
                        self.update(h, ctx, expr.left, old.plus(&value))?;
 
                    }
 
                    AssignmentOperator::Subtracted => {
 
                        let old = self.get(h, ctx, expr.left)?;
 
                        self.update(h, ctx, expr.left, old.minus(&value))?;
 
                    }
 
                    _ => unimplemented!("{:?}", expr),
 
                }
 
                Ok(value)
 
            }
 
            Expression::Binding(_expr) => {
 
                unimplemented!("eval binding expression");
 
            }
 
            Expression::Conditional(expr) => {
 
                let test = self.eval(h, ctx, expr.test)?;
 
                if test.as_boolean().0 {
 
                    self.eval(h, ctx, expr.true_expression)
 
                } else {
 
                    self.eval(h, ctx, expr.false_expression)
 
                }
 
            }
 
            Expression::Binary(expr) => {
 
                let left = self.eval(h, ctx, expr.left)?;
 
                let right;
 
                match expr.operation {
 
                    BinaryOperator::LogicalAnd => {
 
                        if left.as_boolean().0 == false {
 
                            return Ok(left);
 
                        }
 
                        right = self.eval(h, ctx, expr.right)?;
 
                        right.as_boolean(); // panics if not a boolean
 
                        return Ok(right);
 
                    }
 
                    BinaryOperator::LogicalOr => {
 
                        if left.as_boolean().0 == true {
 
                            return Ok(left);
 
                        }
 
                        right = self.eval(h, ctx, expr.right)?;
 
                        right.as_boolean(); // panics if not a boolean
 
                        return Ok(right);
 
                    }
 
                    _ => {}
 
                }
 
                right = self.eval(h, ctx, expr.right)?;
 
                match expr.operation {
 
                    BinaryOperator::Equality => Ok(left.eq(&right)),
 
                    BinaryOperator::Inequality => Ok(left.neq(&right)),
 
                    BinaryOperator::LessThan => Ok(left.lt(&right)),
 
                    BinaryOperator::LessThanEqual => Ok(left.lte(&right)),
 
                    BinaryOperator::GreaterThan => Ok(left.gt(&right)),
 
                    BinaryOperator::GreaterThanEqual => Ok(left.gte(&right)),
 
                    BinaryOperator::Remainder => Ok(left.modulus(&right)),
 
                    BinaryOperator::Add => Ok(left.plus(&right)),
 
                    _ => unimplemented!("{:?}", expr.operation),
 
                }
 
            }
 
            Expression::Unary(expr) => {
 
                let mut value = self.eval(h, ctx, expr.expression)?;
 
                match expr.operation {
 
                    UnaryOperation::PostIncrement => {
 
                        self.update(h, ctx, expr.expression, value.plus(&ONE))?;
 
                    }
 
                    UnaryOperation::PreIncrement => {
 
                        value = value.plus(&ONE);
 
                        self.update(h, ctx, expr.expression, value.clone())?;
 
                    }
 
                    UnaryOperation::PostDecrement => {
 
                        self.update(h, ctx, expr.expression, value.minus(&ONE))?;
 
                    }
 
                    UnaryOperation::PreDecrement => {
 
                        value = value.minus(&ONE);
 
                        self.update(h, ctx, expr.expression, value.clone())?;
 
                    }
 
                    _ => unimplemented!(),
 
                }
 
                Ok(value)
 
            }
 
            Expression::Indexing(expr) => self.get(h, ctx, expr.this.upcast()),
 
            Expression::Slicing(_expr) => unimplemented!(),
 
            Expression::Select(expr) => self.get(h, ctx, expr.this.upcast()),
 
            Expression::Array(expr) => {
 
                let mut elements = Vec::new();
 
                for &elem in expr.elements.iter() {
 
                    elements.push(self.eval(h, ctx, elem)?);
 
                }
 
                todo!()
 
            }
 
            Expression::Literal(expr) => Ok(Value::from_constant(&expr.value)),
 
            Expression::Call(expr) => match &expr.method {
 
                Method::Get => {
 
                    assert_eq!(1, expr.arguments.len());
 
                    let value = self.eval(h, ctx, expr.arguments[0])?;
 
                    match ctx.get(value.clone()) {
 
                        None => Err(EvalContinuation::BlockGet(value)),
 
                        Some(result) => Ok(result),
 
                    }
 
                }
 
                Method::Put => {
 
                    assert_eq!(2, expr.arguments.len());
 
                    let port_value = self.eval(h, ctx, expr.arguments[0])?;
 
                    let msg_value = self.eval(h, ctx, expr.arguments[1])?;
 
                    if ctx.did_put(port_value.clone()) {
 
                        // Return bogus, replacing this at some point anyway
 
                        Ok(Value::Message(MessageValue(None)))
 
                    } else {
 
                        Err(EvalContinuation::Put(port_value, msg_value))
 
                    }
 
                }
 
                Method::Fires => {
 
                    assert_eq!(1, expr.arguments.len());
 
                    let value = self.eval(h, ctx, expr.arguments[0])?;
 
                    match ctx.fires(value.clone()) {
 
                        None => Err(EvalContinuation::BlockFires(value)),
 
                        Some(result) => Ok(result),
 
                    }
 
                }
 
                Method::Create => {
 
                    assert_eq!(1, expr.arguments.len());
 
                    let length = self.eval(h, ctx, expr.arguments[0])?;
 
                    Ok(Value::create_message(length))
 
                }
 
                Method::Symbolic(_symbol) => unimplemented!(),
 
            },
 
            Expression::Variable(expr) => self.get(h, ctx, expr.this.upcast()),
 
        }
 
    }
 
}
 

	
 
type EvalResult = Result<Value, EvalContinuation>;
 
pub enum EvalContinuation {
 
    Stepping,
 
    Inconsistent,
 
    Terminal,
 
    SyncBlockStart,
 
    SyncBlockEnd,
 
    NewComponent(DefinitionId, Vec<Value>),
 
    BlockFires(Value),
 
    BlockGet(Value),
 
    Put(Value, Value),
 
}
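// Illustrative sketch (not part of this changeset): how a caller of
// `Prompt::step` might classify the non-`Ok` outcomes above. The helper name
// `outcome_is_blocking` is hypothetical and only serves as an example.
#[cfg(test)]
fn outcome_is_blocking(cont: &EvalContinuation) -> bool {
    match cont {
        EvalContinuation::BlockFires(_)
        | EvalContinuation::BlockGet(_)
        | EvalContinuation::Put(_, _) => true,
        EvalContinuation::Stepping
        | EvalContinuation::Inconsistent
        | EvalContinuation::Terminal
        | EvalContinuation::SyncBlockStart
        | EvalContinuation::SyncBlockEnd
        | EvalContinuation::NewComponent(_, _) => false,
    }
}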
 

	
 
#[derive(Debug, Clone)]
 
pub(crate) struct Prompt {
 
    definition: DefinitionId,
 
    store: Store,
 
    position: Option<StatementId>,
 
}
 

	
 
impl Prompt {
 
    pub fn new(h: &Heap, def: DefinitionId, args: &Vec<Value>) -> Self {
 
        let mut prompt =
 
            Prompt { definition: def, store: Store::new(), position: Some((&h[def]).body().upcast()) };
 
        prompt.set_arguments(h, args);
 
        prompt
 
    }
 
    fn set_arguments(&mut self, h: &Heap, args: &Vec<Value>) {
 
        let def = &h[self.definition];
 
        let params = def.parameters();
 
        assert_eq!(params.len(), args.len());
 
        for (param, value) in params.iter().zip(args.iter()) {
 
            let hparam = &h[*param];
 
            assert!(value.is_type_compatible(h, &hparam.parser_type));
 
            self.store.initialize(h, param.upcast(), value.clone());
 
        }
 
    }
 
    pub fn step(&mut self, h: &Heap, ctx: &mut EvalContext) -> EvalResult {
 
        if self.position.is_none() {
 
            return Err(EvalContinuation::Terminal);
 
        }
 

	
 
        let stmt = &h[self.position.unwrap()];
 
        match stmt {
 
            Statement::Block(stmt) => {
 
                // Continue to first statement
 
                self.position = Some(stmt.first());
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Local(stmt) => {
 
                match stmt {
 
                    LocalStatement::Memory(stmt) => {
 
                        // Update store
 
                        self.store.initialize(h, stmt.variable.upcast(), Value::Unassigned);
 
                    }
 
                    LocalStatement::Channel(stmt) => {
 
                        let [from, to] = ctx.new_channel();
 
                        // Store the values in the declared variables
 
                        self.store.initialize(h, stmt.from.upcast(), from);
 
                        self.store.initialize(h, stmt.to.upcast(), to);
 
                    }
 
                }
 
                // Continue to next statement
 
                self.position = stmt.next();
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Skip(stmt) => {
 
                // Continue to next statement
 
                self.position = stmt.next;
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Labeled(stmt) => {
 
                // Continue to next statement
 
                self.position = Some(stmt.body);
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::If(stmt) => {
 
                // Evaluate test
 
                let value = self.store.eval(h, ctx, stmt.test)?;
 
                // Continue with either branch
 
                if value.as_boolean().0 {
                    self.position = Some(stmt.true_body.upcast());
                } else if let Some(false_body) = stmt.false_body {
                    self.position = Some(false_body.upcast());
                } else {
                    // No false body
                    self.position = Some(stmt.end_if.unwrap().upcast());
 
                }
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::EndIf(stmt) => {
 
                // Continue to next statement
 
                self.position = stmt.next;
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::While(stmt) => {
 
                // Evaluate test
 
                let value = self.store.eval(h, ctx, stmt.test)?;
 
                // Either continue with body, or go to next
 
                if value.as_boolean().0 {
 
                    self.position = Some(stmt.body.upcast());
 
                } else {
 
                    self.position = stmt.end_while.map(|x| x.upcast());
 
                }
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::EndWhile(stmt) => {
 
                // Continue to next statement
 
                self.position = stmt.next;
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Synchronous(stmt) => {
 
                // Continue to next statement, and signal upward
 
                self.position = Some(stmt.body.upcast());
 
                Err(EvalContinuation::SyncBlockStart)
 
            }
 
            Statement::EndSynchronous(stmt) => {
 
                // Continue to next statement, and signal upward
 
                self.position = stmt.next;
 
                Err(EvalContinuation::SyncBlockEnd)
 
            }
 
            Statement::Break(stmt) => {
 
                // Continue to end of while
 
                self.position = stmt.target.map(EndWhileStatementId::upcast);
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Continue(stmt) => {
 
                // Continue to beginning of while
 
                self.position = stmt.target.map(WhileStatementId::upcast);
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Assert(stmt) => {
 
                // Evaluate expression
 
                let value = self.store.eval(h, ctx, stmt.expression)?;
 
                if value.as_boolean().0 {
 
                    // Continue to next statement
 
                    self.position = stmt.next;
 
                    Err(EvalContinuation::Stepping)
 
                } else {
 
                    // Assertion failed: inconsistent
 
                    Err(EvalContinuation::Inconsistent)
 
                }
 
            }
 
            Statement::Return(stmt) => {
 
                // Evaluate expression
 
                let value = self.store.eval(h, ctx, stmt.expression)?;
 
                // Done with evaluation
 
                Ok(value)
 
            }
 
            Statement::Goto(stmt) => {
 
                // Continue to target
 
                self.position = stmt.target.map(|x| x.upcast());
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::New(stmt) => {
 
                let expr = &h[stmt.expression];
 
                let mut args = Vec::new();
 
                for &arg in expr.arguments.iter() {
 
                    let value = self.store.eval(h, ctx, arg)?;
 
                    args.push(value);
 
                }
 
                self.position = stmt.next;
 
                match &expr.method {
 
                    Method::Symbolic(symbolic) => {
 
                         Err(EvalContinuation::NewComponent(symbolic.definition.unwrap(), args))
 
                    },
 
                    _ => unreachable!("not a symbolic call expression")
 
                }
 
            }
 
            Statement::Expression(stmt) => {
 
                // Evaluate expression
 
                let _value = self.store.eval(h, ctx, stmt.expression)?;
 
                // Continue to next statement
 
                self.position = stmt.next;
 
                Err(EvalContinuation::Stepping)
 
            }
 
        }
 
    }
 
    // fn compute_function(_h: &Heap, _fun: FunctionId, _args: &Vec<Value>) -> Option<Value> {
 
    // let mut prompt = Self::new(h, fun.upcast(), args);
 
    // let mut context = EvalContext::None;
 
    // loop {
 
    //     let result = prompt.step(h, &mut context);
 
    //     match result {
 
    //         Ok(val) => return Some(val),
 
    //         Err(cont) => match cont {
 
    //             EvalContinuation::Stepping => continue,
 
    //             EvalContinuation::Inconsistent => return None,
 
    //             // Functions never terminate without returning
 
    //             EvalContinuation::Terminal => unreachable!(),
 
    //             // Functions never encounter any blocking behavior
 
    //             EvalContinuation::SyncBlockStart => unreachable!(),
 
    //             EvalContinuation::SyncBlockEnd => unreachable!(),
 
    //             EvalContinuation::NewComponent(_, _) => unreachable!(),
 
    //             EvalContinuation::BlockFires(val) => unreachable!(),
 
    //             EvalContinuation::BlockGet(val) => unreachable!(),
 
    //             EvalContinuation::Put(port, msg) => unreachable!(),
 
    //         },
 
    //     }
 
    // }
 
    // }
 
}
 

	
 
// #[cfg(test)]
 
// mod tests {
 
//     extern crate test_generator;
 

	
 
//     use std::fs::File;
 
//     use std::io::Read;
 
//     use std::path::Path;
 
//     use test_generator::test_resources;
 

	
 
//     use super::*;
 

	
 
//     #[test_resources("testdata/eval/positive/*.pdl")]
 
//     fn batch1(resource: &str) {
 
//         let path = Path::new(resource);
 
//         let expect = path.with_extension("txt");
 
//         let mut heap = Heap::new();
 
//         let mut source = InputSource::from_file(&path).unwrap();
 
//         let mut parser = Parser::new(&mut source);
 
//         let pd = parser.parse(&mut heap).unwrap();
 
//         let def = heap[pd].get_definition_ident(&heap, b"test").unwrap();
 
//         let fun = heap[def].as_function().this;
 
//         let args = Vec::new();
 
//         let result = Prompt::compute_function(&heap, fun, &args).unwrap();
 
//         let valstr: String = format!("{}", result);
 
//         println!("{}", valstr);
 

	
 
//         let mut cev: Vec<u8> = Vec::new();
 
//         let mut f = File::open(expect).unwrap();
 
//         f.read_to_end(&mut cev).unwrap();
 
//         let lavstr = String::from_utf8_lossy(&cev);
 
//         println!("{}", lavstr);
 

	
 
//         assert_eq!(valstr, lavstr);
 
//     }
 
// }
src/protocol/input_source.rs
 
file renamed from src/protocol/input_source2.rs to src/protocol/input_source.rs
 
use std::fmt;
 
use std::cell::{Ref, RefCell};
 
use std::fmt::Write;
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub struct InputPosition {
 
    pub line: u32,
 
    pub offset: u32,
 
}
 

	
 
impl InputPosition {
 
    pub(crate) fn with_offset(&self, offset: u32) -> Self {
 
        InputPosition { line: self.line, offset: self.offset + offset }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub struct InputSpan {
 
    pub begin: InputPosition,
    pub end: InputPosition,
 
}
 

	
 
impl InputSpan {
 
    #[inline]
 
    pub fn from_positions(begin: InputPosition, end: InputPosition) -> Self {
 
        Self { begin, end }
 
    }
 
}
 

	
 
/// Wrapper around source file with optional filename. Ensures that the file is
 
/// only scanned once.
 
pub struct InputSource {
 
    pub(crate) filename: String,
 
    pub(crate) input: Vec<u8>,
 
    // Iteration
 
    line: u32,
 
    offset: usize,
 
    // State tracking
 
    pub(crate) had_error: Option<ParseError>,
 
    // The offset_lookup is built on-demand upon attempting to report an error.
 
    // As the compiler is currently not multithreaded, we simply put it in a 
 
    // RefCell to allow interior mutability.
 
    offset_lookup: RefCell<Vec<u32>>,
 
}
 

	
 
impl InputSource {
 
    pub fn new(filename: String, input: Vec<u8>) -> Self {
 
        Self{
 
            filename,
 
            input,
 
            line: 1,
 
            offset: 0,
 
            had_error: None,
 
            offset_lookup: RefCell::new(Vec::new()),
 
        }
 
    }
 

	
 
    #[cfg(test)]
 
    pub fn new_test(input: &str) -> Self {
 
        let bytes = Vec::from(input.as_bytes());
 
        return Self::new(String::from("test"), bytes)
 
    }
 

	
 
    #[inline]
 
    pub fn pos(&self) -> InputPosition {
        InputPosition { line: self.line, offset: self.offset as u32 }
 
    }
 

	
 
    pub fn next(&self) -> Option<u8> {
 
        if self.offset < self.input.len() {
 
            Some(self.input[self.offset])
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    pub fn lookahead(&self, offset: usize) -> Option<u8> {
 
        let offset_pos = self.offset + offset;
 
        if offset_pos < self.input.len() {
 
            Some(self.input[offset_pos])
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    #[inline]
 
    pub fn section_at_pos(&self, start: InputPosition, end: InputPosition) -> &[u8] {
 
        &self.input[start.offset as usize..end.offset as usize]
 
    }
 

	
 
    #[inline]
 
    pub fn section_at_span(&self, span: InputSpan) -> &[u8] {
 
        &self.input[span.begin.offset as usize..span.end.offset as usize]
 
    }
 

	
 
    // Consumes the next character. Will check well-formedness of newlines: \r
 
    // must be followed by a \n, because this is used for error reporting. Will
 
    // not check for ascii-ness of the file, better left to a tokenizer.
 
    pub fn consume(&mut self) {
 
        match self.next() {
 
            Some(b'\r') => {
 
                if Some(b'\n') == self.lookahead(1) {
 
                    // Well formed file
 
                    self.offset += 1;
 
                } else {
 
                    // Not a well-formed file, pretend like we can continue
 
                    self.offset += 1;
 
                    self.set_error("Encountered carriage-feed without a following newline");
 
                }
 
            },
 
            Some(b'\n') => {
 
                self.line += 1;
 
                self.offset += 1;
 
            },
 
            Some(_) => {
 
                self.offset += 1;
 
            }
 
            None => {}
 
        }
 

	
 
        // Maybe we actually want to check this in release mode. Then again:
 
        // a 4 gigabyte source file... Really?
 
        debug_assert!(self.offset < u32::max_value() as usize);
 
    }
 

	
 
    fn set_error(&mut self, msg: &str) {
 
        if self.had_error.is_none() {
 
            self.had_error = Some(ParseError::new_error_str_at_pos(self, self.pos(), msg));
 
        }
 
    }
 

	
 
    fn get_lookup(&self) -> Ref<Vec<u32>> {
 
        // Once constructed the lookup always contains one element. We use this
 
        // to see if it is constructed already.
 
        let lookup = self.offset_lookup.borrow();
 
        if !lookup.is_empty() {
 
            return lookup;
 
        }
        drop(lookup); // release the shared borrow before taking the mutable one below
 

	
 
        // Build the line number (!) to offset lookup, so offset by 1. We 
 
        // assume the entire source file is scanned (most common case) for
 
        // preallocation.
 
        let mut lookup = self.offset_lookup.borrow_mut();
 
        lookup.reserve(self.line as usize + 2);
 
        lookup.push(0); // line 0: never used
 
        lookup.push(0); // first line: first character
 

	
 
        for char_idx in 0..self.input.len() {
 
            if self.input[char_idx] == b'\n' {
 
                lookup.push(char_idx as u32 + 1);
 
            }
 
        }
 

	
 
        lookup.push(self.input.len() as u32); // for lookup_line_end
 
        debug_assert_eq!(self.line as usize + 2, lookup.len(), "remove me: i am a testing assert and sometimes invalid");
 

	
 
        // Return the created lookup (drop the mutable borrow before re-borrowing)
        drop(lookup);
        let lookup = self.offset_lookup.borrow();
 
        return lookup;
 
    }
 

	
 
    /// Retrieves offset at which line starts (right after newline)
 
    fn lookup_line_start_offset(&self, line_number: u32) -> u32 {
 
        let lookup = self.get_lookup();
 
        lookup[line_number as usize]
 
    }
 

	
 
    /// Retrieves offset at which line ends (at the newline character or the
 
    /// preceding carriage feed for \r\n-encoded newlines)
 
    fn lookup_line_end_offset(&self, line_number: u32) -> u32 {
 
        let lookup = self.get_lookup();
 
        let offset = lookup[(line_number + 1) as usize] - 1;
 
        let offset_usize = offset as usize;
 

	
 
        // Compensate for newlines and a potential carriage feed
 
        if self.input[offset_usize] == b'\n' {
 
            if offset_usize > 0 && self.input[offset_usize - 1] == b'\r' {
 
                offset - 2
 
            } else {
 
                offset - 1
 
            }
 
        } else {
 
            offset
 
        }
 
    }
 
}
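// Illustrative usage sketch (not part of this changeset): driving the cursor
// API of `InputSource` over an in-memory buffer, then slicing the consumed
// region back out with `section_at_pos`.
#[cfg(test)]
mod input_source_cursor_example {
    use super::*;

    #[test]
    fn consume_tracks_lines_and_offsets() {
        let mut source = InputSource::new(String::from("example"), b"ab\ncd".to_vec());
        let begin = source.pos();
        while source.next().is_some() {
            source.consume();
        }
        let end = source.pos();
        assert_eq!(2, end.line); // the single '\n' bumped the line counter
        assert_eq!(&b"ab\ncd"[..], source.section_at_pos(begin, end));
    }
}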
 

	
 
#[derive(Debug)]
 
pub enum StatementKind {
 
    Info,
 
    Error
 
}
 

	
 
#[derive(Debug)]
 
pub enum ContextKind {
 
    SingleLine,
 
    MultiLine,
 
}
 

	
 
#[derive(Debug)]
 
pub struct ParseErrorStatement {
 
    pub(crate) statement_kind: StatementKind,
 
    pub(crate) context_kind: ContextKind,
 
    pub(crate) start_line: u32,
 
    pub(crate) start_column: u32,
 
    pub(crate) end_line: u32,
 
    pub(crate) end_column: u32,
 
    pub(crate) filename: String,
 
    pub(crate) context: String,
 
    pub(crate) message: String,
 
}
 

	
 
impl ParseErrorStatement {
 
    fn from_source_at_pos(statement_kind: StatementKind, source: &InputSource, position: InputPosition, message: String) -> Self {
 
        // Seek line start and end
 
        let line_start = source.lookup_line_start_offset(position.line);
 
        let line_end = source.lookup_line_end_offset(position.line);
 
        let context = Self::create_context(source, line_start as usize, line_end as usize);
 
        debug_assert!(position.offset >= line_start);
 
        let column = position.offset - line_start + 1;
 

	
 
        Self{
 
            statement_kind,
 
            context_kind: ContextKind::SingleLine,
 
            start_line: position.line,
 
            start_column: column,
 
            end_line: position.line,
 
            end_column: column + 1,
 
            filename: source.filename.clone(),
 
            context,
 
            message,
 
        }
 
    }
 

	
 
    fn from_source_at_span(statement_kind: StatementKind, source: &InputSource, span: InputSpan, message: String) -> Self {
 
        debug_assert!(span.end.line >= span.begin.line);
 
        debug_assert!(span.end.offset >= span.begin.offset);
 

	
 
        let first_line_start = source.lookup_line_start_offset(span.begin.line);
 
        let last_line_start = source.lookup_line_start_offset(span.end.line);
 
        let last_line_end = source.lookup_line_end_offset(span.end.line);
 
        let context = Self::create_context(source, first_line_start as usize, last_line_end as usize);
 
        debug_assert!(span.begin.offset >= first_line_start);
 
        let start_column = span.begin.offset - first_line_start + 1;
 
        let end_column = span.end.offset - last_line_start + 1;
 

	
 
        let context_kind = if span.begin.line == span.end.line {
 
            ContextKind::SingleLine
 
        } else {
 
            ContextKind::MultiLine
 
        };
 

	
 
        Self{
 
            statement_kind,
 
            context_kind,
 
            start_line: span.begin.line,
            start_column,
            end_line: span.end.line,
 
            end_column,
 
            filename: source.filename.clone(),
 
            context,
 
            message,
 
        }
 
    }
 

	
 
    /// Produces context from source
 
    fn create_context(source: &InputSource, start: usize, end: usize) -> String {
 
        let context_raw = &source.input[start..end];
 
        String::from_utf8_lossy(context_raw).to_string()
 
    }
 
}
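// Illustrative sketch (not part of this changeset): building a single-line
// error statement for a position inside an in-memory source. Columns are
// 1-based, so an offset of 8 on the first line reports column 9.
#[cfg(test)]
mod parse_error_statement_example {
    use super::*;

    #[test]
    fn single_line_error_reports_one_based_columns() {
        let source = InputSource::new(String::from("example"), b"let x = ;".to_vec());
        let position = InputPosition{ line: 1, offset: 8 };
        let statement = ParseErrorStatement::from_source_at_pos(
            StatementKind::Error, &source, position, String::from("expected an expression"));
        assert_eq!(1, statement.start_line);
        assert_eq!(9, statement.start_column);
    }
}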
 

	
 
impl fmt::Display for ParseErrorStatement {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        // Write kind of statement and message
 
        match self.statement_kind {
 
            StatementKind::Info => f.write_str(" INFO: ")?,
 
            StatementKind::Error => f.write_str("ERROR: ")?,
 
        }
 
        f.write_str(&self.message)?;
 
        f.write_char('\n')?;
 

	
 
        // Write originating file/line/column
 
        f.write_str(" +- ")?;
 
        if !self.filename.is_empty() {
 
            write!(f, "in {} ", self.filename)?;
 
        }
 

	
 
        match self.context_kind {
 
            ContextKind::SingleLine => writeln!(f, " at {}:{}", self.start_line, self.start_column),
 
            ContextKind::MultiLine => writeln!(
 
                f, " from {}:{} to {}:{}",
 
                self.start_line, self.start_column, self.end_line, self.end_column
 
            )
 
        }?;
 

	
 
        // Helper functions for writing the context: converting tabs into 4 spaces
 
        // (oh, the controversy!) and creating an annotated line
 
        fn transform_context(source: &str, target: &mut String) {
 
            for char in source.chars() {
 
                if char == '\t' {
 
                    target.push_str("    ");
 
                } else {
 
                    target.push(char);
 
                }
 
            }
 
        }
 

	
 
        fn extend_annotation(first_col: u32, last_col: u32, source: &str, target: &mut String, extend_char: char) {
 
            debug_assert!(first_col > 0 && last_col > first_col);
 
            for (char_idx, char) in source.chars().enumerate().skip(first_col as usize - 1) {
 
                if char_idx == last_col as usize {
 
                    break;
 
                }
 

	
 
                if char == '\t' {
 
                    for _ in 0..4 { target.push(extend_char); }
 
                } else {
 
                    target.push(extend_char);
 
                }
 
            }
 
        }
 

	
 
        // Write source context
 
        writeln!(f, " | ")?;
 

	
 
        let mut context = String::with_capacity(128);
 
        let mut annotation = String::with_capacity(128);
 

	
 
        match self.context_kind {
 
            ContextKind::SingleLine => {
 
                // Write a single line of context with an indicator for the offending
 
                // span underneath.
 
                transform_context(&self.context, &mut context);
 
                context.push('\n');
 
                f.write_str(&context)?;
 

	
 
                annotation.push_str(" | ");
 
                extend_annotation(1, self.start_column, &self.source, &mut annotation, ' ');
 
                extend_annotation(self.start_column, self.end_column, &self.source, &mut annotation, '~');
 
                extend_annotation(1, self.start_column, &self.context, &mut annotation, ' ');
 
                extend_annotation(self.start_column, self.end_column, &self.context, &mut annotation, '~');
 
                annotation.push('\n');
 

	
 
                f.write_str(&annotation)?;
 
            },
 
            ContextKind::MultiLine => {
 
                // Annotate all offending lines
 
                // - first line
 
                let mut lines = self.context.lines();
 
                let first_line = lines.next().unwrap();
 
                transform_context(first_line, &mut context);
 
                writeln!(" |- {}", &context)?;
 
                writeln!(f, " |- {}", &context)?;
 

	
 
                // - remaining lines
 
                let mut last_line = first_line;
 
                while let Some(cur_line) = lines.next() {
 
                    context.clear();
 
                    transform_context(cur_line, &mut context);
 
                    writeln!(" |  {}", &context);
 
                    writeln!(f, " |  {}", &context);
 
                    last_line = cur_line;
 
                }
 

	
 
                // - underline beneath last line
 
                annotation.push_str(" \\__");
 
                extend_annotation(1, self.end_column, &last_line, &mut annotation, '_');
 
                annotation.push_str("/\n");
 
                f.write_str(&annotation)?;
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 
}
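// For reference, a single-line error statement renders roughly as follows
// (an illustrative sketch only: the message, file name and exact column
// alignment are made up and depend on the annotation helpers above):
//
//     ERROR: unexpected token
//      +- in example.pdl  at 3:9
//      |
//     channel a -> b
//      |         ~~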
 

	
 
#[derive(Debug)]
 
pub struct ParseError {
 
    pub(crate) statements: Vec<ParseErrorStatement>
 
}
 

	
 
impl fmt::Display for ParseError {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        if self.statements.is_empty() {
 
            return Ok(())
 
        }
 

	
 
        self.statements[0].fmt(f)?;
 
        for statement in self.statements.iter().skip(1) {
 
            writeln!(f)?;
 
            statement.fmt(f)?;
 
        }
 

	
 
        Ok(())
 
    }
 
}
 

	
 
impl ParseError {
 
    pub fn empty() -> Self {
 
        Self{ statements: Vec::new() }
 
    }
 

	
 
    pub fn new_error_at_pos(source: &InputSource2, position: InputPosition2, message: String) -> Self {
 
    pub fn new_error_at_pos(source: &InputSource, position: InputPosition, message: String) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source_at_pos(
 
            StatementKind::Error, source, position, message
 
        )) }
 
    }
 

	
 
    pub fn new_error_str_at_pos(source: &InputSource2, position: InputPosition2, message: &str) -> Self {
 
    pub fn new_error_str_at_pos(source: &InputSource, position: InputPosition, message: &str) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source_at_pos(
 
            StatementKind::Error, source, position, message.to_string()
 
        )) }
 
    }
 

	
 
    pub fn new_error_at_span(source: &InputSource2, span: InputSpan, message: String) -> Self {
 
    pub fn new_error_at_span(source: &InputSource, span: InputSpan, message: String) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source_at_span(
 
            StatementKind::Error, source, span, message
 
        )) }
 
    }
 

	
 
    pub fn new_error_str_at_span(source: &InputSource2, span: InputSpan, message: &str) -> Self {
 
    pub fn new_error_str_at_span(source: &InputSource, span: InputSpan, message: &str) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source_at_span(
 
            StatementKind::Error, source, span, message.to_string()
 
        )) }
 
    }
 

	
 
    pub fn with_at_pos(mut self, error_type: StatementKind, source: &InputSource2, position: InputPosition2, message: String) -> Self {
 
    pub fn with_at_pos(mut self, error_type: StatementKind, source: &InputSource, position: InputPosition, message: String) -> Self {
 
        self.statements.push(ParseErrorStatement::from_source_at_pos(error_type, source, position, message));
 
        self
 
    }
 

	
 
    pub fn with_at_span(mut self, error_type: StatementKind, source: &InputSource2, span: InputSpan, message: String) -> Self {
 
    pub fn with_at_span(mut self, error_type: StatementKind, source: &InputSource, span: InputSpan, message: String) -> Self {
 
        self.statements.push(ParseErrorStatement::from_source_at_span(error_type, source, span, message.to_string()));
 
        self
 
    }
 

	
 
    pub fn with_info_at_pos(self, source: &InputSource2, position: InputPosition2, msg: String) -> Self {
 
    pub fn with_info_at_pos(self, source: &InputSource, position: InputPosition, msg: String) -> Self {
 
        self.with_at_pos(StatementKind::Info, source, position, msg)
 
    }
 

	
 
    pub fn with_info_str_at_pos(self, source: &InputSource2, position: InputPosition2, msg: &str) -> Self {
 
    pub fn with_info_str_at_pos(self, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        self.with_at_pos(StatementKind::Info, source, position, msg.to_string())
 
    }
 

	
 
    pub fn with_info_at_span(self, source: &InputSource2, span: InputSpan, msg: String) -> Self {
 
    pub fn with_info_at_span(self, source: &InputSource, span: InputSpan, msg: String) -> Self {
 
        self.with_at_span(StatementKind::Info, source, span, msg)
 
    }
 

	
 
    pub fn with_info_str_at_span(self, source: &InputSource2, span: InputSpan, msg: &str) -> Self {
 
    pub fn with_info_str_at_span(self, source: &InputSource, span: InputSpan, msg: &str) -> Self {
 
        self.with_at_span(StatementKind::Info, source, span, msg.to_string())
 
    }
 
}
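// A minimal usage sketch (not part of this changeset): the constructors above
// compose in builder style, e.g. a primary error at a span plus an
// informational note at an earlier position. `source`, `dup_span` and
// `prev_pos` are hypothetical values taken from the parsing context.
//
//     fn duplicate_symbol_error(source: &InputSource, dup_span: InputSpan, prev_pos: InputPosition) -> ParseError {
//         ParseError::new_error_str_at_span(source, dup_span, "symbol is defined twice")
//             .with_info_str_at_pos(source, prev_pos, "previous definition was here")
//     }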
src/protocol/inputsource.rs
 
deleted file
src/protocol/lexer.rs
 
deleted file
src/protocol/mod.rs
 
mod arena;
 
// mod ast;
 
mod eval;
 
pub(crate) mod inputsource;
 
pub(crate) mod input_source2;
 
// mod lexer;
 
pub(crate) mod input_source;
 
mod parser;
 
#[cfg(test)] mod tests;
 

	
 
// TODO: Remove when not benchmarking
 
pub(crate) mod ast;
 
pub(crate) mod ast_printer;
 
pub(crate) mod lexer;
 

	
 
lazy_static::lazy_static! {
 
    /// Conveniently-provided protocol description initialized with a zero-length PDL string.
 
    /// Exposed to minimize repeated initializations of this common protocol description.
 
    pub static ref TRIVIAL_PD: std::sync::Arc<ProtocolDescription> = {
 
        std::sync::Arc::new(ProtocolDescription::parse(b"").unwrap())
 
    };
 
}
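// Usage sketch (illustrative, not part of the changeset): clone the shared
// Arc instead of re-parsing an empty protocol description each time.
//
//     let pd: std::sync::Arc<ProtocolDescription> = TRIVIAL_PD.clone();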
 

	
 
use crate::common::*;
 
use crate::protocol::ast::*;
 
use crate::protocol::eval::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::input_source::*;
 
use crate::protocol::parser::*;
 

	
 
/// Description of a protocol object, used to configure new connectors.
 
#[repr(C)]
 
pub struct ProtocolDescription {
 
    heap: Heap,
 
    source: InputSource,
 
    root: RootId,
 
}
 
#[derive(Debug, Clone)]
 
pub(crate) struct ComponentState {
 
    prompt: Prompt,
 
}
 
pub(crate) enum EvalContext<'a> {
 
    Nonsync(&'a mut NonsyncProtoContext<'a>),
 
    Sync(&'a mut SyncProtoContext<'a>),
 
    // None,
 
}
 
//////////////////////////////////////////////
 

	
 
impl std::fmt::Debug for ProtocolDescription {
 
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
 
        write!(f, "(An opaque protocol description)")
 
    }
 
}
 
impl ProtocolDescription {
 
    // TODO: Allow for multi-file compilation
 
    pub fn parse(buffer: &[u8]) -> Result<Self, String> {
 
        // TODO: @fixme, keep code compilable, but needs support for multiple
 
        //  input files.
 
        let source = InputSource::from_buffer(buffer).unwrap();
 
        let source = InputSource::new(String::new(), Vec::from(buffer));
 
        let mut parser = Parser::new();
 
        parser.feed(source).expect("failed to lex source");
 
        parser.feed(source).expect("failed to feed source");
 
        
 
        if let Err(err) = parser.parse() {
 
            println!("ERROR:\n{}", err);
 
            return Err(format!("{}", err))
 
        }
 

	
 
        debug_assert_eq!(parser.modules.len(), 1, "only supporting one module here for now");
 
        let root = parser.modules[0].root_id;
 
        return Ok(ProtocolDescription { heap: parser.heap, source: parser.modules[0].source.clone(), root });
 
        let module = parser.modules.remove(0);
 
        let root = module.root_id;
 
        let source = module.source;
 
        return Ok(ProtocolDescription { heap: parser.heap, source, root });
 
    }
 
    pub(crate) fn component_polarities(
 
        &self,
 
        identifier: &[u8],
 
    ) -> Result<Vec<Polarity>, AddComponentError> {
 
        use AddComponentError::*;
 
        let h = &self.heap;
 
        let root = &h[self.root];
 
        let def = root.get_definition_ident(h, identifier);
 
        if def.is_none() {
 
            return Err(NoSuchComponent);
 
        }
 
        let def = &h[def.unwrap()];
 
        if !def.is_component() {
 
            return Err(NoSuchComponent);
 
        }
 
        for &param in def.parameters().iter() {
 
            let param = &h[param];
 
            let parser_type = &h[param.parser_type];
 
            let first_element = &param.parser_type.elements[0];
 

	
 
            match parser_type.variant {
 
                ParserTypeVariant::Input(_) | ParserTypeVariant::Output(_) => continue,
 
            match first_element.variant {
 
                ParserTypeVariant::Input | ParserTypeVariant::Output => continue,
 
                _ => {
 
                    return Err(NonPortTypeParameters);
 
                }
 
            }
 
        }
 
        let mut result = Vec::new();
 
        for &param in def.parameters().iter() {
 
            let param = &h[param];
 
            let parser_type = &h[param.parser_type];
 
            let first_element = &param.parser_type.elements[0];
 

	
 
            if let ParserTypeVariant::Input(_) = parser_type.variant {
 
            if first_element.variant == ParserTypeVariant::Input {
 
                result.push(Polarity::Getter)
 
            } else if let ParserTypeVariant::Output(_) = parser_type.variant {
 
            } else if first_element.variant == ParserTypeVariant::Output {
 
                result.push(Polarity::Putter)
 
            } else {
 
                unreachable!()
 
            }
 
        }
 
        Ok(result)
 
    }
 
    // expects port polarities to be correct
 
    pub(crate) fn new_component(&self, identifier: &[u8], ports: &[PortId]) -> ComponentState {
 
        let mut args = Vec::new();
 
        for (&x, y) in ports.iter().zip(self.component_polarities(identifier).unwrap()) {
 
            match y {
 
                Polarity::Getter => args.push(Value::Input(InputValue(x))),
 
                Polarity::Putter => args.push(Value::Output(OutputValue(x))),
 
            }
 
        }
 
        let h = &self.heap;
 
        let root = &h[self.root];
 
        let def = root.get_definition_ident(h, identifier).unwrap();
 
        ComponentState { prompt: Prompt::new(h, def, &args) }
 
    }
 
}
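// Illustrative call sequence (a sketch, not part of the changeset): parse a
// protocol description, inspect a component's port polarities, then
// instantiate it. `pdl_source_bytes`, the component name `b"forward"` and the
// ports `port_a`/`port_b` are hypothetical.
//
//     let pd = ProtocolDescription::parse(pdl_source_bytes).expect("valid PDL");
//     let polarities = pd.component_polarities(b"forward").expect("known component"); // e.g. [Getter, Putter]
//     let state = pd.new_component(b"forward", &[port_a, port_b]);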
 
impl ComponentState {
 
    pub(crate) fn nonsync_run<'a: 'b, 'b>(
 
        &'a mut self,
 
        context: &'b mut NonsyncProtoContext<'b>,
 
        pd: &'a ProtocolDescription,
 
    ) -> NonsyncBlocker {
 
        let mut context = EvalContext::Nonsync(context);
 
        loop {
 
            let result = self.prompt.step(&pd.heap, &mut context);
 
            match result {
 
                // In component definitions, there are no return statements
 
                Ok(_) => unreachable!(),
 
                Err(cont) => match cont {
 
                    EvalContinuation::Stepping => continue,
 
                    EvalContinuation::Inconsistent => return NonsyncBlocker::Inconsistent,
 
                    EvalContinuation::Terminal => return NonsyncBlocker::ComponentExit,
 
                    EvalContinuation::SyncBlockStart => return NonsyncBlocker::SyncBlockStart,
 
                    // Not possible to end sync block if never entered one
 
                    EvalContinuation::SyncBlockEnd => unreachable!(),
 
                    EvalContinuation::NewComponent(definition_id, args) => {
 
                        // Look up definition (TODO for now, assume it is a definition)
 
                        let h = &pd.heap;
 
                        let init_state = ComponentState { prompt: Prompt::new(h, definition_id, &args) };
 
                        context.new_component(&args, init_state);
 
                        // Continue stepping
 
                        continue;
 
                    }
 
                    // Outside synchronous blocks, no fires/get/put happens
 
                    EvalContinuation::BlockFires(_) => unreachable!(),
 
                    EvalContinuation::BlockGet(_) => unreachable!(),
 
                    EvalContinuation::Put(_, _) => unreachable!(),
 
                },
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn sync_run<'a: 'b, 'b>(
 
        &'a mut self,
 
        context: &'b mut SyncProtoContext<'b>,
 
        pd: &'a ProtocolDescription,
 
    ) -> SyncBlocker {
 
        let mut context = EvalContext::Sync(context);
 
        loop {
 
            let result = self.prompt.step(&pd.heap, &mut context);
 
            match result {
 
                // Inside synchronous blocks, there are no return statements
 
                Ok(_) => unreachable!(),
 
                Err(cont) => match cont {
 
                    EvalContinuation::Stepping => continue,
 
                    EvalContinuation::Inconsistent => return SyncBlocker::Inconsistent,
 
                    // First need to exit synchronous block before definition may end
 
                    EvalContinuation::Terminal => unreachable!(),
 
                    // No nested synchronous blocks
 
                    EvalContinuation::SyncBlockStart => unreachable!(),
 
                    EvalContinuation::SyncBlockEnd => return SyncBlocker::SyncBlockEnd,
 
                    // Not possible to create component in sync block
 
                    EvalContinuation::NewComponent(_, _) => unreachable!(),
 
                    EvalContinuation::BlockFires(port) => match port {
 
                        Value::Output(OutputValue(port)) => {
 
                            return SyncBlocker::CouldntCheckFiring(port);
 
                        }
 
                        Value::Input(InputValue(port)) => {
 
                            return SyncBlocker::CouldntCheckFiring(port);
 
                        }
 
                        _ => unreachable!(),
 
                    },
 
                    EvalContinuation::BlockGet(port) => match port {
 
                        Value::Output(OutputValue(port)) => {
 
                            return SyncBlocker::CouldntReadMsg(port);
 
                        }
 
                        Value::Input(InputValue(port)) => {
 
                            return SyncBlocker::CouldntReadMsg(port);
 
                        }
 
                        _ => unreachable!(),
 
                    },
 
                    EvalContinuation::Put(port, message) => {
 
                        let value;
 
                        match port {
 
                            Value::Output(OutputValue(port_value)) => {
 
                                value = port_value;
 
                            }
 
                            Value::Input(InputValue(port_value)) => {
 
                                value = port_value;
 
                            }
 
                            _ => unreachable!(),
 
                        }
 
                        let payload;
 
                        match message {
 
                            Value::Message(MessageValue(None)) => {
 
                                // Putting a null message is inconsistent
 
                                return SyncBlocker::Inconsistent;
 
                            }
 
                            Value::Message(MessageValue(Some(buffer))) => {
 
                                // Take ownership of the payload buffer
 
                                payload = buffer;
 
                            }
 
                            _ => unreachable!(),
 
                        }
 
                        return SyncBlocker::PutMsg(value, payload);
 
                    }
 
                },
 
            }
 
        }
 
    }
 
}
 
impl EvalContext<'_> {
 
    // fn random(&mut self) -> LongValue {
 
    //     match self {
 
    //         // EvalContext::None => unreachable!(),
 
    //         EvalContext::Nonsync(_context) => todo!(),
 
    //         EvalContext::Sync(_) => unreachable!(),
 
    //     }
 
    // }
 
    fn new_component(&mut self, args: &[Value], init_state: ComponentState) -> () {
 
        match self {
 
            // EvalContext::None => unreachable!(),
 
            EvalContext::Nonsync(context) => {
 
                let mut moved_ports = HashSet::new();
 
                for arg in args.iter() {
 
                    match arg {
 
                        Value::Output(OutputValue(port)) => {
 
                            moved_ports.insert(*port);
 
                        }
 
                        Value::Input(InputValue(port)) => {
 
                            moved_ports.insert(*port);
 
                        }
 
                        _ => {}
 
                    }
 
                }
 
                context.new_component(moved_ports, init_state)
 
            }
 
            EvalContext::Sync(_) => unreachable!(),
 
        }
 
    }
 
    fn new_channel(&mut self) -> [Value; 2] {
 
        match self {
 
            // EvalContext::None => unreachable!(),
 
            EvalContext::Nonsync(context) => {
 
                let [from, to] = context.new_port_pair();
 
                let from = Value::Output(OutputValue(from));
 
                let to = Value::Input(InputValue(to));
 
                return [from, to];
 
            }
 
            EvalContext::Sync(_) => unreachable!(),
 
        }
 
    }
 
    fn fires(&mut self, port: Value) -> Option<Value> {
 
        match self {
 
            // EvalContext::None => unreachable!(),
 
            EvalContext::Nonsync(_) => unreachable!(),
 
            EvalContext::Sync(context) => match port {
 
                Value::Output(OutputValue(port)) => context.is_firing(port).map(Value::from),
 
                Value::Input(InputValue(port)) => context.is_firing(port).map(Value::from),
 
                _ => unreachable!(),
 
            },
 
        }
 
    }
 
    fn get(&mut self, port: Value) -> Option<Value> {
 
        match self {
 
            // EvalContext::None => unreachable!(),
 
            EvalContext::Nonsync(_) => unreachable!(),
 
            EvalContext::Sync(context) => match port {
 
                Value::Output(OutputValue(port)) => {
 
                    context.read_msg(port).map(Value::receive_message)
 
                }
 
                Value::Input(InputValue(port)) => {
 
                    context.read_msg(port).map(Value::receive_message)
 
                }
 
                _ => unreachable!(),
 
            },
 
        }
 
    }
 
    fn did_put(&mut self, port: Value) -> bool {
 
        match self {
 
            EvalContext::Nonsync(_) => unreachable!("did_put in nonsync context"),
 
            EvalContext::Sync(context) => match port {
 
                Value::Output(OutputValue(port)) => {
 
                    context.did_put_or_get(port)
 
                },
 
                Value::Input(_) => unreachable!("did_put on input port"),
 
                _ => unreachable!("did_put on non-port value")
 
            }
 
        }
 
    }
 
}
src/protocol/parser/depth_visitor.rs
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::input_source::*;
 

	
 
// The following indirection is needed due to a bug in the cbindgen tool.
 
type Unit = ();
 
pub(crate) type VisitorError = (InputPosition, String); // TODO: Revise when multi-file compiling is in place
 
pub(crate) type VisitorResult = Result<Unit, VisitorError>;
 

	
 
pub(crate) trait Visitor: Sized {
 
    fn visit_protocol_description(&mut self, h: &mut Heap, pd: RootId) -> VisitorResult {
 
        recursive_protocol_description(self, h, pd)
 
    }
 
    fn visit_pragma(&mut self, _h: &mut Heap, _pragma: PragmaId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_import(&mut self, _h: &mut Heap, _import: ImportId) -> VisitorResult {
 
        Ok(())
 
    }
 

	
 
    fn visit_symbol_definition(&mut self, h: &mut Heap, def: DefinitionId) -> VisitorResult {
 
        recursive_symbol_definition(self, h, def)
 
    }
 
    fn visit_struct_definition(&mut self, _h: &mut Heap, _def: StructId) -> VisitorResult {
 
    fn visit_struct_definition(&mut self, _h: &mut Heap, _def: StructDefinitionId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_enum_definition(&mut self, _h: &mut Heap, _def: EnumId) -> VisitorResult {
 
    fn visit_enum_definition(&mut self, _h: &mut Heap, _def: EnumDefinitionId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_union_definition(&mut self, _h: &mut Heap, _def: UnionId) -> VisitorResult {
 
    fn visit_union_definition(&mut self, _h: &mut Heap, _def: UnionDefinitionId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_component_definition(&mut self, h: &mut Heap, def: ComponentDefinitionId) -> VisitorResult {
 
        recursive_component_definition(self, h, def)
 
    }
 
    fn visit_composite_definition(&mut self, h: &mut Heap, def: ComponentDefinitionId) -> VisitorResult {
 
        recursive_composite_definition(self, h, def)
 
    }
 
    fn visit_primitive_definition(&mut self, h: &mut Heap, def: ComponentDefinitionId) -> VisitorResult {
 
        recursive_primitive_definition(self, h, def)
 
    }
 
    fn visit_function_definition(&mut self, h: &mut Heap, def: FunctionId) -> VisitorResult {
 
    fn visit_function_definition(&mut self, h: &mut Heap, def: FunctionDefinitionId) -> VisitorResult {
 
        recursive_function_definition(self, h, def)
 
    }
 

	
 
    fn visit_variable_declaration(&mut self, h: &mut Heap, decl: VariableId) -> VisitorResult {
 
        recursive_variable_declaration(self, h, decl)
 
    }
 
    fn visit_parameter_declaration(&mut self, _h: &mut Heap, _decl: ParameterId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_local_declaration(&mut self, _h: &mut Heap, _decl: LocalId) -> VisitorResult {
 
        Ok(())
 
    }
 

	
 
    fn visit_statement(&mut self, h: &mut Heap, stmt: StatementId) -> VisitorResult {
 
        recursive_statement(self, h, stmt)
 
    }
 
    fn visit_local_statement(&mut self, h: &mut Heap, stmt: LocalStatementId) -> VisitorResult {
 
        recursive_local_statement(self, h, stmt)
 
    }
 
    fn visit_memory_statement(&mut self, _h: &mut Heap, _stmt: MemoryStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_channel_statement(
 
        &mut self,
 
        _h: &mut Heap,
 
        _stmt: ChannelStatementId,
 
    ) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_block_statement(&mut self, h: &mut Heap, stmt: BlockStatementId) -> VisitorResult {
 
        recursive_block_statement(self, h, stmt)
 
    }
 
    fn visit_labeled_statement(&mut self, h: &mut Heap, stmt: LabeledStatementId) -> VisitorResult {
 
        recursive_labeled_statement(self, h, stmt)
 
    }
 
    fn visit_skip_statement(&mut self, _h: &mut Heap, _stmt: SkipStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_if_statement(&mut self, h: &mut Heap, stmt: IfStatementId) -> VisitorResult {
 
        recursive_if_statement(self, h, stmt)
 
    }
 
    fn visit_end_if_statement(&mut self, _h: &mut Heap, _stmt: EndIfStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_while_statement(&mut self, h: &mut Heap, stmt: WhileStatementId) -> VisitorResult {
 
        recursive_while_statement(self, h, stmt)
 
    }
 
    fn visit_end_while_statement(&mut self, _h: &mut Heap, _stmt: EndWhileStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_break_statement(&mut self, _h: &mut Heap, _stmt: BreakStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_continue_statement(
 
        &mut self,
 
        _h: &mut Heap,
 
        _stmt: ContinueStatementId,
 
    ) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_synchronous_statement(
 
        &mut self,
 
        h: &mut Heap,
 
        stmt: SynchronousStatementId,
 
    ) -> VisitorResult {
 
        recursive_synchronous_statement(self, h, stmt)
 
    }
 
    fn visit_end_synchronous_statement(&mut self, _h: &mut Heap, _stmt: EndSynchronousStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_return_statement(&mut self, h: &mut Heap, stmt: ReturnStatementId) -> VisitorResult {
 
        recursive_return_statement(self, h, stmt)
 
    }
 
    fn visit_assert_statement(&mut self, h: &mut Heap, stmt: AssertStatementId) -> VisitorResult {
        recursive_assert_statement(self, h, stmt)
    }
 
    fn visit_goto_statement(&mut self, _h: &mut Heap, _stmt: GotoStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_new_statement(&mut self, h: &mut Heap, stmt: NewStatementId) -> VisitorResult {
 
        recursive_new_statement(self, h, stmt)
 
    }
 
    fn visit_expression_statement(
 
        &mut self,
 
        h: &mut Heap,
 
        stmt: ExpressionStatementId,
 
    ) -> VisitorResult {
 
        recursive_expression_statement(self, h, stmt)
 
    }
 

	
 
    fn visit_expression(&mut self, h: &mut Heap, expr: ExpressionId) -> VisitorResult {
 
        recursive_expression(self, h, expr)
 
    }
 
    fn visit_assignment_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: AssignmentExpressionId,
 
    ) -> VisitorResult {
 
        recursive_assignment_expression(self, h, expr)
 
    }
 
    fn visit_binding_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: BindingExpressionId
 
    ) -> VisitorResult {
 
        recursive_binding_expression(self, h, expr)
 
    }
 
    fn visit_conditional_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: ConditionalExpressionId,
 
    ) -> VisitorResult {
 
        recursive_conditional_expression(self, h, expr)
 
    }
 
    fn visit_binary_expression(&mut self, h: &mut Heap, expr: BinaryExpressionId) -> VisitorResult {
 
        recursive_binary_expression(self, h, expr)
 
    }
 
    fn visit_unary_expression(&mut self, h: &mut Heap, expr: UnaryExpressionId) -> VisitorResult {
 
        recursive_unary_expression(self, h, expr)
 
    }
 
    fn visit_indexing_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: IndexingExpressionId,
 
    ) -> VisitorResult {
 
        recursive_indexing_expression(self, h, expr)
 
    }
 
    fn visit_slicing_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: SlicingExpressionId,
 
    ) -> VisitorResult {
 
        recursive_slicing_expression(self, h, expr)
 
    }
 
    fn visit_select_expression(&mut self, h: &mut Heap, expr: SelectExpressionId) -> VisitorResult {
 
        recursive_select_expression(self, h, expr)
 
    }
 
    fn visit_array_expression(&mut self, h: &mut Heap, expr: ArrayExpressionId) -> VisitorResult {
 
        recursive_array_expression(self, h, expr)
 
    }
 
    fn visit_call_expression(&mut self, h: &mut Heap, expr: CallExpressionId) -> VisitorResult {
 
        recursive_call_expression(self, h, expr)
 
    }
 
    fn visit_constant_expression(
 
        &mut self,
 
        _h: &mut Heap,
 
        _expr: LiteralExpressionId,
 
    ) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_variable_expression(
 
        &mut self,
 
        _h: &mut Heap,
 
        _expr: VariableExpressionId,
 
    ) -> VisitorResult {
 
        Ok(())
 
    }
 
}
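// A minimal example of how this trait is meant to be used (illustrative only,
// not part of the changeset; the actual rule-checking visitors follow further
// below): override just the hooks of interest and delegate traversal to the
// matching `recursive_*` helper.
//
//     struct CountSyncStatements { count: usize }
//
//     impl Visitor for CountSyncStatements {
//         fn visit_synchronous_statement(&mut self, h: &mut Heap, stmt: SynchronousStatementId) -> VisitorResult {
//             self.count += 1;
//             recursive_synchronous_statement(self, h, stmt)
//         }
//     }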
 

	
 
// Bubble-up helpers
 
fn recursive_parameter_as_variable<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    param: ParameterId,
 
) -> VisitorResult {
 
    this.visit_variable_declaration(h, param.upcast())
 
}
 

	
 
fn recursive_local_as_variable<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    local: LocalId,
 
) -> VisitorResult {
 
    this.visit_variable_declaration(h, local.upcast())
 
}
 

	
 
fn recursive_call_expression_as_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    call: CallExpressionId,
 
) -> VisitorResult {
 
    this.visit_expression(h, call.upcast())
 
}
 

	
 
// Recursive procedures
 
fn recursive_protocol_description<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    pd: RootId,
 
) -> VisitorResult {
 
    for &pragma in h[pd].pragmas.clone().iter() {
 
        this.visit_pragma(h, pragma)?;
 
    }
 
    for &import in h[pd].imports.clone().iter() {
 
        this.visit_import(h, import)?;
 
    }
 
    for &def in h[pd].definitions.clone().iter() {
 
        this.visit_symbol_definition(h, def)?;
 
    }
 
    Ok(())
 
}
 

	
 
fn recursive_symbol_definition<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    def: DefinitionId,
 
) -> VisitorResult {
 
    // We clone the definition in case it is modified
 
    // TODO: Fix me
 
    match h[def].clone() {
 
        Definition::Struct(def) => this.visit_struct_definition(h, def.this),
 
        Definition::Enum(def) => this.visit_enum_definition(h, def.this),
 
        Definition::Union(def) => this.visit_union_definition(h, def.this),
 
        Definition::Component(cdef) => this.visit_component_definition(h, cdef.this),
 
        Definition::Function(fdef) => this.visit_function_definition(h, fdef.this),
 
    }
 
}
 

	
 
fn recursive_component_definition<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    def: ComponentDefinitionId,
 
) -> VisitorResult {
 
    let component_variant = h[def].variant;
 
    match component_variant {
 
        ComponentVariant::Primitive => this.visit_primitive_definition(h, def),
 
        ComponentVariant::Composite => this.visit_composite_definition(h, def),
 
    }
 
}
 

	
 
fn recursive_composite_definition<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    def: ComponentDefinitionId,
 
) -> VisitorResult {
 
    for &param in h[def].parameters.clone().iter() {
 
        recursive_parameter_as_variable(this, h, param)?;
 
    }
 
    this.visit_block_statement(h, h[def].body)
 
}
 

	
 
fn recursive_primitive_definition<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    def: ComponentDefinitionId,
 
) -> VisitorResult {
 
    for &param in h[def].parameters.clone().iter() {
 
        recursive_parameter_as_variable(this, h, param)?;
 
    }
 
    this.visit_block_statement(h, h[def].body)
 
}
 

	
 
fn recursive_function_definition<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    def: FunctionId,
 
    def: FunctionDefinitionId,
 
) -> VisitorResult {
 
    for &param in h[def].parameters.clone().iter() {
 
        recursive_parameter_as_variable(this, h, param)?;
 
    }
 
    this.visit_statement(h, h[def].body)
 
    this.visit_block_statement(h, h[def].body)
 
}
 

	
 
fn recursive_variable_declaration<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    decl: VariableId,
 
) -> VisitorResult {
 
    match h[decl].clone() {
 
        Variable::Parameter(decl) => this.visit_parameter_declaration(h, decl.this),
 
        Variable::Local(decl) => this.visit_local_declaration(h, decl.this),
 
    }
 
}
 

	
 
fn recursive_statement<T: Visitor>(this: &mut T, h: &mut Heap, stmt: StatementId) -> VisitorResult {
 
    match h[stmt].clone() {
 
        Statement::Block(stmt) => this.visit_block_statement(h, stmt.this),
 
        Statement::Local(stmt) => this.visit_local_statement(h, stmt.this()),
 
        Statement::Skip(stmt) => this.visit_skip_statement(h, stmt.this),
 
        Statement::Labeled(stmt) => this.visit_labeled_statement(h, stmt.this),
 
        Statement::If(stmt) => this.visit_if_statement(h, stmt.this),
 
        Statement::While(stmt) => this.visit_while_statement(h, stmt.this),
 
        Statement::Break(stmt) => this.visit_break_statement(h, stmt.this),
 
        Statement::Continue(stmt) => this.visit_continue_statement(h, stmt.this),
 
        Statement::Synchronous(stmt) => this.visit_synchronous_statement(h, stmt.this),
 
        Statement::Return(stmt) => this.visit_return_statement(h, stmt.this),
 
        Statement::Assert(stmt) => this.visit_assert_statement(h, stmt.this),
 
        Statement::Goto(stmt) => this.visit_goto_statement(h, stmt.this),
 
        Statement::New(stmt) => this.visit_new_statement(h, stmt.this),
 
        Statement::Expression(stmt) => this.visit_expression_statement(h, stmt.this),
 
        Statement::EndSynchronous(stmt) => this.visit_end_synchronous_statement(h, stmt.this),
 
        Statement::EndWhile(stmt) => this.visit_end_while_statement(h, stmt.this),
 
        Statement::EndIf(stmt) => this.visit_end_if_statement(h, stmt.this),
 
    }
 
}
 

	
 
fn recursive_block_statement<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    block: BlockStatementId,
 
) -> VisitorResult {
 
    for &local in h[block].locals.clone().iter() {
 
        recursive_local_as_variable(this, h, local)?;
 
    }
 
    for &stmt in h[block].statements.clone().iter() {
 
        this.visit_statement(h, stmt)?;
 
    }
 
    Ok(())
 
}
 

	
 
fn recursive_local_statement<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    stmt: LocalStatementId,
 
) -> VisitorResult {
 
    match h[stmt].clone() {
 
        LocalStatement::Channel(stmt) => this.visit_channel_statement(h, stmt.this),
 
        LocalStatement::Memory(stmt) => this.visit_memory_statement(h, stmt.this),
 
    }
 
}
 

	
 
fn recursive_labeled_statement<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    stmt: LabeledStatementId,
 
) -> VisitorResult {
 
    this.visit_statement(h, h[stmt].body)
 
}
 

	
 
fn recursive_if_statement<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    stmt: IfStatementId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[stmt].test)?;
 
    this.visit_block_statement(h, h[stmt].true_body)?;
 
    if let Some(false_body) = h[stmt].false_body {
 
        this.visit_block_statement(h, false_body)?;
 
    }
 
    Ok(())
 
}
 

	
 
fn recursive_while_statement<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    stmt: WhileStatementId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[stmt].test)?;
 
    this.visit_block_statement(h, h[stmt].body)
 
}
 

	
 
fn recursive_synchronous_statement<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    stmt: SynchronousStatementId,
 
) -> VisitorResult {
 
    // TODO: Check what this was used for
 
    // for &param in h[stmt].parameters.clone().iter() {
 
    //     recursive_parameter_as_variable(this, h, param)?;
 
    // }
 
    this.visit_block_statement(h, h[stmt].body)
 
}
 

	
 
fn recursive_return_statement<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    stmt: ReturnStatementId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[stmt].expression)
 
}
 

	
 
fn recursive_assert_statement<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    stmt: AssertStatementId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[stmt].expression)
 
    debug_assert_eq!(h[stmt].expressions.len(), 1);
 
    this.visit_expression(h, h[stmt].expressions[0])
 
}
 

	
 
fn recursive_new_statement<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    stmt: NewStatementId,
 
) -> VisitorResult {
 
    recursive_call_expression_as_expression(this, h, h[stmt].expression)
 
}
 

	
 
fn recursive_expression_statement<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    stmt: ExpressionStatementId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[stmt].expression)
 
}
 

	
 
fn recursive_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    expr: ExpressionId,
 
) -> VisitorResult {
 
    match h[expr].clone() {
 
        Expression::Assignment(expr) => this.visit_assignment_expression(h, expr.this),
 
        Expression::Binding(expr) => this.visit_binding_expression(h, expr.this),
 
        Expression::Conditional(expr) => this.visit_conditional_expression(h, expr.this),
 
        Expression::Binary(expr) => this.visit_binary_expression(h, expr.this),
 
        Expression::Unary(expr) => this.visit_unary_expression(h, expr.this),
 
        Expression::Indexing(expr) => this.visit_indexing_expression(h, expr.this),
 
        Expression::Slicing(expr) => this.visit_slicing_expression(h, expr.this),
 
        Expression::Select(expr) => this.visit_select_expression(h, expr.this),
 
        Expression::Array(expr) => this.visit_array_expression(h, expr.this),
 
        Expression::Literal(expr) => this.visit_constant_expression(h, expr.this),
 
        Expression::Call(expr) => this.visit_call_expression(h, expr.this),
 
        Expression::Variable(expr) => this.visit_variable_expression(h, expr.this),
 
    }
 
}
 

	
 
fn recursive_assignment_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    expr: AssignmentExpressionId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[expr].left)?;
 
    this.visit_expression(h, h[expr].right)
 
}
 

	
 
fn recursive_binding_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    expr: BindingExpressionId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[expr].left.upcast())?;
 
    this.visit_expression(h, h[expr].right)
 
}
 

	
 
fn recursive_conditional_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    expr: ConditionalExpressionId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[expr].test)?;
 
    this.visit_expression(h, h[expr].true_expression)?;
 
    this.visit_expression(h, h[expr].false_expression)
 
}
 

	
 
fn recursive_binary_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    expr: BinaryExpressionId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[expr].left)?;
 
    this.visit_expression(h, h[expr].right)
 
}
 

	
 
fn recursive_unary_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    expr: UnaryExpressionId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[expr].expression)
 
}
 

	
 
fn recursive_indexing_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    expr: IndexingExpressionId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[expr].subject)?;
 
    this.visit_expression(h, h[expr].index)
 
}
 

	
 
fn recursive_slicing_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    expr: SlicingExpressionId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[expr].subject)?;
 
    this.visit_expression(h, h[expr].from_index)?;
 
    this.visit_expression(h, h[expr].to_index)
 
}
 

	
 
fn recursive_select_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    expr: SelectExpressionId,
 
) -> VisitorResult {
 
    this.visit_expression(h, h[expr].subject)
 
}
 

	
 
fn recursive_array_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    expr: ArrayExpressionId,
 
) -> VisitorResult {
 
    for &expr in h[expr].elements.clone().iter() {
 
        this.visit_expression(h, expr)?;
 
    }
 
    Ok(())
 
}
 

	
 
fn recursive_call_expression<T: Visitor>(
 
    this: &mut T,
 
    h: &mut Heap,
 
    expr: CallExpressionId,
 
) -> VisitorResult {
 
    for &expr in h[expr].arguments.clone().iter() {
 
        this.visit_expression(h, expr)?;
 
    }
 
    Ok(())
 
}
 

	
 
// ====================
 
// Grammar Rules
 
// ====================
 

	
 
pub(crate) struct NestedSynchronousStatements {
 
    illegal: bool,
 
}
 

	
 
impl NestedSynchronousStatements {
 
    pub(crate) fn new() -> Self {
 
        NestedSynchronousStatements { illegal: false }
 
    }
 
}
 

	
 
impl Visitor for NestedSynchronousStatements {
 
    fn visit_composite_definition(&mut self, h: &mut Heap, def: ComponentDefinitionId) -> VisitorResult {
 
        assert!(!self.illegal);
 
        self.illegal = true;
 
        recursive_composite_definition(self, h, def)?;
 
        self.illegal = false;
 
        Ok(())
 
    }
 
    fn visit_function_definition(&mut self, h: &mut Heap, def: FunctionDefinitionId) -> VisitorResult {
 
        assert!(!self.illegal);
 
        self.illegal = true;
 
        recursive_function_definition(self, h, def)?;
 
        self.illegal = false;
 
        Ok(())
 
    }
 
    fn visit_synchronous_statement(
 
        &mut self,
 
        h: &mut Heap,
 
        stmt: SynchronousStatementId,
 
    ) -> VisitorResult {
 
        if self.illegal {
 
            return Err((
 
                h[stmt].position(),
 
                "Illegal nested synchronous statement".to_string(),
 
            ));
 
        }
 
        self.illegal = true;
 
        recursive_synchronous_statement(self, h, stmt)?;
 
        self.illegal = false;
 
        Ok(())
 
    }
 
    fn visit_expression(&mut self, _h: &mut Heap, _expr: ExpressionId) -> VisitorResult {
 
        Ok(())
 
    }
 
}
 

	
 
pub(crate) struct ChannelStatementOccurrences {
 
    illegal: bool,
 
}
 

	
 
impl ChannelStatementOccurrences {
 
    pub(crate) fn new() -> Self {
 
        ChannelStatementOccurrences { illegal: false }
 
    }
 
}
 

	
 
impl Visitor for ChannelStatementOccurrences {
 
    fn visit_primitive_definition(&mut self, h: &mut Heap, def: ComponentDefinitionId) -> VisitorResult {
 
        assert!(!self.illegal);
 
        self.illegal = true;
 
        recursive_primitive_definition(self, h, def)?;
 
        self.illegal = false;
 
        Ok(())
 
    }
 
    fn visit_function_definition(&mut self, h: &mut Heap, def: FunctionId) -> VisitorResult {
 
        assert!(!self.illegal);
 
        self.illegal = true;
 
        recursive_function_definition(self, h, def)?;
 
        self.illegal = false;
 
        Ok(())
 
    }
 
    fn visit_channel_statement(&mut self, h: &mut Heap, stmt: ChannelStatementId) -> VisitorResult {
 
        if self.illegal {
 
            return Err((h[stmt].position(), "Illegal channel declaration".to_string()));
 
        }
 
        Ok(())
 
    }
 
    fn visit_expression(&mut self, _h: &mut Heap, _expr: ExpressionId) -> VisitorResult {
 
        Ok(())
 
    }
 
}
 

	
 
pub(crate) struct FunctionStatementReturns {}
 

	
 
impl FunctionStatementReturns {
 
    pub(crate) fn new() -> Self {
 
        FunctionStatementReturns {}
 
    }
 
    fn function_error(&self, position: InputPosition) -> VisitorResult {
 
        Err((position, "Function definition must return".to_string()))
 
    }
 
}
 

	
 
impl Visitor for FunctionStatementReturns {
 
    fn visit_component_definition(&mut self, _h: &mut Heap, _def: ComponentDefinitionId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_variable_declaration(&mut self, _h: &mut Heap, _decl: VariableId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_block_statement(&mut self, h: &mut Heap, block: BlockStatementId) -> VisitorResult {
 
        let len = h[block].statements.len();
 
        assert!(len > 0);
 
        self.visit_statement(h, h[block].statements[len - 1])
 
    }
 
    fn visit_skip_statement(&mut self, h: &mut Heap, stmt: SkipStatementId) -> VisitorResult {
 
        self.function_error(h[stmt].position)
 
    }
 
    fn visit_break_statement(&mut self, h: &mut Heap, stmt: BreakStatementId) -> VisitorResult {
 
        self.function_error(h[stmt].position)
 
    }
 
    fn visit_continue_statement(
 
        &mut self,
 
        h: &mut Heap,
 
        stmt: ContinueStatementId,
 
    ) -> VisitorResult {
 
        self.function_error(h[stmt].position)
 
    }
 
    fn visit_assert_statement(&mut self, h: &mut Heap, stmt: AssertStatementId) -> VisitorResult {
 
        self.function_error(h[stmt].position)
 
    }
 
    fn visit_new_statement(&mut self, h: &mut Heap, stmt: NewStatementId) -> VisitorResult {
 
        self.function_error(h[stmt].position)
 
    }
 
    fn visit_expression_statement(
 
        &mut self,
 
        h: &mut Heap,
 
        stmt: ExpressionStatementId,
 
    ) -> VisitorResult {
 
        self.function_error(h[stmt].position)
 
    }
 
    fn visit_expression(&mut self, _h: &mut Heap, _expr: ExpressionId) -> VisitorResult {
 
        Ok(())
 
    }
 
}
 

	
 
pub(crate) struct ComponentStatementReturnNew {
 
    illegal_new: bool,
 
    illegal_return: bool,
 
}
 

	
 
impl ComponentStatementReturnNew {
 
    pub(crate) fn new() -> Self {
 
        ComponentStatementReturnNew { illegal_new: false, illegal_return: false }
 
    }
 
}
 

	
 
impl Visitor for ComponentStatementReturnNew {
 
    fn visit_component_definition(&mut self, h: &mut Heap, def: ComponentDefinitionId) -> VisitorResult {
 
        assert!(!(self.illegal_new || self.illegal_return));
 
        self.illegal_return = true;
 
        recursive_component_definition(self, h, def)?;
 
        self.illegal_return = false;
 
        Ok(())
 
    }
 
    fn visit_primitive_definition(&mut self, h: &mut Heap, def: ComponentDefinitionId) -> VisitorResult {
 
        assert!(!self.illegal_new);
 
        self.illegal_new = true;
 
        recursive_primitive_definition(self, h, def)?;
 
        self.illegal_new = false;
 
        Ok(())
 
    }
 
    fn visit_function_definition(&mut self, h: &mut Heap, def: FunctionId) -> VisitorResult {
 
        assert!(!(self.illegal_new || self.illegal_return));
 
        self.illegal_new = true;
 
        recursive_function_definition(self, h, def)?;
 
        self.illegal_new = false;
 
        Ok(())
 
    }
 
    fn visit_variable_declaration(&mut self, _h: &mut Heap, _decl: VariableId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_return_statement(&mut self, h: &mut Heap, stmt: ReturnStatementId) -> VisitorResult {
 
        if self.illegal_return {
 
            Err((h[stmt].position, "Component definition must not return".to_string()))
 
        } else {
 
            recursive_return_statement(self, h, stmt)
 
        }
 
    }
 
    fn visit_new_statement(&mut self, h: &mut Heap, stmt: NewStatementId) -> VisitorResult {
 
        if self.illegal_new {
 
            Err((
 
                h[stmt].position,
 
                "Symbol definition contains illegal new statement".to_string(),
 
            ))
 
        } else {
 
            recursive_new_statement(self, h, stmt)
 
        }
 
    }
 
    fn visit_expression(&mut self, _h: &mut Heap, _expr: ExpressionId) -> VisitorResult {
 
        Ok(())
 
    }
 
}
 

	
 
pub(crate) struct CheckBuiltinOccurrences {
 
    legal: bool,
 
}
 

	
 
impl CheckBuiltinOccurrences {
 
    pub(crate) fn new() -> Self {
 
        CheckBuiltinOccurrences { legal: false }
 
    }
 
}
 

	
 
impl Visitor for CheckBuiltinOccurrences {
 
    fn visit_synchronous_statement(
 
        &mut self,
 
        h: &mut Heap,
 
        stmt: SynchronousStatementId,
 
    ) -> VisitorResult {
 
        assert!(!self.legal);
 
        self.legal = true;
 
        recursive_synchronous_statement(self, h, stmt)?;
 
        self.legal = false;
 
        Ok(())
 
    }
 
    fn visit_call_expression(&mut self, h: &mut Heap, expr: CallExpressionId) -> VisitorResult {
 
        match h[expr].method {
 
            Method::Get | Method::Fires => {
 
                if !self.legal {
 
                    return Err((h[expr].position, "Illegal built-in occurrence".to_string()));
 
                }
 
            }
 
            _ => {}
 
        }
 
        recursive_call_expression(self, h, expr)
 
    }
 
}
 

	
 
pub(crate) struct BuildScope {
 
    scope: Option<Scope>,
 
}
 

	
 
impl BuildScope {
 
    pub(crate) fn new() -> Self {
 
        BuildScope { scope: None }
 
    }
 
}
 

	
 
impl Visitor for BuildScope {
 
    fn visit_symbol_definition(&mut self, h: &mut Heap, def: DefinitionId) -> VisitorResult {
 
        assert!(self.scope.is_none());
 
        self.scope = Some(Scope::Definition(def));
 
        recursive_symbol_definition(self, h, def)?;
 
        self.scope = None;
 
        Ok(())
 
    }
 
    fn visit_block_statement(&mut self, h: &mut Heap, stmt: BlockStatementId) -> VisitorResult {
 
        assert!(!self.scope.is_none());
 
        let old = self.scope;
 
        // First store the current scope
 
        h[stmt].parent_scope = self.scope;
 
        // Then move scope down to current block
 
        self.scope = Some(Scope::Regular(stmt));
 
        recursive_block_statement(self, h, stmt)?;
 
        // Move scope back up
 
        self.scope = old;
 
        Ok(())
 
    }
 
    fn visit_synchronous_statement(
 
        &mut self,
 
        h: &mut Heap,
 
        stmt: SynchronousStatementId,
 
    ) -> VisitorResult {
 
        assert!(!self.scope.is_none());
 
        let old = self.scope;
 
        // First store the current scope
 
        h[stmt].parent_scope = self.scope;
 
        // Then move scope down to current sync
 
        // TODO: Should be legal-ish, but very wrong
 
        self.scope = Some(Scope::Synchronous((stmt, BlockStatementId(stmt.upcast()))));
 
        recursive_synchronous_statement(self, h, stmt)?;
 
        // Move scope back up
 
        self.scope = old;
 
        Ok(())
 
    }
 
    fn visit_expression(&mut self, _h: &mut Heap, _expr: ExpressionId) -> VisitorResult {
 
        Ok(())
 
    }
 
}
 

	
 
pub(crate) struct UniqueStatementId(StatementId);
 

	
 
pub(crate) struct LinkStatements {
 
    prev: Option<UniqueStatementId>,
 
}
 

	
 
impl LinkStatements {
 
    pub(crate) fn new() -> Self {
 
        LinkStatements { prev: None }
 
    }
 
}
 

	
 
impl Visitor for LinkStatements {
 
    fn visit_symbol_definition(&mut self, h: &mut Heap, def: DefinitionId) -> VisitorResult {
 
        assert!(self.prev.is_none());
 
        recursive_symbol_definition(self, h, def)?;
 
        // Clear out last statement
 
        self.prev = None;
 
        Ok(())
 
    }
 
    fn visit_statement(&mut self, h: &mut Heap, stmt: StatementId) -> VisitorResult {
 
        if let Some(UniqueStatementId(prev)) = self.prev.take() {
 
            h[prev].link_next(stmt);
 
        }
 
        recursive_statement(self, h, stmt)
 
    }
 
    fn visit_local_statement(&mut self, _h: &mut Heap, stmt: LocalStatementId) -> VisitorResult {
 
        self.prev = Some(UniqueStatementId(stmt.upcast()));
 
        Ok(())
 
    }
 
    fn visit_labeled_statement(&mut self, h: &mut Heap, stmt: LabeledStatementId) -> VisitorResult {
 
        recursive_labeled_statement(self, h, stmt)
 
    }
 
    fn visit_skip_statement(&mut self, _h: &mut Heap, stmt: SkipStatementId) -> VisitorResult {
 
        self.prev = Some(UniqueStatementId(stmt.upcast()));
 
        Ok(())
 
    }
 
    fn visit_if_statement(&mut self, h: &mut Heap, stmt: IfStatementId) -> VisitorResult {
 
        // Link the two branches to the corresponding EndIf pseudo-statement
 
        let end_if_id = h[stmt].end_if;
 
        assert!(end_if_id.is_some());
 
        let end_if_id = end_if_id.unwrap();
 

	
 
        assert!(self.prev.is_none());
 
        self.visit_block_statement(h, h[stmt].true_body)?;
 
        if let Some(UniqueStatementId(prev)) = self.prev.take() {
 
            h[prev].link_next(end_if_id.upcast());
 
        }
 

	
 
        assert!(self.prev.is_none());
 
        if let Some(false_body) = h[stmt].false_body {
 
            self.visit_block_statement(h, false_body)?;
 
        }
 
        if let Some(UniqueStatementId(prev)) = self.prev.take() {
 
            h[prev].link_next(end_if_id.upcast());
 
        }
 

	
 
        // Use the pseudo-statement as the statement whose next pointer gets updated
 
        // self.prev = Some(UniqueStatementId(end_if_id.upcast()));
 
        Ok(())
 
    }
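    // Illustrative result of the linking above (not part of the changeset): for
    // `if (c) { a(); } else { b(); }` followed by `d();`, both `a()` and `b()`
    // get their next pointer set to the shared EndIf pseudo-statement; EndIf is
    // then linked to `d()` when it is visited as part of the enclosing block.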
 
    fn visit_end_if_statement(&mut self, _h: &mut Heap, stmt: EndIfStatementId) -> VisitorResult {
 
        assert!(self.prev.is_none());
 
        self.prev = Some(UniqueStatementId(stmt.upcast()));
 
        Ok(())
 
    }
 
    fn visit_while_statement(&mut self, h: &mut Heap, stmt: WhileStatementId) -> VisitorResult {
 
        // We allocate a pseudo-statement that serves as the jump target for break statements
 
        // Update the while's next statement to point to the pseudo-statement
 
        let end_while_id = h[stmt].end_while;
 
        assert!(end_while_id.is_some());
 
        // let end_while_id = end_while_id.unwrap();
 

	
 
        assert!(self.prev.is_none());
 
        self.visit_block_statement(h, h[stmt].body)?;
 
        // The body's next statement loops back to the while statement itself
 
        // Note: continue statements also loop back to the while statement itself
 
        if let Some(UniqueStatementId(prev)) = self.prev.take() {
 
            h[prev].link_next(stmt.upcast());
 
        }
 
        // Use the while statement as the statement where the next pointer is updated
 
        // self.prev = Some(UniqueStatementId(end_while_id.upcast()));
 
        Ok(())
 
    }
 
    fn visit_end_while_statement(&mut self, _h: &mut Heap, stmt: EndWhileStatementId) -> VisitorResult {
 
        assert!(self.prev.is_none());
 
        self.prev = Some(UniqueStatementId(stmt.upcast()));
 
        Ok(())
 
    }
 
    fn visit_break_statement(&mut self, _h: &mut Heap, _stmt: BreakStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_continue_statement(
 
        &mut self,
 
        _h: &mut Heap,
 
        _stmt: ContinueStatementId,
 
    ) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_synchronous_statement(
 
        &mut self,
 
        h: &mut Heap,
 
        stmt: SynchronousStatementId,
 
    ) -> VisitorResult {
 
        // Allocate a pseudo-statement that helps the evaluator issue a command marking the
 
        // end of the synchronous block. Every evaluation pauses at this point and resumes
 
        // only once its thread is selected as the unique thread to continue.
 
        let end_sync_id = h[stmt].end_sync;
 
        assert!(end_sync_id.is_some());
 
        let end_sync_id = end_sync_id.unwrap();
 

	
 
        assert!(self.prev.is_none());
 
        self.visit_block_statement(h, h[stmt].body)?;
 
        // The body's next statement points to the pseudo element
 
        if let Some(UniqueStatementId(prev)) = self.prev.take() {
 
            h[prev].link_next(end_sync_id.upcast());
 
        }
 
        // Use the pseudo-statement as the statement where the next pointer is updated
 
        // self.prev = Some(UniqueStatementId(end_sync_id.upcast()));
 
        Ok(())
 
    }
 
    fn visit_end_synchronous_statement(&mut self, _h: &mut Heap, stmt: EndSynchronousStatementId) -> VisitorResult {
 
        assert!(self.prev.is_none());
 
        self.prev = Some(UniqueStatementId(stmt.upcast()));
 
        Ok(())
 
    }
 
    fn visit_return_statement(&mut self, _h: &mut Heap, _stmt: ReturnStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_assert_statement(&mut self, _h: &mut Heap, stmt: AssertStatementId) -> VisitorResult {
 
        self.prev = Some(UniqueStatementId(stmt.upcast()));
 
        Ok(())
 
    }
 
    fn visit_goto_statement(&mut self, _h: &mut Heap, _stmt: GotoStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_new_statement(&mut self, _h: &mut Heap, stmt: NewStatementId) -> VisitorResult {
 
        self.prev = Some(UniqueStatementId(stmt.upcast()));
 
        Ok(())
 
    }
 
    fn visit_expression_statement(
 
        &mut self,
 
        _h: &mut Heap,
 
        stmt: ExpressionStatementId,
 
    ) -> VisitorResult {
 
        self.prev = Some(UniqueStatementId(stmt.upcast()));
 
        Ok(())
 
    }
 
    fn visit_expression(&mut self, _h: &mut Heap, _expr: ExpressionId) -> VisitorResult {
 
        Ok(())
 
    }
 
}
 

	
 
pub(crate) struct BuildLabels {
 
    block: Option<BlockStatementId>,
 
    sync_enclosure: Option<SynchronousStatementId>,
 
}
 

	
 
impl BuildLabels {
 
    pub(crate) fn new() -> Self {
 
        BuildLabels { block: None, sync_enclosure: None }
 
    }
 
}
 

	
 
impl Visitor for BuildLabels {
 
    fn visit_block_statement(&mut self, h: &mut Heap, stmt: BlockStatementId) -> VisitorResult {
 
        assert_eq!(self.block, h[stmt].parent_block(h));
 
        let old = self.block;
 
        self.block = Some(stmt);
 
        recursive_block_statement(self, h, stmt)?;
 
        self.block = old;
 
        Ok(())
 
    }
 
    fn visit_labeled_statement(&mut self, h: &mut Heap, stmt: LabeledStatementId) -> VisitorResult {
 
        assert!(!self.block.is_none());
 
        // Store label in current block (on the fly)
 
        h[self.block.unwrap()].labels.push(stmt);
 
        // Update synchronous scope of label
 
        h[stmt].in_sync = self.sync_enclosure;
 
        recursive_labeled_statement(self, h, stmt)
 
    }
 
    fn visit_while_statement(&mut self, h: &mut Heap, stmt: WhileStatementId) -> VisitorResult {
 
        h[stmt].in_sync = self.sync_enclosure;
 
        recursive_while_statement(self, h, stmt)
 
    }
 
    fn visit_synchronous_statement(
 
        &mut self,
 
        h: &mut Heap,
 
        stmt: SynchronousStatementId,
 
    ) -> VisitorResult {
 
        assert!(self.sync_enclosure.is_none());
 
        self.sync_enclosure = Some(stmt);
 
        recursive_synchronous_statement(self, h, stmt)?;
 
        self.sync_enclosure = None;
 
        Ok(())
 
    }
 
    fn visit_expression(&mut self, _h: &mut Heap, _expr: ExpressionId) -> VisitorResult {
 
        Ok(())
 
    }
 
}
 

	
 
pub(crate) struct ResolveLabels {
 
    block: Option<BlockStatementId>,
 
    while_enclosure: Option<WhileStatementId>,
 
    sync_enclosure: Option<SynchronousStatementId>,
 
}
 

	
 
impl ResolveLabels {
 
    pub(crate) fn new() -> Self {
 
        ResolveLabels { block: None, while_enclosure: None, sync_enclosure: None }
 
    }
 
    fn check_duplicate_impl(
 
        h: &Heap,
 
        block: Option<BlockStatementId>,
 
        stmt: LabeledStatementId,
 
    ) -> VisitorResult {
 
        if let Some(block) = block {
 
            // Checking the parent blocks first is important: otherwise a label could
 
            // shadow an identically named label in an enclosing block, which is illegal.
 
            ResolveLabels::check_duplicate_impl(h, h[block].parent_block(h), stmt)?;
 
            // For the current block, check for a duplicate.
 
            for &other_stmt in h[block].labels.iter() {
 
                if other_stmt == stmt {
 
                    continue;
 
                } else {
 
                    if h[other_stmt].label == h[stmt].label {
 
                        return Err((h[stmt].position, "Duplicate label".to_string()));
 
                    }
 
                }
 
            }
 
        }
 
        Ok(())
 
    }
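    // Example of the shadowing rule (syntax sketched): given an outer
    // `again: while (c) { { again: while (d) { ... } } }`, the inner `again`
    // is reported as a duplicate because the parent blocks are searched first.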
 
    fn check_duplicate(&self, h: &Heap, stmt: LabeledStatementId) -> VisitorResult {
 
        ResolveLabels::check_duplicate_impl(h, self.block, stmt)
 
    }
 
    fn get_target(
 
        &self,
 
        h: &Heap,
 
        id: &Identifier,
 
    ) -> Result<LabeledStatementId, VisitorError> {
 
        if let Some(stmt) = ResolveLabels::find_target(h, self.block, id) {
 
            Ok(stmt)
 
        } else {
 
            Err((id.position, "Unresolved label".to_string()))
 
        }
 
    }
 
    fn find_target(
 
        h: &Heap,
 
        block: Option<BlockStatementId>,
 
        id: &Identifier,
 
    ) -> Option<LabeledStatementId> {
 
        if let Some(block) = block {
 
            // It does not matter in what order we find the labels.
 
            // If there are duplicates: that is checked elsewhere.
 
            for &stmt in h[block].labels.iter() {
 
                if h[stmt].label == *id {
 
                    return Some(stmt);
 
                }
 
            }
 
            if let Some(stmt) = ResolveLabels::find_target(h, h[block].parent_block(h), id) {
 
                return Some(stmt);
 
            }
 
        }
 
        None
 
    }
 
}
 

	
 
impl Visitor for ResolveLabels {
 
    fn visit_block_statement(&mut self, h: &mut Heap, stmt: BlockStatementId) -> VisitorResult {
 
        assert_eq!(self.block, h[stmt].parent_block(h));
 
        let old = self.block;
 
        self.block = Some(stmt);
 
        recursive_block_statement(self, h, stmt)?;
 
        self.block = old;
 
        Ok(())
 
    }
 
    fn visit_labeled_statement(&mut self, h: &mut Heap, stmt: LabeledStatementId) -> VisitorResult {
 
        assert!(!self.block.is_none());
 
        self.check_duplicate(h, stmt)?;
 
        recursive_labeled_statement(self, h, stmt)
 
    }
 
    fn visit_while_statement(&mut self, h: &mut Heap, stmt: WhileStatementId) -> VisitorResult {
 
        let old = self.while_enclosure;
 
        self.while_enclosure = Some(stmt);
 
        recursive_while_statement(self, h, stmt)?;
 
        self.while_enclosure = old;
 
        Ok(())
 
    }
 
    fn visit_break_statement(&mut self, h: &mut Heap, stmt: BreakStatementId) -> VisitorResult {
 
        let the_while;
 
        if let Some(label) = &h[stmt].label {
 
            let target = self.get_target(h, label)?;
 
            let target = &h[h[target].body];
 
            if !target.is_while() {
 
                return Err((
 
                    h[stmt].position,
 
                    "Illegal break: target not a while statement".to_string(),
 
                ));
 
            }
 
            the_while = target.as_while();
 
            // TODO: check if break is nested under while
 
        } else {
 
            if self.while_enclosure.is_none() {
 
                return Err((
 
                    h[stmt].position,
 
                    "Illegal break: no surrounding while statement".to_string(),
 
                ));
 
            }
 
            the_while = &h[self.while_enclosure.unwrap()];
 
            // break is always nested under while, by recursive visitor
 
        }
 
        if the_while.in_sync != self.sync_enclosure {
 
            return Err((
 
                h[stmt].position,
 
                "Illegal break: synchronous statement escape".to_string(),
 
            ));
 
        }
 
        h[stmt].target = the_while.end_while;
 
        Ok(())
 
    }
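    // For example, a labeled `break outer;` written inside `sync { ... }` whose
    // target `outer:` while-loop sits outside the synchronous block fails the
    // `in_sync` comparison above and is reported as a synchronous statement escape.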
 
    fn visit_continue_statement(
 
        &mut self,
 
        h: &mut Heap,
 
        stmt: ContinueStatementId,
 
    ) -> VisitorResult {
 
        let the_while;
 
        if let Some(label) = &h[stmt].label {
 
            let target = self.get_target(h, label)?;
 
            let target = &h[h[target].body];
 
            if !target.is_while() {
 
                return Err((
 
                    h[stmt].position,
 
                    "Illegal continue: target not a while statement".to_string(),
 
                ));
 
            }
 
            the_while = target.as_while();
 
            // TODO: check if continue is nested under while
 
        } else {
 
            if self.while_enclosure.is_none() {
 
                return Err((
 
                    h[stmt].position,
 
                    "Illegal continue: no surrounding while statement".to_string(),
 
                ));
 
            }
 
            the_while = &h[self.while_enclosure.unwrap()];
 
            // continue is always nested under while, by recursive visitor
 
        }
 
        if the_while.in_sync != self.sync_enclosure {
 
            return Err((
 
                h[stmt].position,
 
                "Illegal continue: synchronous statement escape".to_string(),
 
            ));
 
        }
 
        h[stmt].target = Some(the_while.this);
 
        Ok(())
 
    }
 
    fn visit_synchronous_statement(
 
        &mut self,
 
        h: &mut Heap,
 
        stmt: SynchronousStatementId,
 
    ) -> VisitorResult {
 
        assert!(self.sync_enclosure.is_none());
 
        self.sync_enclosure = Some(stmt);
 
        recursive_synchronous_statement(self, h, stmt)?;
 
        self.sync_enclosure = None;
 
        Ok(())
 
    }
 
    fn visit_goto_statement(&mut self, h: &mut Heap, stmt: GotoStatementId) -> VisitorResult {
 
        let target = self.get_target(h, &h[stmt].label)?;
 
        if h[target].in_sync != self.sync_enclosure {
 
            return Err((
 
                h[stmt].position,
 
                "Illegal goto: synchronous statement escape".to_string(),
 
            ));
 
        }
 
        h[stmt].target = Some(target);
 
        Ok(())
 
    }
 
    fn visit_expression(&mut self, _h: &mut Heap, _expr: ExpressionId) -> VisitorResult {
 
        Ok(())
 
    }
 
}
 

	
 
pub(crate) struct AssignableExpressions {
 
    assignable: bool,
 
}
 

	
 
impl AssignableExpressions {
 
    pub(crate) fn new() -> Self {
 
        AssignableExpressions { assignable: false }
 
    }
 
    fn error(&self, position: InputPosition) -> VisitorResult {
 
        Err((position, "Unassignable expression".to_string()))
 
    }
 
}
 

	
 
impl Visitor for AssignableExpressions {
 
    fn visit_assignment_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: AssignmentExpressionId,
 
    ) -> VisitorResult {
 
        if self.assignable {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            self.assignable = true;
 
            self.visit_expression(h, h[expr].left)?;
 
            self.assignable = false;
 
            self.visit_expression(h, h[expr].right)
 
        }
 
    }
 
    fn visit_conditional_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: ConditionalExpressionId,
 
    ) -> VisitorResult {
 
        if self.assignable {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            recursive_conditional_expression(self, h, expr)
 
        }
 
    }
 
    fn visit_binary_expression(&mut self, h: &mut Heap, expr: BinaryExpressionId) -> VisitorResult {
 
        if self.assignable {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            recursive_binary_expression(self, h, expr)
 
        }
 
    }
 
    fn visit_unary_expression(&mut self, h: &mut Heap, expr: UnaryExpressionId) -> VisitorResult {
 
        if self.assignable {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            match h[expr].operation {
 
                UnaryOperation::PostDecrement
 
                | UnaryOperation::PreDecrement
 
                | UnaryOperation::PostIncrement
 
                | UnaryOperation::PreIncrement => {
 
                    self.assignable = true;
 
                    recursive_unary_expression(self, h, expr)?;
 
                    self.assignable = false;
 
                    Ok(())
 
                }
 
                _ => recursive_unary_expression(self, h, expr),
 
            }
 
        }
 
    }
 
    fn visit_indexing_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: IndexingExpressionId,
 
    ) -> VisitorResult {
 
        let old = self.assignable;
 
        self.assignable = false;
 
        recursive_indexing_expression(self, h, expr)?;
 
        self.assignable = old;
 
        Ok(())
 
    }
 
    fn visit_slicing_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: SlicingExpressionId,
 
    ) -> VisitorResult {
 
        let old = self.assignable;
 
        self.assignable = false;
 
        recursive_slicing_expression(self, h, expr)?;
 
        self.assignable = old;
 
        Ok(())
 
    }
 
    fn visit_select_expression(&mut self, h: &mut Heap, expr: SelectExpressionId) -> VisitorResult {
 
        if h[expr].field.is_length() && self.assignable {
 
            return self.error(h[expr].position);
 
            return self.error(h[expr].span.begin);
 
        }
 
        let old = self.assignable;
 
        self.assignable = false;
 
        recursive_select_expression(self, h, expr)?;
 
        self.assignable = old;
 
        Ok(())
 
    }
 
    fn visit_array_expression(&mut self, h: &mut Heap, expr: ArrayExpressionId) -> VisitorResult {
 
        if self.assignable {
 
            self.error(h[expr].position)
 
        } else {
 
            recursive_array_expression(self, h, expr)
 
        }
 
    }
 
    fn visit_call_expression(&mut self, h: &mut Heap, expr: CallExpressionId) -> VisitorResult {
 
        if self.assignable {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            recursive_call_expression(self, h, expr)
 
        }
 
    }
 
    fn visit_constant_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: LiteralExpressionId,
 
    ) -> VisitorResult {
 
        if self.assignable {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            Ok(())
 
        }
 
    }
 
    fn visit_variable_expression(
 
        &mut self,
 
        _h: &mut Heap,
 
        _expr: VariableExpressionId,
 
    ) -> VisitorResult {
 
        Ok(())
 
    }
 
}
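// Roughly, the visitor above accepts assignment targets such as `x`, `a[i]` and
// `a.field`, while rejecting e.g. `a + b`, `f(..)`, literals and `a.length`
// (the length field is read-only) with an "Unassignable expression" error.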
 

	
 
pub(crate) struct IndexableExpressions {
 
    indexable: bool,
 
}
 

	
 
impl IndexableExpressions {
 
    pub(crate) fn new() -> Self {
 
        IndexableExpressions { indexable: false }
 
    }
 
    fn error(&self, position: InputPosition) -> VisitorResult {
 
        Err((position, "Unindexable expression".to_string()))
 
    }
 
}
 

	
 
impl Visitor for IndexableExpressions {
 
    fn visit_assignment_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: AssignmentExpressionId,
 
    ) -> VisitorResult {
 
        if self.indexable {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            recursive_assignment_expression(self, h, expr)
 
        }
 
    }
 
    fn visit_conditional_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: ConditionalExpressionId,
 
    ) -> VisitorResult {
 
        let old = self.indexable;
 
        self.indexable = false;
 
        self.visit_expression(h, h[expr].test)?;
 
        self.indexable = old;
 
        self.visit_expression(h, h[expr].true_expression)?;
 
        self.visit_expression(h, h[expr].false_expression)
 
    }
 
    fn visit_binary_expression(&mut self, h: &mut Heap, expr: BinaryExpressionId) -> VisitorResult {
 
        if self.indexable && h[expr].operation != BinaryOperator::Concatenate {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            recursive_binary_expression(self, h, expr)
 
        }
 
    }
 
    fn visit_unary_expression(&mut self, h: &mut Heap, expr: UnaryExpressionId) -> VisitorResult {
 
        if self.indexable {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            recursive_unary_expression(self, h, expr)
 
        }
 
    }
 
    fn visit_indexing_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: IndexingExpressionId,
 
    ) -> VisitorResult {
 
        let old = self.indexable;
 
        self.indexable = true;
 
        self.visit_expression(h, h[expr].subject)?;
 
        self.indexable = false;
 
        self.visit_expression(h, h[expr].index)?;
 
        self.indexable = old;
 
        Ok(())
 
    }
 
    fn visit_slicing_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: SlicingExpressionId,
 
    ) -> VisitorResult {
 
        let old = self.indexable;
 
        self.indexable = true;
 
        self.visit_expression(h, h[expr].subject)?;
 
        self.indexable = false;
 
        self.visit_expression(h, h[expr].from_index)?;
 
        self.visit_expression(h, h[expr].to_index)?;
 
        self.indexable = old;
 
        Ok(())
 
    }
 
    fn visit_select_expression(&mut self, h: &mut Heap, expr: SelectExpressionId) -> VisitorResult {
 
        let old = self.indexable;
 
        self.indexable = false;
 
        recursive_select_expression(self, h, expr)?;
 
        self.indexable = old;
 
        Ok(())
 
    }
 
    fn visit_array_expression(&mut self, h: &mut Heap, expr: ArrayExpressionId) -> VisitorResult {
 
        let old = self.indexable;
 
        self.indexable = false;
 
        recursive_array_expression(self, h, expr)?;
 
        self.indexable = old;
 
        Ok(())
 
    }
 
    fn visit_call_expression(&mut self, h: &mut Heap, expr: CallExpressionId) -> VisitorResult {
 
        let old = self.indexable;
 
        self.indexable = false;
 
        recursive_call_expression(self, h, expr)?;
 
        self.indexable = old;
 
        Ok(())
 
    }
 
    fn visit_constant_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: LiteralExpressionId,
 
    ) -> VisitorResult {
 
        if self.indexable {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            Ok(())
 
        }
 
    }
 
}
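// Similarly, the visitor above allows indexing into variables, selects, array
// literals, call results, slice/index results and concatenation results, but
// reports "Unindexable expression" when e.g. a literal, a unary result or an
// assignment is used as the subject of an index.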
 

	
 
pub(crate) struct SelectableExpressions {
 
    selectable: bool,
 
}
 

	
 
impl SelectableExpressions {
 
    pub(crate) fn new() -> Self {
 
        SelectableExpressions { selectable: false }
 
    }
 
    fn error(&self, position: InputPosition) -> VisitorResult {
 
        Err((position, "Unselectable expression".to_string()))
 
    }
 
}
 

	
 
impl Visitor for SelectableExpressions {
 
    fn visit_assignment_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: AssignmentExpressionId,
 
    ) -> VisitorResult {
 
        // left-hand side of assignment can be skipped
 
        let old = self.selectable;
 
        self.selectable = false;
 
        self.visit_expression(h, h[expr].right)?;
 
        self.selectable = old;
 
        Ok(())
 
    }
 
    fn visit_conditional_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: ConditionalExpressionId,
 
    ) -> VisitorResult {
 
        let old = self.selectable;
 
        self.selectable = false;
 
        self.visit_expression(h, h[expr].test)?;
 
        self.selectable = old;
 
        self.visit_expression(h, h[expr].true_expression)?;
 
        self.visit_expression(h, h[expr].false_expression)
 
    }
 
    fn visit_binary_expression(&mut self, h: &mut Heap, expr: BinaryExpressionId) -> VisitorResult {
 
        if self.selectable && h[expr].operation != BinaryOperator::Concatenate {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            recursive_binary_expression(self, h, expr)
 
        }
 
    }
 
    fn visit_unary_expression(&mut self, h: &mut Heap, expr: UnaryExpressionId) -> VisitorResult {
 
        if self.selectable {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            recursive_unary_expression(self, h, expr)
 
        }
 
    }
 
    fn visit_indexing_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: IndexingExpressionId,
 
    ) -> VisitorResult {
 
        let old = self.selectable;
 
        self.selectable = false;
 
        recursive_indexing_expression(self, h, expr)?;
 
        self.selectable = old;
 
        Ok(())
 
    }
 
    fn visit_slicing_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: SlicingExpressionId,
 
    ) -> VisitorResult {
 
        let old = self.selectable;
 
        self.selectable = false;
 
        recursive_slicing_expression(self, h, expr)?;
 
        self.selectable = old;
 
        Ok(())
 
    }
 
    fn visit_select_expression(&mut self, h: &mut Heap, expr: SelectExpressionId) -> VisitorResult {
 
        let old = self.selectable;
 
        self.selectable = false;
 
        recursive_select_expression(self, h, expr)?;
 
        self.selectable = old;
 
        Ok(())
 
    }
 
    fn visit_array_expression(&mut self, h: &mut Heap, expr: ArrayExpressionId) -> VisitorResult {
 
        let old = self.selectable;
 
        self.selectable = false;
 
        recursive_array_expression(self, h, expr)?;
 
        self.selectable = old;
 
        Ok(())
 
    }
 
    fn visit_call_expression(&mut self, h: &mut Heap, expr: CallExpressionId) -> VisitorResult {
 
        let old = self.selectable;
 
        self.selectable = false;
 
        recursive_call_expression(self, h, expr)?;
 
        self.selectable = old;
 
        Ok(())
 
    }
 
    fn visit_constant_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: LiteralExpressionId,
 
    ) -> VisitorResult {
 
        if self.selectable {
 
            self.error(h[expr].position)
 
            self.error(h[expr].span.begin)
 
        } else {
 
            Ok(())
 
        }
 
    }
 
}
src/protocol/parser/mod.rs
 
mod depth_visitor;
 
pub(crate) mod symbol_table;
 
pub(crate) mod symbol_table2;
 
pub(crate) mod type_table;
 
pub(crate) mod tokens;
 
pub(crate) mod token_parsing;
 
pub(crate) mod pass_tokenizer;
 
pub(crate) mod pass_symbols;
 
pub(crate) mod pass_imports;
 
pub(crate) mod pass_definitions;
 
mod type_resolver;
 
pub(crate) mod pass_validation_linking;
 
pub(crate) mod pass_typing;
 
mod visitor;
 
mod pass_validation_linking;
 
mod utils;
 

	
 
use depth_visitor::*;
 
use tokens::*;
 
use crate::collections::*;
 
use symbol_table2::SymbolTable;
 
use symbol_table::SymbolTable;
 
use visitor::Visitor2;
 
use pass_tokenizer::PassTokenizer;
 
use pass_symbols::PassSymbols;
 
use pass_imports::PassImport;
 
use pass_definitions::PassDefinitions;
 
use pass_validation_linking::PassValidationLinking;
 
use type_resolver::{TypeResolvingVisitor, ResolveQueue};
 
use type_table::{TypeTable, TypeCtx};
 
use pass_typing::{PassTyping, ResolveQueue};
 
use type_table::TypeTable;
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::input_source2::{InputSource2 as InputSource};
 
use crate::protocol::lexer::*;
 
use crate::protocol::input_source::*;
 

	
 
use std::collections::HashMap;
 
use crate::protocol::ast_printer::ASTWriter;
 

	
 
#[derive(PartialEq, Eq)]
 
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
 
pub enum ModuleCompilationPhase {
 
    Source,                 // only source is set
 
    Tokenized,              // source is tokenized
 
    SymbolsScanned,         // all definitions are linked to their type class
 
    ImportsResolved,        // all imports are added to the symbol table
 
    DefinitionsParsed,      // produced the AST for the entire module
 
    TypesParsed,            // added all definitions to the type table
 
    TypesAddedToTable,      // added all definitions to the type table
 
    ValidatedAndLinked,     // AST is traversed and has linked the required AST nodes
 
    Typed,                  // Type inference and checking has been performed
 
}
 

	
 
pub struct Module {
 
    // Buffers
 
    source: InputSource,
 
    tokens: TokenBuffer,
 
    pub source: InputSource,
 
    pub tokens: TokenBuffer,
 
    // Identifiers
 
    root_id: RootId,
 
    name: Option<(PragmaId, StringRef<'static>)>,
 
    version: Option<(PragmaId, i64)>,
 
    phase: ModuleCompilationPhase,
 
    pub root_id: RootId,
 
    pub name: Option<(PragmaId, StringRef<'static>)>,
 
    pub version: Option<(PragmaId, i64)>,
 
    pub phase: ModuleCompilationPhase,
 
}
 

	
 
pub struct PassCtx<'a> {
 
    heap: &'a mut Heap,
 
    symbols: &'a mut SymbolTable,
 
    pool: &'a mut StringPool,
 
}
 

	
 
// TODO: @fixme, pub qualifier
 
pub(crate) struct LexedModule {
 
    pub(crate) source: InputSource,
 
    module_name: Vec<u8>,
 
    version: Option<u64>,
 
    pub(crate) root_id: RootId,
 
}
 

	
 
pub struct Parser {
 
    pub(crate) heap: Heap,
 
    pub(crate) modules: Vec<LexedModule>,
 
    pub(crate) module_lookup: HashMap<Vec<u8>, usize>, // from (optional) module name to `modules` idx
 
    pub(crate) string_pool: StringPool,
 
    pub(crate) modules: Vec<Module>,
 
    pub(crate) symbol_table: SymbolTable,
 
    pub(crate) type_table: TypeTable,
 
    // Compiler passes
 
    pass_tokenizer: PassTokenizer,
 
    pass_symbols: PassSymbols,
 
    pass_import: PassImport,
 
    pass_definitions: PassDefinitions,
 
    pass_validation: PassValidationLinking,
 
    pass_typing: PassTyping,
 
}
 

	
 
impl Parser {
 
    pub fn new() -> Self {
 
        Parser{
 
            heap: Heap::new(),
 
            string_pool: StringPool::new(),
 
            modules: Vec::new(),
 
            module_lookup: HashMap::new(),
 
            symbol_table: SymbolTable::new(),
 
            type_table: TypeTable::new(),
 
            pass_tokenizer: PassTokenizer::new(),
 
            pass_symbols: PassSymbols::new(),
 
            pass_import: PassImport::new(),
 
            pass_definitions: PassDefinitions::new(),
 
            pass_validation: PassValidationLinking::new(),
 
            pass_typing: PassTyping::new(),
 
        }
 
    }
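    // Typical driver usage (a sketch; the exact InputSource constructor is an
    // assumption, it is not shown here):
    //   let mut parser = Parser::new();
    //   parser.feed(source_for_module_a)?;  // register each module's source
    //   parser.feed(source_for_module_b)?;
    //   parser.parse()?;                    // run all passes over the fed modules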
 

	
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<RootId, ParseError> {
 
        // Lex the input source
 
        let mut lex = Lexer::new(&mut source);
 
        let pd = lex.consume_protocol_description(&mut self.heap)?;
 

	
 
        // Seek the module name and version
 
        let root = &self.heap[pd];
 
        let mut module_name_pos = InputPosition::default();
 
        let mut module_name = Vec::new();
 
        let mut module_version_pos = InputPosition::default();
 
        let mut module_version = None;
 

	
 
        for pragma in &root.pragmas {
 
            match &self.heap[*pragma] {
 
                Pragma::Module(module) => {
 
                    if !module_name.is_empty() {
 
                        return Err(
 
                            ParseError::new_error(&source, module.position, "Double definition of module name in the same file")
 
                                .with_postfixed_info(&source, module_name_pos, "Previous definition was here")
 
                        )
 
                    }
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<(), ParseError> {
 
        // TODO: @Optimize
 
        let mut token_buffer = TokenBuffer::new();
 
        self.pass_tokenizer.tokenize(&mut source, &mut token_buffer)?;
 

	
 
                    module_name_pos = module.position.clone();
 
                    module_name = module.value.clone();
 
                },
 
                Pragma::Version(version) => {
 
                    if module_version.is_some() {
 
                        return Err(
 
                            ParseError::new_error(&source, version.position, "Double definition of module version")
 
                                .with_postfixed_info(&source, module_version_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_version_pos = version.position.clone();
 
                    module_version = Some(version.version);
 
                },
 
            }
 
        }
 

	
 
        // Add module to list of modules and prevent naming conflicts
 
        let cur_module_idx = self.modules.len();
 
        if let Some(prev_module_idx) = self.module_lookup.get(&module_name) {
 
            // Find `#module` statement in other module again
 
            let prev_module = &self.modules[*prev_module_idx];
 
            let prev_module_pos = self.heap[prev_module.root_id].pragmas
 
                .iter()
 
                .find_map(|p| {
 
                    match &self.heap[*p] {
 
                        Pragma::Module(module) => Some(module.position.clone()),
 
                        _ => None
 
                    }
 
                })
 
                .unwrap_or(InputPosition::default());
 

	
 
            let module_name_msg = if module_name.is_empty() {
 
                format!("a nameless module")
 
            } else {
 
                format!("module '{}'", String::from_utf8_lossy(&module_name))
 
            };
 

	
 
            return Err(
 
                ParseError::new_error(&source, module_name_pos, &format!("Double definition of {} across files", module_name_msg))
 
                    .with_postfixed_info(&prev_module.source, prev_module_pos, "Other definition was here")
 
            );
 
        }
 

	
 
        self.modules.push(LexedModule{
 
        let module = Module{
 
            source,
 
            module_name: module_name.clone(),
 
            version: module_version,
 
            root_id: pd
 
        });
 
        self.module_lookup.insert(module_name, cur_module_idx);
 
        Ok(pd)
 
    }
 

	
 
    fn resolve_symbols_and_types(&mut self) -> Result<(), ParseError> {
 
        // Construct the symbol table to resolve any imports and/or definitions,
 
        // then use the symbol table to actually annotate all of the imports.
 
        // If the type table is constructed correctly then all imports MUST be
 
        // resolvable.
 
        self.symbol_table.build(&self.heap, &self.modules)?;
 
            tokens: token_buffer,
 
            root_id: RootId::new_invalid(),
 
            name: None,
 
            version: None,
 
            phase: ModuleCompilationPhase::Tokenized,
 
        };
 
        self.modules.push(module);
 

	
 
        // Not pretty, but we need to work around rust's borrowing rules, it is
 
        // totally safe to mutate the contents of an AST element that we are
 
        // not borrowing anywhere else.
 
        let mut module_index = 0;
 
        let mut import_index = 0;
 
        loop {
 
            if module_index >= self.modules.len() {
 
                break;
 
        Ok(())
 
    }
 

	
 
            let module_root_id = self.modules[module_index].root_id;
 
            let import_id = {
 
                let root = &self.heap[module_root_id];
 
                if import_index >= root.imports.len() {
 
                    module_index += 1;
 
                    import_index = 0;
 
                    continue
 
                }
 
                root.imports[import_index]
 
    pub fn parse(&mut self) -> Result<(), ParseError> {
 
        let mut pass_ctx = PassCtx{
 
            heap: &mut self.heap,
 
            symbols: &mut self.symbol_table,
 
            pool: &mut self.string_pool,
 
        };
 

	
 
            let import = &mut self.heap[import_id];
 
            match import {
 
                Import::Module(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module import already resolved");
 
                    let target_module_id = self.symbol_table.resolve_module(&import.module)
 
                        .expect("module import is resolved by symbol table");
 
                    import.module_id = Some(target_module_id)
 
                },
 
                Import::Symbols(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module of symbol import already resolved");
 
                    let target_module_id = self.symbol_table.resolve_module(&import.module)
 
                        .expect("symbol import's module is resolved by symbol table");
 
                    import.module_id = Some(target_module_id);
 

	
 
                    for symbol in &mut import.symbols {
 
                        debug_assert!(symbol.definition_id.is_none(), "symbol import already resolved");
 
                        let (_, target_definition_id) = self.symbol_table.resolve_identifier(module_root_id, &symbol.alias)
 
                            .expect("symbol import is resolved by symbol table")
 
                            .as_definition()
 
                            .expect("symbol import does not resolve to namespace symbol");
 
                        symbol.definition_id = Some(target_definition_id);
 
                    }
 
        // Advance all modules to the phase where all symbols are scanned
 
        for module_idx in 0..self.modules.len() {
 
            self.pass_symbols.parse(&mut self.modules, module_idx, &mut pass_ctx)?;
 
        }
 
            }
 

	
 
            import_index += 1;
 
        }
 

	
 
        // All imports in the AST are now annotated. We now use the symbol table
 
        // to construct the type table.
 
        let mut type_ctx = TypeCtx::new(&self.symbol_table, &mut self.heap, &self.modules);
 
        self.type_table.build_base_types(&mut type_ctx)?;
 

	
 
        Ok(())
 
        // With all symbols scanned, perform further compilation until we can
 
        // add all base types to the type table.
 
        for module_idx in 0..self.modules.len() {
 
            self.pass_import.parse(&mut self.modules, module_idx, &mut pass_ctx)?;
 
            self.pass_definitions.parse(&mut self.modules, module_idx, &mut pass_ctx)?;
 
        }
 

	
 
    pub fn parse(&mut self) -> Result<(), ParseError> {
 
        self.resolve_symbols_and_types()?;
 
        // Add every known type to the type table
 
        self.type_table.build_base_types(&mut self.modules, &mut pass_ctx)?;
 

	
 
        // Validate and link all modules
 
        let mut visit = PassValidationLinking::new();
 
        for module in &self.modules {
 
        // Continue compilation with the remaining phases now that the types
 
        // are all in the type table
 
        for module_idx in 0..self.modules.len() {
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                module: &self.modules[module_idx],
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.visit_module(&mut ctx)?;
 
            self.pass_validation.visit_module(&mut ctx)?;
 
        }
 

	
 
        // Perform typechecking on all modules
 
        let mut visit = TypeResolvingVisitor::new();
 
        let mut queue = ResolveQueue::new();
 
        for module in &self.modules {
 
            let ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            TypeResolvingVisitor::queue_module_definitions(&ctx, &mut queue);   
 
            PassTyping::queue_module_definitions(&ctx, &mut queue);
 
        };
 
        while !queue.is_empty() {
 
            let top = queue.pop().unwrap();
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module: &self.modules[top.root_id.index as usize],
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.handle_module_definition(&mut ctx, &mut queue, top)?;
 
            self.pass_typing.handle_module_definition(&mut ctx, &mut queue, top)?;
 
        }
 

	
 
        // Perform remaining steps
 
        // TODO: Phase out at some point
 
        for module in &self.modules {
 
            let root_id = module.root_id;
 
            if let Err((position, message)) = Self::parse_inner(&mut self.heap, root_id) {
 
                return Err(ParseError::new_error(&self.modules[0].source, position, &message))
 
                return Err(ParseError::new_error_str_at_pos(&self.modules[0].source, position, &message))
 
            }
 
        }
 

	
 
        // let mut writer = ASTWriter::new();
 
        // let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap();
 
        // writer.write_ast(&mut file, &self.heap);
 

	
 
        Ok(())
 
    }
 

	
 
    pub fn parse_inner(h: &mut Heap, pd: RootId) -> VisitorResult {
 
        // TODO: @cleanup, slowly phasing out old compiler
 
        // NestedSynchronousStatements::new().visit_protocol_description(h, pd)?;
 
        // ChannelStatementOccurrences::new().visit_protocol_description(h, pd)?;
 
        // FunctionStatementReturns::new().visit_protocol_description(h, pd)?;
 
        // ComponentStatementReturnNew::new().visit_protocol_description(h, pd)?;
 
        // CheckBuiltinOccurrences::new().visit_protocol_description(h, pd)?;
 
        // BuildSymbolDeclarations::new().visit_protocol_description(h, pd)?;
 
        // LinkCallExpressions::new().visit_protocol_description(h, pd)?;
 
        // BuildScope::new().visit_protocol_description(h, pd)?;
 
        // ResolveVariables::new().visit_protocol_description(h, pd)?;
 
        LinkStatements::new().visit_protocol_description(h, pd)?;
 
        // BuildLabels::new().visit_protocol_description(h, pd)?;
 
        // ResolveLabels::new().visit_protocol_description(h, pd)?;
 
        AssignableExpressions::new().visit_protocol_description(h, pd)?;
 
        IndexableExpressions::new().visit_protocol_description(h, pd)?;
 
        SelectableExpressions::new().visit_protocol_description(h, pd)?;
 

	
 
        Ok(())
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/pass_definitions.rs
 
use crate::protocol::ast::*;
 
use super::symbol_table2::*;
 
use super::symbol_table::*;
 
use super::{Module, ModuleCompilationPhase, PassCtx};
 
use super::tokens::*;
 
use super::token_parsing::*;
 
use crate::protocol::input_source2::{InputSource2 as InputSource, InputPosition2 as InputPosition, InputSpan, ParseError};
 
use crate::protocol::input_source::{InputSource as InputSource, InputPosition as InputPosition, InputSpan, ParseError};
 
use crate::collections::*;
 

	
 
/// Parses all the tokenized definitions into actual AST nodes.
 
pub(crate) struct PassDefinitions {
 
    // State
 
    cur_definition: DefinitionId,
 
    // Temporary buffers of various kinds
 
    buffer: String,
 
    identifiers: Vec<Identifier>,
 
    struct_fields: Vec<StructFieldDefinition>,
 
    enum_variants: Vec<EnumVariantDefinition>,
 
    union_variants: Vec<UnionVariantDefinition>,
 
    parameters: ScopedBuffer<ParameterId>,
 
    expressions: ScopedBuffer<ExpressionId>,
 
    statements: ScopedBuffer<StatementId>,
 
    parser_types: Vec<ParserType>,
 
}
 

	
 
impl PassDefinitions {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            cur_definition: DefinitionId::new_invalid(),
 
            buffer: String::with_capacity(128),
 
            struct_fields: Vec::with_capacity(128),
 
            enum_variants: Vec::with_capacity(128),
 
            union_variants: Vec::with_capacity(128),
 
            parameters: ScopedBuffer::new_reserved(128),
 
            expressions: ScopedBuffer::new_reserved(128),
 
            statements: ScopedBuffer::new_reserved(128),
 
            parser_types: Vec::with_capacity(128),
 
        }
 
    }
 

	
 
    pub(crate) fn parse(&mut self, modules: &mut [Module], module_idx: usize, ctx: &mut PassCtx) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let module_range = &module.tokens.ranges[0];
 
        debug_assert_eq!(module.phase, ModuleCompilationPhase::ImportsResolved);
 
        debug_assert_eq!(module_range.range_kind, TokenRangeKind::Module);
 

	
 
        // Although we only need to parse the definitions, we want to go through
 
        // code ranges as well such that we can throw errors if we get
 
        // unexpected tokens at the module level of the source.
 
        let mut range_idx = module_range.first_child_idx;
 
        loop {
 
            let range_idx_usize = range_idx as usize;
 
            let cur_range = &module.tokens.ranges[range_idx_usize];
 

	
 
            match cur_range.range_kind {
                TokenRangeKind::Module => unreachable!(), // a module range is never a child range
                // Pragma and import ranges were fully parsed by earlier passes; skip
                // them here, but still advance to the next sibling below.
                TokenRangeKind::Pragma | TokenRangeKind::Import => {},
                TokenRangeKind::Definition | TokenRangeKind::Code => {
                    self.visit_range(modules, module_idx, ctx, range_idx_usize)?;
                },
            }
 

	
 
            match cur_range.next_sibling_idx {
 
                Some(idx) => { range_idx = idx; },
 
                None => { break; },
 
            }
 
        }
 

	
 

	
 
        modules[module_idx].phase = ModuleCompilationPhase::DefinitionsParsed;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_range(
 
        &mut self, modules: &[Module], module_idx: usize, ctx: &mut PassCtx, range_idx: usize
 
    ) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let cur_range = &module.tokens.ranges[range_idx];
 
        debug_assert!(cur_range.range_kind == TokenRangeKind::Definition || cur_range.range_kind == TokenRangeKind::Code);
 

	
 
        // Detect which definition we're parsing
 
        let mut iter = module.tokens.iter_range(cur_range);
 
        loop {
 
            let next = iter.next();
 
            if next.is_none() {
 
                return Ok(())
 
            }
 

	
 
            // Token was not None, so peek_ident returns None if not an ident
 
            let ident = peek_ident(&module.source, &mut iter);
 
            match ident {
 
                Some(KW_STRUCT) => self.visit_struct_definition(module, &mut iter, ctx)?,
 
                Some(KW_ENUM) => self.visit_enum_definition(module, &mut iter, ctx)?,
 
                Some(KW_FUNCTION) => self.visit_function_definition(module, &mut iter, ctx)?,
 
                Some(KW_PRIMITIVE) | Some(KW_COMPOSITE) => self.visit_component_definition(module, &mut iter, ctx)?,
 
                _ => return Err(ParseError::new_error_str_at_pos(
 
                    &module.source, iter.last_valid_pos(),
 
                    "unexpected symbol, expected some kind of type or procedure definition"
 
                )),
 
            }
 
        }
 
    }
 

	
 
    fn visit_struct_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_STRUCT)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        // Parse struct definition
 
        consume_polymorphic_vars_spilled(source, iter)?;
 
        consume_polymorphic_vars_spilled(&module.source, iter)?;
 
        debug_assert!(self.struct_fields.is_empty());
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, source, iter,
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter,
 
            |source, iter| {
 
                let start_pos = iter.last_valid_pos();
 
                let parser_type = consume_parser_type(
 
                    source, iter, &ctx.symbols, &ctx.heap, poly_vars, module_scope,
 
                    definition_id, false, 0
 
                )?;
 
                let field = consume_ident_interned(source, iter, ctx)?;
 
                Ok(StructFieldDefinition{
 
                    span: InputSpan::from_positions(start_pos, field.span.end),
 
                    field, parser_type
 
                })
 
            },
 
            &mut self.struct_fields, "a struct field", "a list of struct fields", None
 
        )?;
 

	
 
        // Transfer to preallocated definition
 
        let struct_def = ctx.heap[definition_id].as_struct_mut();
 
        struct_def.fields.clone_from(&self.struct_fields);
 
        self.struct_fields.clear();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_enum_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_ENUM)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        // Parse enum definition
 
        consume_polymorphic_vars_spilled(source, iter)?;
 
        consume_polymorphic_vars_spilled(&module.source, iter)?;
 
        debug_assert!(self.enum_variants.is_empty());
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, source, iter,
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter,
 
            |source, iter| {
 
                let identifier = consume_ident_interned(source, iter, ctx)?;
 
                let value = if iter.next() == Some(TokenKind::Equal) {
 
                    iter.consume();
 
                    let (variant_number, _) = consume_integer_literal(source, iter, &mut self.buffer)?;
 
                    EnumVariantValue::Integer(variant_number as i64) // TODO: @int
 
                } else {
 
                    EnumVariantValue::None
 
                };
 
                Ok(EnumVariantDefinition{ identifier, value })
 
            },
 
            &mut self.enum_variants, "an enum variant", "a list of enum variants", None
 
        )?;
 

	
 
        // Transfer to definition
 
        let enum_def = ctx.heap[definition_id].as_enum_mut();
 
        enum_def.variants.clone_from(&self.enum_variants);
 
        self.enum_variants.clear();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_union_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_UNION)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        // Parse union definition
 
        consume_polymorphic_vars_spilled(source, iter)?;
 
        consume_polymorphic_vars_spilled(&module.source, iter)?;
 
        debug_assert!(self.union_variants.is_empty());
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, source, iter,
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter,
 
            |source, iter| {
 
                let identifier = consume_ident_interned(source, iter, ctx)?;
 
                let mut close_pos = identifier.span.end;
 
                let has_embedded = maybe_consume_comma_separated(
 
                    TokenKind::OpenParen, TokenKind::CloseParen, source, iter,
 
                    |source, iter| {
 
                        consume_parser_type(
 
                            source, iter, &ctx.symbols, &ctx.heap, poly_vars,
 
                            module_scope, definition_id, false, 0
 
                        )
 
                    },
 
                    &mut self.parser_types, "an embedded type", Some(&mut close_pos)
 
                )?;
 
                let value = if has_embedded {
 
                    UnionVariantValue::Embedded(self.parser_types.clone())
 
                } else {
 
                    UnionVariantValue::None
 
                };
 
                self.parser_types.clear();
 

	
 
                Ok(UnionVariantDefinition{
 
                    span: InputSpan::from_positions(identifier.span.begin, close_pos),
 
                    identifier,
 
                    value
 
                })
 
            },
 
            &mut self.union_variants, "a union variant", "a list of union variants", None
 
        )?;
 

	
 
        // Transfer to AST
 
        let union_def = ctx.heap[definition_id].as_union_mut();
 
        union_def.variants.clone_from(&self.union_variants);
 
        self.union_variants.clear();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_function_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_FUNCTION)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        // Parse function's argument list
 
        let mut parameter_section = self.parameters.start_section();
 
        consume_parameter_list(
 
            source, iter, ctx, &mut parameter_section, poly_vars, module_scope, definition_id
 
            &module.source, iter, ctx, &mut parameter_section, poly_vars, module_scope, definition_id
 
        )?;
 
        let parameters = parameter_section.into_vec();
 

	
 
        // Consume return types
 
        consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
        let mut open_curly_pos = iter.last_valid_pos();
 
        consume_comma_separated_until(
 
            TokenKind::OpenCurly, &module.source, iter,
 
            |source, iter| {
 
                consume_parser_type(source, iter, &ctx.symbols, &ctx.heap, poly_vars, module_scope, definition_id, false, 0)
 
            },
 
            &mut self.parser_types, "a return type", Some(&mut open_curly_pos)
 
        )?;
 
        let return_types = self.parser_types.clone();
 

	
 
        // TODO: @ReturnValues
 
        match return_types.len() {
 
            0 => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "expected a return type")),
 
            1 => {},
 
            _ => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "multiple return types are not (yet) allowed")),
 
        }
 

	
 
        // Consume block
 
        let body = self.consume_block_statement_without_leading_curly(module, iter, ctx, open_curly_pos)?;
 

	
 
        // Assign everything in the preallocated AST node
 
        let function = ctx.heap[definition_id].as_function_mut();
 
        function.return_types = return_types;
 
        function.parameters = parameters;
 
        function.body = body;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_component_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        let (_variant_text, _) = consume_any_ident(&module.source, iter)?;
 
        debug_assert!(variant_text == KW_PRIMITIVE || variant_text == KW_COMPOSITE);
 
        debug_assert!(_variant_text == KW_PRIMITIVE || _variant_text == KW_COMPOSITE);
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated definition
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        // Parse component's argument list
 
        let mut parameter_section = self.parameters.start_section();
 
        consume_parameter_list(
 
            source, iter, ctx, &mut parameter_section, poly_vars, module_scope, definition_id
 
            &module.source, iter, ctx, &mut parameter_section, poly_vars, module_scope, definition_id
 
        )?;
 
        let parameters = parameter_section.into_vec();
 

	
 
        // Consume block
 
        let body = self.consume_block_statement(module, iter, ctx)?;
 

	
 
        // Assign everything in the AST node
 
        let component = ctx.heap[definition_id].as_component_mut();
 
        component.parameters = parameters;
 
        component.body = body;
 

	
 
        Ok(())
 
    }
 

	
 
    /// Consumes a block statement. If the resulting statement is not a block
 
    /// (e.g. for a shorthand "if (expr) single_statement") then it will be
 
    /// wrapped in one
 
    fn consume_block_or_wrapped_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<BlockStatementId, ParseError> {
 
        if Some(TokenKind::OpenCurly) == iter.next() {
 
            // This is a block statement
 
            self.consume_block_statement(module, iter, ctx)
 
        } else {
 
            // Not a block statement, so wrap it in one
 
            let mut statements = self.statements.start_section();
 
            let wrap_begin_pos = iter.last_valid_pos();
 
            self.consume_statement(module, iter, ctx, &mut statements)?;
 
            let wrap_end_pos = iter.last_valid_pos();
 

	
 
            debug_assert_eq!(statements.len(), 1);
 
            let statements = statements.into_vec();
 

	
 
            ctx.heap.alloc_block_statement(|this| BlockStatement{
 
            Ok(ctx.heap.alloc_block_statement(|this| BlockStatement{
 
                this,
 
                is_implicit: true,
 
                span: InputSpan::from_positions(wrap_begin_pos, wrap_end_pos), // TODO: @Span
 
                statements,
 
                parent_scope: None,
 
                relative_pos_in_parent: 0,
 
                locals: Vec::new(),
 
                labels: Vec::new()
 
            })
 
            }))
 
        }
 
    }
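    // For example, the shorthand `if (expr) x = 5;` gets its single statement
    // wrapped in an implicit block (note `is_implicit: true` above), so later
    // passes can treat every branch body as a BlockStatement.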
 

	
 
    /// Consumes a statement and returns a boolean indicating whether it was a
 
    /// block or not.
 
    fn consume_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, section: &mut ScopedSection<StatementId>
 
    ) -> Result<(), ParseError> {
 
        let next = iter.next().expect("consume_statement has a next token");
 

	
 
        if next == TokenKind::OpenCurly {
 
            let id = self.consume_block_statement(module, iter, ctx)?;
 
            section.push(id.upcast());
 
        } else if next == TokenKind::Ident {
 
            let (ident, _) = consume_any_ident(source, iter)?;
 
            let (ident, _) = consume_any_ident(&module.source, iter)?;
 
            if ident == KW_STMT_IF {
 
                // Consume if statement and place end-if statement directly
 
                // after it.
 
                let id = self.consume_if_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 

	
 
                let end_if = ctx.heap.alloc_end_if_statement(|this| EndIfStatement{
 
                    this, start_if: id, next: None
 
                });
 
                section.push(end_if.upcast()); // place the end-if pseudo-statement directly after the if statement
 

	
 
                let if_stmt = &mut ctx.heap[id];
 
                if_stmt.end_if = Some(end_if);
 
            } else if ident == KW_STMT_WHILE {
 
                let id = self.consume_while_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 

	
 
                let end_while = ctx.heap.alloc_end_while_statement(|this| EndWhileStatement{
 
                    this, start_while: id, next: None
 
                });
 
                section.push(end_while.upcast());
 

	
 
                let while_stmt = &mut ctx.heap[id];
 
                while_stmt.end_while = Some(end_while);
 
            } else if ident == KW_STMT_BREAK {
 
                let id = self.consume_break_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 
            } else if ident == KW_STMT_CONTINUE {
 
                let id = self.consume_continue_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 
            } else if ident == KW_STMT_SYNC {
 
                let id = self.consume_synchronous_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 

	
 
                let end_sync = ctx.heap.alloc_end_synchronous_statement(|this| EndSynchronousStatement{
 
                    this, start_sync: id, next: None
 
                });
 

	
 
                let sync_stmt = &mut ctx.heap[id];
 
                sync_stmt.end_sync = Some(end_sync);
 
            } else if ident == KW_STMT_RETURN {
 
                let id = self.consume_return_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 
            } else if ident == KW_STMT_GOTO {
 
                let id = self.consume_goto_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 
            } else if ident == KW_STMT_NEW {
 
                let id = self.consume_new_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 
            } else if ident == KW_STMT_CHANNEL {
 
                let id = self.consume_channel_statement(module, iter, ctx)?;
 
                section.push(id.upcast().upcast());
 
            } else if iter.peek() == Some(TokenKind::Colon) {
 
                self.consume_labeled_statement(module, iter, ctx, section)?;
 
            } else {
 
                // Two fallback possibilities: the first one is a memory
 
                // declaration, the other one is to parse it as a regular
 
                // expression. This is a bit ugly
 
                if let Some((memory_stmt_id, assignment_stmt_id)) = self.maybe_consume_memory_statement(module, iter, ctx)? {
 
                    section.push(memory_stmt_id.upcast().upcast());
 
                    section.push(assignment_stmt_id.upcast());
 
                } else {
 
                    let id = self.consume_expression_statement(module, iter, ctx)?;
 
                    section.push(id.upcast());
 
                }
 
            }
 
        };
 

	
 
        return Ok(());
 
    }
 

	
 
    fn consume_block_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<BlockStatementId, ParseError> {
 
        let open_span = consume_token(&module.source, iter, TokenKind::OpenCurly)?;
 
        self.consume_block_statement_without_leading_curly(module, iter, ctx, open_span.begin)
 
    }
 

	
 
    fn consume_block_statement_without_leading_curly(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, open_curly_pos: InputPosition
 
    ) -> Result<BlockStatementId, ParseError> {
 
        // Consume statements until the closing curly brace
        let mut statements = self.statements.start_section();
        let mut next = iter.next();
        while next.is_some() && next != Some(TokenKind::CloseCurly) {
            self.consume_statement(module, iter, ctx, &mut statements)?;
            next = iter.next();
        }
        let statements = statements.into_vec();
 

	
 
        let mut block_span = consume_token(&module.source, iter, TokenKind::CloseCurly)?;
 
        block_span.begin = open_curly_pos;
 

	
 
        Ok(ctx.heap.alloc_block_statement(|this| BlockStatement{
 
            this,
 
            is_implicit: false,
 
            span: block_span,
 
            statements,
 
            parent_scope: None,
 
            relative_pos_in_parent: 0,
 
            locals: Vec::new(),
 
            labels: Vec::new(),
 
        }))
 
    }
 

	
 
    fn consume_if_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<IfStatementId, ParseError> {
 
        let if_span = consume_exact_ident(&module.source, iter, KW_STMT_IF)?;
 
        consume_token(&module.source, iter, TokenKind::OpenParen)?;
 
        let test = self.consume_expression(module, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::CloseParen)?;
 
        let true_body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 

	
 
        let false_body = if has_ident(&module.source, iter, KW_STMT_ELSE) {
 
            iter.consume();
 
            let false_body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 
            Some(false_body)
 
        } else {
 
            None
 
        };
 

	
 
        Ok(ctx.heap.alloc_if_statement(|this| IfStatement{
 
            this,
 
            span: if_span,
 
            test,
 
            true_body,
 
            false_body,
 
            end_if: None,
 
        }))
 
    }
 

	
 
    fn consume_while_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<WhileStatementId, ParseError> {
 
        let while_span = consume_exact_ident(&module.source, iter, KW_STMT_WHILE)?;
 
        consume_token(&module.source, iter, TokenKind::OpenParen)?;
 
        let test = self.consume_expression(module, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::CloseParen)?;
 
        let body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 

	
 
        Ok(ctx.heap.alloc_while_statement(|this| WhileStatement{
 
            this,
 
            span: while_span,
 
            test,
 
            body,
 
            end_while: None,
 
            in_sync: None,
 
        }))
 
    }
 

	
 
    fn consume_break_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<BreakStatementId, ParseError> {
 
        let break_span = consume_exact_ident(&module.source, iter, KW_STMT_BREAK)?;
 
        let label = if Some(TokenKind::Ident) == iter.next() {
 
            let label = consume_ident_interned(&module.source, iter, ctx)?;
 
            Some(label)
 
        } else {
 
            None
 
        };
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
        Ok(ctx.heap.alloc_break_statement(|this| BreakStatement{
 
            this,
 
            span: break_span,
 
            label,
 
            target: None,
 
        }))
 
    }
 

	
 
    fn consume_continue_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ContinueStatementId, ParseError> {
 
        let continue_span = consume_exact_ident(&module.source, iter, KW_STMT_CONTINUE)?;
 
        let label = if Some(TokenKind::Ident) == iter.next() {
 
            let label = consume_ident_interned(&module.source, iter, ctx)?;
 
            Some(label)
 
        } else {
 
            None
 
        };
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
        Ok(ctx.heap.alloc_continue_statement(|this| ContinueStatement{
 
            this,
 
            span: continue_span,
 
            label,
 
            target: None
 
        }))
 
    }
 

	
 
    fn consume_synchronous_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<SynchronousStatementId, ParseError> {
 
        let synchronous_span = consume_exact_ident(&module.source, iter, KW_STMT_SYNC)?;
 
        let body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 

	
 
        Ok(ctx.heap.alloc_synchronous_statement(|this| SynchronousStatement{
 
            this,
 
            span: synchronous_span,
 
            body,
 
            end_sync: None,
 
            parent_scope: None,
 
        }))
 
    }
 

	
 
    fn consume_return_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ReturnStatementId, ParseError> {
 
        let return_span = consume_exact_ident(&module.source, iter, KW_STMT_RETURN)?;
 
        let mut scoped_section = self.expressions.start_section();
 

	
 
        consume_comma_separated_until(
 
            TokenKind::SemiColon, &module.source, iter,
 
            |source, iter| self.consume_expression(module, iter, ctx),
 
            &mut scoped_section, "a return expression", None
 
        )?;
 
        let expressions = scoped_section.into_vec();
 

	
 
        if expressions.is_empty() {
 
            return Err(ParseError::new_error_str_at_span(&module.source, return_span, "expected at least one return value"));
 
        }
 

	
 
        Ok(ctx.heap.alloc_return_statement(|this| ReturnStatement{
 
            this,
 
            span: return_span,
 
            expressions
 
        }))
 
    }
 

	
 
    fn consume_goto_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<GotoStatementId, ParseError> {
 
        let goto_span = consume_exact_ident(&module.source, iter, KW_STMT_GOTO)?;
 
        let label = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
        Ok(ctx.heap.alloc_goto_statement(|this| GotoStatement{
 
            this,
 
            span: goto_span,
 
            label,
 
            target: None
 
        }))
 
    }
 

	
 
    fn consume_new_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<NewStatementId, ParseError> {
 
        let new_span = consume_exact_ident(&module.source, iter, KW_STMT_NEW)?;
 

	
 
        // TODO: @Cleanup, should just call something like consume_component_expression-ish
 
        let start_pos = iter.last_valid_pos();
 
        let expression_id = self.consume_primary_expression(module, iter, ctx)?;
 
        let expression = &ctx.heap[expression_id];
 
        let mut valid = false;
 

	
 
        let mut call_id = CallExpressionId::new_invalid();
 
        if let Expression::Call(expression) = expression {
 
            // Allow both components and functions, as it makes more sense to
 
            // check their correct use in the validation and linking pass
 
            if expression.method == Method::UserComponent || expression.method == Method::UserFunction {
 
                call_id = expression.this;
 
                valid = true;
 
            }
 
        }
 

	
 
        if !valid {
 
            return Err(ParseError::new_error_str_at_span(
 
                &module.source, InputSpan::from_positions(start_pos, iter.last_valid_pos()), "expected a call expression"
 
            ));
 
        }
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        debug_assert!(!call_id.is_invalid());
 
        Ok(ctx.heap.alloc_new_statement(|this| NewStatement{
 
            this,
 
            span: new_span,
 
            expression: call_id,
 
            next: None
 
        }))
 
    }
 

	
 
    fn consume_channel_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ChannelStatementId, ParseError> {
 
        // Consume channel specification
 
        let channel_span = consume_exact_ident(&module.source, iter, KW_STMT_CHANNEL)?;
 
        let channel_type = if Some(TokenKind::OpenAngle) == iter.next() {
 
            // Retrieve the type of the channel, we're cheating a bit here by
 
            // consuming the first '<' and setting the initial angle depth to 1
 
            // such that our final '>' will be consumed as well.
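            // Illustrative example: for `channel<u8> a -> b;` the '<' is
            // consumed here, and consume_parser_type (called with
            // first_angle_depth = 1) consumes up to and including the
            // matching '>'.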
 
            iter.consume();
 
            let definition_id = self.cur_definition;
 
            let poly_vars = ctx.heap[definition_id].poly_vars();
 
            consume_parser_type(
 
                &module.source, iter, &ctx.symbols, &ctx.heap,
 
                &poly_vars, SymbolScope::Module(module.root_id), definition_id,
 
                true, 1
 
            )?
 
        } else {
 
            // Assume inferred
 
            ParserType{ elements: vec![ParserTypeElement{
 
                full_span: channel_span, // TODO: @Span fix
 
                variant: ParserTypeVariant::Inferred
 
            }]}
 
        };
 

	
 
        let from_identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
        let to_identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        // Construct ports
 
        let from = ctx.heap.alloc_local(|this| Local{
 
            this,
 
            identifier: from_identifier,
 
            parser_type: channel_type.clone(),
 
            relative_pos_in_block: 0,
 
        });
 
        let to = ctx.heap.alloc_local(|this| Local{
 
            this,
 
            identifier: to_identifier,
 
            parser_type: channel_type,
 
            relative_pos_in_block: 0,
 
        });
 

	
 
        // Construct the channel
 
        Ok(ctx.heap.alloc_channel_statement(|this| ChannelStatement{
 
            this,
 
            span: channel_span,
 
            from, to,
 
            relative_pos_in_block: 0,
 
            next: None,
 
        }))
 
    }
 

	
 
    fn consume_labeled_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, section: &mut ScopedSection<StatementId>
 
    ) -> Result<(), ParseError> {
 
        let label = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::Colon)?;
 

	
 
        // Not pretty: consume_statement may produce more than one statement,
        // and those statements need to end up in the outer section in the
        // correct order. So we take another section, push the statements into
        // that one, and then allocate the labeled statement.
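        // Illustrative example: for `lbl: u32 x = 5;` consume_statement pushes
        // a memory statement plus its initializing assignment; the label wraps
        // the first of those and the remainder is appended after it below.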
 
        let mut inner_section = self.statements.start_section();
 
        self.consume_statement(module, iter, ctx, &mut inner_section)?;
 
        debug_assert!(inner_section.len() >= 1);
 

	
 
        let stmt_id = ctx.heap.alloc_labeled_statement(|this| LabeledStatement {
 
            this,
 
            label,
 
            body: inner_section[0],
 
            relative_pos_in_block: 0,
 
            in_sync: None,
 
        });
 

	
 
        if inner_section.len() == 1 {
 
            // Produce the labeled statement pointing to the first statement.
 
            // This is by far the most common case.
 
            inner_section.forget();
 
            section.push(stmt_id.upcast());
 
        } else {
 
            // Produce the labeled statement using the first statement, and push
 
            // the remaining ones at the end.
 
            let inner_statements = inner_section.into_vec();
 
            section.push(stmt_id.upcast());
 
            for idx in 1..inner_statements.len() {
 
                section.push(inner_statements[idx])
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn maybe_consume_memory_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<Option<(MemoryStatementId, ExpressionStatementId)>, ParseError> {
 
        // This is a bit ugly. It would be nicer if, after consuming a valid
        // type that turns out not to be followed by an identifier, we could
        // reparse it as an expression with the type as a hint instead of
        // backtracking.
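        // Illustrative example: `Thing<u8> x = 5;` parses as a type followed
        // by an identifier and becomes a memory statement, whereas something
        // like `a < b` only looks like the start of a type; in that case the
        // identifier test below fails, the iterator is restored, and the
        // caller falls back to parsing an expression statement.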
 
        let iter_state = iter.save();
 
        let definition_id = self.cur_definition;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        let parser_type = consume_parser_type(
 
            &module.source, iter, &ctx.symbols, &ctx.heap, poly_vars,
 
            SymbolScope::Definition(definition_id), definition_id, true, 0
 
        );
 

	
 
        if let Ok(parser_type) = parser_type {
 
            if Some(TokenKind::Ident) == iter.next() {
 
                // Assume this is a proper memory statement
 
                let identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
                let memory_span = InputSpan::from_positions(parser_type.elements[0].full_span.begin, identifier.span.end);
 
                let assign_span = consume_token(&module.source, iter, TokenKind::Equal)?;
 

	
 
                let initial_expr_begin_pos = iter.last_valid_pos();
 
                let initial_expr_id = self.consume_expression(module, iter, ctx)?;
 
                let initial_expr_end_pos = iter.last_valid_pos();
 
                consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
                // Allocate the memory statement with the variable
 
                let local_id = ctx.heap.alloc_local(|this| Local{
 
                    this,
 
                    identifier: identifier.clone(),
 
                    parser_type,
 
                    relative_pos_in_block: 0,
 
                });
 
                let memory_stmt_id = ctx.heap.alloc_memory_statement(|this| MemoryStatement{
 
                    this,
 
                    span: memory_span,
 
                    variable: local_id,
 
                    next: None
 
                });
 

	
 
                // Allocate the initial assignment
 
                let variable_expr_id = ctx.heap.alloc_variable_expression(|this| VariableExpression{
 
                    this,
 
                    identifier,
 
                    declaration: None,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: Default::default()
 
                });
 
                let assignment_expr_id = ctx.heap.alloc_assignment_expression(|this| AssignmentExpression{
 
                    this,
 
                    span: assign_span,
 
                    left: variable_expr_id.upcast(),
 
                    operation: AssignmentOperator::Set,
 
                    right: initial_expr_id,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: Default::default(),
 
                });
 
                let assignment_stmt_id = ctx.heap.alloc_expression_statement(|this| ExpressionStatement{
 
                    this,
 
                    span: InputSpan::from_positions(initial_expr_begin_pos, initial_expr_end_pos),
 
                    expression: assignment_expr_id.upcast(),
 
                    next: None,
 
                });
 

	
 
                return Ok(Some((memory_stmt_id, assignment_stmt_id)))
 
            }
 
        }
 

	
 
        // If here then one of the preconditions for a memory statement was not
 
        // met. So recover the iterator and return
 
        iter.load(iter_state);
 
        Ok(None)
 
    }
 

	
 
    fn consume_expression_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionStatementId, ParseError> {
 
        let start_pos = iter.last_valid_pos();
 
        let expression = self.consume_expression(module, iter, ctx)?;
 
        let end_pos = iter.last_valid_pos();
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        Ok(ctx.heap.alloc_expression_statement(|this| ExpressionStatement{
 
            this,
 
            span: InputSpan::from_positions(start_pos, end_pos),
 
            expression,
 
            next: None,
 
        }))
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Expression Parsing
 
    //--------------------------------------------------------------------------
 

	
 
    fn consume_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_assignment_expression(module, iter, ctx)
 
    }
 

	
 
    fn consume_assignment_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        // Utility to convert token into assignment operator
 
        fn parse_assignment_operator(token: Option<TokenKind>) -> Option<AssignmentOperator> {
 
            use TokenKind as TK;
 
            use AssignmentOperator as AO;
 

	
 
            if token.is_none() {
 
                return None
 
            }
 

	
 
            match token.unwrap() {
 
                TK::Equal               => Some(AO::Set),
 
                TK::StarEquals          => Some(AO::Multiplied),
 
                TK::SlashEquals         => Some(AO::Divided),
 
                TK::PercentEquals       => Some(AO::Remained),
 
                TK::PlusEquals          => Some(AO::Added),
 
                TK::MinusEquals         => Some(AO::Subtracted),
 
                TK::ShiftLeftEquals     => Some(AO::ShiftedLeft),
 
                TK::ShiftRightEquals    => Some(AO::ShiftedRight),
 
                TK::AndEquals           => Some(AO::BitwiseAnded),
 
                TK::CaretEquals         => Some(AO::BitwiseXored),
 
                TK::OrEquals            => Some(AO::BitwiseOred),
 
                _                       => None
 
            }
 
        }
 

	
 
        let expr = self.consume_conditional_expression(module, iter, ctx)?;
 
        if let Some(operation) = parse_assignment_operator(iter.next()) {
 
            let span = iter.next_span();
 
            iter.consume();
 

	
 
            let left = expr;
 
            let right = self.consume_expression(module, iter, ctx)?;
 

	
 
            Ok(ctx.heap.alloc_assignment_expression(|this| AssignmentExpression{
 
                this, span, left, operation, right,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast())
 
        } else {
 
            Ok(expr)
 
        }
 
    }
 

	
 
    fn consume_conditional_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        let result = self.consume_concat_expression(module, iter, ctx)?;
 
        if let Some(TokenKind::Question) = iter.next() {
 
            let span = iter.next_span();
 
            iter.consume();
 

	
 
            let test = result;
 
            let true_expression = self.consume_expression(module, iter, ctx)?;
 
            consume_token(&module.source, iter, TokenKind::Colon)?;
 
            let false_expression = self.consume_expression(module, iter, ctx)?;
 
            Ok(ctx.heap.alloc_conditional_expression(|this| ConditionalExpression{
 
                this, span, test, true_expression, false_expression,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 

	
 
    fn consume_concat_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::At) => Some(BinaryOperator::Concatenate),
 
                _ => None
 
            },
 
            Self::consume_logical_or_expression
 
        )
 
    }
 

	
 
    fn consume_logical_or_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::OrOr) => Some(BinaryOperator::LogicalOr),
 
                _ => None
 
            },
 
            Self::consume_logical_and_expression
 
        )
 
    }
 

	
 
    fn consume_logical_and_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::AndAnd) => Some(BinaryOperator::LogicalAnd),
 
                _ => None
 
            },
 
            Self::consume_bitwise_or_expression
 
        )
 
    }
 

	
 
    fn consume_bitwise_or_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Or) => Some(BinaryOperator::BitwiseOr),
 
                _ => None
 
            },
 
            Self::consume_bitwise_xor_expression
 
        )
 
    }
 

	
 
    fn consume_bitwise_xor_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Caret) => Some(BinaryOperator::BitwiseXor),
 
                _ => None
 
            },
 
            Self::consume_bitwise_and_expression
 
        )
 
    }
 

	
 
    fn consume_bitwise_and_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::And) => Some(BinaryOperator::BitwiseAnd),
 
                _ => None
 
            },
 
            Self::consume_equality_expression
 
        )
 
    }
 

	
 
    fn consume_equality_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::EqualEqual) => Some(BinaryOperator::Equality),
 
                Some(TokenKind::NotEqual) => Some(BinaryOperator::Inequality),
 
                _ => None
 
            },
 
            Self::consume_relational_expression
 
        )
 
    }
 

	
 
    fn consume_relational_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::OpenAngle) => Some(BinaryOperator::LessThan),
 
                Some(TokenKind::CloseAngle) => Some(BinaryOperator::GreaterThan),
 
                Some(TokenKind::LessEquals) => Some(BinaryOperator::LessThanEqual),
 
                Some(TokenKind::GreaterEquals) => Some(BinaryOperator::GreaterThanEqual),
 
                _ => None
 
            },
 
            Self::consume_shift_expression
 
        )
 
    }
 

	
 
    fn consume_shift_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::ShiftLeft) => Some(BinaryOperator::ShiftLeft),
 
                Some(TokenKind::ShiftRight) => Some(BinaryOperator::ShiftRight),
 
                _ => None
 
            },
 
            Self::consume_add_or_subtract_expression
 
        )
 
    }
 

	
 
    fn consume_add_or_subtract_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Plus) => Some(BinaryOperator::Add),
 
                Some(TokenKind::Minus) => Some(BinaryOperator::Subtract),
 
                _ => None,
 
            },
 
            Self::consume_multiply_divide_or_modulus_expression
 
        )
 
    }
 

	
 
    fn consume_multiply_divide_or_modulus_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Star) => Some(BinaryOperator::Multiply),
 
                Some(TokenKind::Slash) => Some(BinaryOperator::Divide),
 
                Some(TokenKind::Percent) => Some(BinaryOperator::Remainder),
 
                _ => None
 
            },
 
            Self::consume_prefix_expression
 
        )
 
    }
 

	
 
    fn consume_prefix_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        fn parse_prefix_token(token: Option<TokenKind>) -> Option<UnaryOperation> {
 
            use TokenKind as TK;
 
            use UnaryOperation as UO;
 
            match token {
 
                Some(TK::Plus) => Some(UO::Positive),
 
                Some(TK::Minus) => Some(UO::Negative),
 
                Some(TK::PlusPlus) => Some(UO::PreIncrement),
 
                Some(TK::MinusMinus) => Some(UO::PreDecrement),
 
                Some(TK::Tilde) => Some(UO::BitwiseNot),
 
                Some(TK::Exclamation) => Some(UO::LogicalNot),
 
                _ => None
 
            }
 
        }
 

	
 
        if let Some(operation) = parse_prefix_token(iter.next()) {
 
            let span = iter.next_span();
 
            iter.consume();
 

	
 
            let expression = self.consume_prefix_expression(module, iter, ctx)?;
 
            Ok(ctx.heap.alloc_unary_expression(|this| UnaryExpression {
 
                this, span, operation, expression,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default()
 
            }).upcast())
 
        } else {
 
            self.consume_postfix_expression(module, iter, ctx)
 
        }
 
    }
 

	
 
    fn consume_postfix_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        fn has_matching_postfix_token(token: Option<TokenKind>) -> bool {
 
            use TokenKind as TK;
 

	
 
            if token.is_none() { return false; }
 
            match token.unwrap() {
 
                TK::PlusPlus | TK::MinusMinus | TK::OpenSquare | TK::Dot => true,
 
                _ => false
 
            }
 
        }
 

	
 
        let mut result = self.consume_primary_expression(module, iter, ctx)?;
 
        let mut next = iter.next();
 
        while has_matching_postfix_token(next) {
 
            let token = next.unwrap();
 
            let mut span = iter.next_span();
 
            iter.consume();
 

	
 
            if token == TokenKind::PlusPlus {
 
                result = ctx.heap.alloc_unary_expression(|this| UnaryExpression{
 
                    this, span,
 
                    operation: UnaryOperation::PostIncrement,
 
                    expression: result,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default()
 
                }).upcast();
 
            } else if token == TokenKind::MinusMinus {
 
                result = ctx.heap.alloc_unary_expression(|this| UnaryExpression{
 
                    this, span,
 
                    operation: UnaryOperation::PostDecrement,
 
                    expression: result,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default()
 
                }).upcast();
 
            } else if token == TokenKind::OpenSquare {
 
                let subject = result;
 
                let from_index = self.consume_expression(module, iter, ctx)?;
 

	
 
                // Check if we have an indexing or slicing operation
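                // (illustrative: `subject[index]` closes with ']' directly
                // after the first expression, while `subject[from..to]` has a
                // '..' token between the two expressions)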
 
                next = iter.next();
 
                if Some(TokenKind::DotDot) == next {
 
                    iter.consume();
 

	
 
                    let to_index = self.consume_expression(module, iter, ctx)?;
 
                    let end_span = consume_token(&module.source, iter, TokenKind::CloseSquare)?;
 
                    span.end = end_span.end;
 

	
 
                    result = ctx.heap.alloc_slicing_expression(|this| SlicingExpression{
 
                        this, span, subject, from_index, to_index,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default()
 
                    }).upcast();
 
                } else if Some(TokenKind::CloseSquare) == next {
 
                    let end_span = consume_token(&module.source, iter, TokenKind::CloseSquare)?;
 
                    span.end = end_span.end;
 

	
 
                    result = ctx.heap.alloc_indexing_expression(|this| IndexingExpression{
 
                        this, span, subject,
 
                        index: from_index,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default()
 
                    }).upcast();
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        &module.source, iter.last_valid_pos(), "unexpected token: expected ']' or '..'"
 
                    ));
 
                }
 
            } else {
 
                debug_assert_eq!(token, TokenKind::Dot);
 
                let subject = result;
 
                let (field_text, field_span) = consume_ident(&module.source, iter)?;
 
                let field = if field_text == b"length" {
 
                    Field::Length
 
                } else {
 
                    let value = ctx.pool.intern(field_text);
 
                    let identifier = Identifier{ value, span: field_span };
 
                    Field::Symbolic(FieldSymbolic{ identifier, definition: None, field_idx: 0 })
 
                };
 

	
 
                result = ctx.heap.alloc_select_expression(|this| SelectExpression{
 
                    this, span, subject, field,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default()
 
                }).upcast();
 
            }
 

	
 
            next = iter.next();
 
        }
 

	
 
        Ok(result)
 
    }
 

	
 
    fn consume_primary_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        let next = iter.next();
 

	
 
        let result = if next == Some(TokenKind::OpenParen) {
 
            // Expression between parentheses
 
            iter.consume();
 
            let result = self.consume_expression(module, iter, ctx)?;
 
            consume_token(&module.source, iter, TokenKind::CloseParen)?;
 

	
 
            result
 
        } else if next == Some(TokenKind::OpenCurly) {
 
            // Array literal
 
            let (start_pos, mut end_pos) = iter.next_positions();
 
            let mut scoped_section = self.expressions.start_section();
 
            consume_comma_separated(
 
                TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter,
 
                |source, iter| self.consume_expression(module, iter, ctx),
 
                &mut scoped_section, "an expression", "a list of expressions", Some(&mut end_pos)
 
            )?;
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this,
 
                span: InputSpan::from_positions(start_pos, end_pos),
 
                value: Literal::Array(scoped_section.into_vec()),
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast()
 
        } else if next == Some(TokenKind::Integer) {
 
            let (literal, span) = consume_integer_literal(&module.source, iter, &mut self.buffer)?;
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this, span,
 
                value: Literal::Integer(LiteralInteger{ unsigned_value: literal, negated: false }),
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast()
 
        } else if next == Some(TokenKind::String) {
 
            let span = consume_string_literal(&module.source, iter, &mut self.buffer)?;
 
            let interned = ctx.pool.intern(self.buffer.as_bytes());
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this, span,
 
                value: Literal::String(interned),
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast()
 
        } else if next == Some(TokenKind::Character) {
 
            let (character, span) = consume_character_literal(&module.source, iter)?;
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this, span,
 
                value: Literal::Character(character),
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast()
 
        } else if next == Some(TokenKind::Ident) {
 
            // May be a variable, a type instantiation or a function call. If we
 
            // have a single identifier that we cannot find in the type table
 
            // then we're going to assume that we're dealing with a variable.
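            // Illustrative example: in `Foo::Bar(x)` the leading `Foo`
            // resolves through the symbol table and we continue parsing a
            // type instantiation or call below, whereas a plain `x` that is
            // not a known symbol becomes a VariableExpression.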
 
            let ident_span = iter.next_span();
 
            let ident_text = module.source.section_at_span(ident_span);
 
            let symbol = ctx.symbols.get_symbol_by_name(SymbolScope::Module(module.root_id), ident_text);
 

	
 
            if symbol.is_some() {
 
                // The first bit looked like a symbol, so we're going to follow
 
                // that all the way through, assume we arrive at some kind of
 
                // function call or type instantiation
 
                use ParserTypeVariant as PTV;
 

	
 
                let symbol_scope = SymbolScope::Definition(self.cur_definition);
 
                let poly_vars = ctx.heap[self.cur_definition].poly_vars();
 
                let parser_type = consume_parser_type(
 
                    &module.source, iter, &ctx.symbols, &ctx.heap, poly_vars, symbol_scope,
 
                    self.cur_definition, true, 0
 
                )?;
 
                debug_assert!(!parser_type.elements.is_empty());
 
                match parser_type.elements[0].variant {
 
                    PTV::Definition(target_definition_id, _) => {
 
                        let definition = &ctx.heap[target_definition_id];
 
                        match definition {
 
                            Definition::Struct(_) => {
 
                                // Struct literal
 
                                let mut last_token = iter.last_valid_pos();
 
                                let mut struct_fields = Vec::new();
 
                                consume_comma_separated(
 
                                    TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter,
 
                                    |source, iter| {
 
                                        let identifier = consume_ident_interned(source, iter, ctx)?;
 
                                        consume_token(source, iter, TokenKind::Colon)?;
 
                                        let value = self.consume_expression(module, iter, ctx)?;
 
                                        Ok(LiteralStructField{ identifier, value, field_idx: 0 })
 
                                    },
 
                                    &mut struct_fields, "a struct field", "a list of struct field", Some(&mut last_token)
 
                                )?;
 

	
 
                                ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                                    this,
 
                                    span: InputSpan::from_positions(ident_span.begin, last_token),
 
                                    value: Literal::Struct(LiteralStruct{
 
                                        parser_type,
 
                                        fields: struct_fields,
 
                                        definition: target_definition_id,
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    concrete_type: ConcreteType::default(),
 
                                }).upcast()
 
                            },
 
                            Definition::Enum(_) => {
 
                                // Enum literal: consume the variant
 
                                consume_token(&module.source, iter, TokenKind::ColonColon)?;
 
                                let variant = consume_ident_interned(&module.source, iter, ctx)?;
 

	
 
                                ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                                    this,
 
                                    span: InputSpan::from_positions(ident_span.begin, variant.span.end),
 
                                    value: Literal::Enum(LiteralEnum{
 
                                        parser_type,
 
                                        variant,
 
                                        definition: target_definition_id,
 
                                        variant_idx: 0
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    concrete_type: ConcreteType::default()
 
                                }).upcast()
 
                            },
 
                            Definition::Union(_) => {
 
                                // Union literal: consume the variant
 
                                consume_token(&module.source, iter, TokenKind::ColonColon)?;
 
                                let variant = consume_ident_interned(&module.source, iter, ctx)?;
 

	
 
                                // Consume any possible embedded values
 
                                let mut end_pos = iter.last_valid_pos();
 
                                let values = self.consume_expression_list(module, iter, ctx, Some(&mut end_pos))?;
 

	
 
                                ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                                    this,
 
                                    span: InputSpan::from_positions(ident_span.begin, end_pos),
 
                                    value: Literal::Union(LiteralUnion{
 
                                        parser_type, variant, values,
 
                                        definition: target_definition_id,
 
                                        variant_idx: 0,
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    concrete_type: ConcreteType::default()
 
                                }).upcast()
 
                            },
 
                            Definition::Component(_) => {
 
                                // Component instantiation
 
                                let arguments = self.consume_expression_list(module, iter, ctx, None)?;
 

	
 
                                ctx.heap.alloc_call_expression(|this| CallExpression{
 
                                    this,
 
                                    span: parser_type.elements[0].full_span, // TODO: @Span fix
 
                                    parser_type,
 
                                    method: Method::UserComponent,
 
                                    arguments,
 
                                    definition: target_definition_id,
 
                                    parent: ExpressionParent::None,
 
                                    concrete_type: ConcreteType::default(),
 
                                }).upcast()
 
                            },
 
                            Definition::Function(function_definition) => {
 
                                // Function call: consume the arguments
 
                                let arguments = self.consume_expression_list(module, iter, ctx, None)?;
 

	
 
                                // Check whether it is a builtin function
 
                                let method = if function_definition.builtin {
 
                                    match function_definition.identifier.value.as_str() {
 
                                        "get" => Method::Get,
 
                                        "put" => Method::Put,
 
                                        "fires" => Method::Fires,
 
                                        "create" => Method::Create,
 
                                        "length" => Method::Length,
 
                                        "assert" => Method::Assert,
 
                                        _ => unreachable!(),
 
                                    }
 
                                } else {
 
                                    Method::UserFunction
 
                                };
 

	
 
                                ctx.heap.alloc_call_expression(|this| CallExpression{
 
                                    this,
 
                                    span: parser_type.elements[0].full_span, // TODO: @Span fix
 
                                    parser_type,
 
                                    method,
 
                                    arguments,
 
                                    definition: target_definition_id,
 
                                    parent: ExpressionParent::None,
 
                                    concrete_type: ConcreteType::default(),
 
                                }).upcast()
 
                            }
 
                        }
 
                    },
 
                    _ => {
 
                        // TODO: Casting expressions
 
                        return Err(ParseError::new_error_str_at_span(
 
                            &module.source, parser_type.elements[0].full_span,
 
                            "unexpected type in expression, note that casting expressions are not yet implemented"
 
                        ))
 
                    }
 
                }
 
            } else {
 
                // Check for builtin keywords or builtin functions
 
                if ident_text == KW_LIT_NULL || ident_text == KW_LIT_TRUE || ident_text == KW_LIT_FALSE {
 
                    // Parse builtin literal
 
                    let value = match ident_text {
 
                        KW_LIT_NULL => Literal::Null,
 
                        KW_LIT_TRUE => Literal::True,
 
                        KW_LIT_FALSE => Literal::False,
 
                        _ => unreachable!(),
 
                    };
 

	
 
                    ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                        this,
 
                        span: ident_span,
 
                        value,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    }).upcast()
 
                } else {
 
                    // I'm a bit unsure about this. One may as well have wrongfully
 
                    // typed `TypeWithTypo<Subtype>::`, then we assume that
 
                    // `TypeWithTypo` is a variable. So might want to come back to
 
                    // this later to do some silly heuristics.
 
                    iter.consume();
 
                    if Some(TokenKind::ColonColon) == iter.next() {
 
                        return Err(ParseError::new_error_str_at_span(&module.source, ident_span, "unknown identifier"));
 
                    }
 

	
 
                    let ident_text = ctx.pool.intern(ident_text);
 
                    let identifier = Identifier { span: ident_span, value: ident_text };
 

	
 
                    ctx.heap.alloc_variable_expression(|this| VariableExpression {
 
                        this,
 
                        identifier,
 
                        declaration: None,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default()
 
                    }).upcast()
 
                }
 
            }
 
        } else {
 
            return Err(ParseError::new_error_str_at_pos(
 
                &module.source, iter.last_valid_pos(), "expected an expression"
 
            ));
 
        };
 

	
 
        Ok(result)
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Expression Utilities
 
    //--------------------------------------------------------------------------
 

	
 
    #[inline]
 
    fn consume_generic_binary_expression<
 
        M: Fn(Option<TokenKind>) -> Option<BinaryOperator>,
 
        F: Fn(&mut PassDefinitions, &Module, &mut TokenIter, &mut PassCtx) -> Result<ExpressionId, ParseError>
 
    >(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, match_fn: M, higher_precedence_fn: F
 
    ) -> Result<ExpressionId, ParseError> {
 
        let mut result = higher_precedence_fn(self, module, iter, ctx)?;
 
        while let Some(operation) = match_fn(iter.next()) {
 
            let span = iter.next_span();
 
            iter.consume();
 

	
 
            let left = result;
 
            let right = higher_precedence_fn(self, module, iter, ctx)?;
 

	
 
            result = ctx.heap.alloc_binary_expression(|this| BinaryExpression{
 
                this, span, left, operation, right,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default()
 
            }).upcast();
 
        }
 

	
 
        Ok(result)
 
    }
 

	
 
    #[inline]
 
    fn consume_expression_list(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, end_pos: Option<&mut InputPosition>
 
    ) -> Result<Vec<ExpressionId>, ParseError> {
 
        let mut section = self.expressions.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenParen, TokenKind::CloseParen, &module.source, iter,
 
            |source, iter| self.consume_expression(module, iter, ctx),
 
            &mut section, "an expression", "a list of expressions", end_pos
 
        )?;
 
        Ok(section.into_vec())
 
    }
 
}
 

	
 
/// Consumes a type. A type always starts with an identifier which may indicate
 
/// a builtin type or a user-defined type. The fact that it may contain
 
/// polymorphic arguments makes it a tree-like structure. Because we cannot rely
 
/// on knowing the exact number of polymorphic arguments we do not check for
 
/// these.
 
///
 
/// Note that the `first_angle_depth` argument is a bit of a hack: a caller
/// that has already consumed an opening '<' (e.g. the channel statement
/// parser) passes 1 so that the matching '>' is consumed as well.
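///
/// Illustrative example of the element layout: in the simple case a type
/// written as `u8[][]` is stored as the elements `[Array, Array, u8]`, i.e.
/// array wrappers precede the type they wrap. With polymorphic arguments each
/// embedded type is recorded one angle-depth deeper than the type that wraps
/// it (see the `Entry::depth` bookkeeping below).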
 
// TODO: @Optimize, @Span fix
 
fn consume_parser_type(
 
    source: &InputSource, iter: &mut TokenIter, symbols: &SymbolTable, heap: &Heap, poly_vars: &[Identifier],
 
    cur_scope: SymbolScope, wrapping_definition: DefinitionId, allow_inference: bool, first_angle_depth: i32,
 
) -> Result<ParserType, ParseError> {
 
    struct Entry{
 
        element: ParserTypeElement,
 
        depth: i32,
 
    }
 

	
 
    fn insert_array_before(elements: &mut Vec<Entry>, depth: i32, span: InputSpan) {
 
        let index = elements.iter().rposition(|e| e.depth == depth).unwrap();
 
        elements.insert(index, Entry{
 
            element: ParserTypeElement{ full_span: span, variant: ParserTypeVariant::Array },
 
            depth,
 
        });
 
    }
 

	
 
    // Most common case we just have one type, perhaps with some array
 
    // annotations.
 
    let element = consume_parser_type_ident(source, iter, symbols, heap, poly_vars, cur_scope, wrapping_definition, allow_inference)?;
 
    if iter.next() != Some(TokenKind::OpenAngle) {
 
        let mut num_array = 0;
 
        while iter.next() == Some(TokenKind::OpenSquare) {
 
            iter.consume();
 
            consume_token(source, iter, TokenKind::CloseSquare)?;
 
            num_array += 1;
 
        }
 

	
 
        let array_span = element.full_span;
 
        let mut elements = Vec::with_capacity(num_array + 1);
 
        for _ in 0..num_array {
 
            elements.push(ParserTypeElement{ full_span: array_span, variant: ParserTypeVariant::Array });
 
        }
 
        elements.push(element);
 

	
 
        return Ok(ParserType{ elements });
 
    };
 

	
 
    // We have a polymorphic specification. So we start by pushing the item onto
 
    // our stack, then start adding entries together with the angle-brace depth
 
    // at which they're found.
 
    let mut elements = Vec::new();
 
    elements.push(Entry{ element, depth: 0 });
 

	
 
    // Start out with the first '<' consumed.
 
    iter.consume();
 
    enum State { Ident, Open, Close, Comma }
 
    let mut state = State::Open;
 
    let mut angle_depth = first_angle_depth + 1;
 

	
 
    loop {
 
        let next = iter.next();
 

	
 
        match state {
 
            State::Ident => {
 
                // Just parsed an identifier, may expect comma, angled braces,
 
                // or the tokens indicating an array
 
                if Some(TokenKind::OpenAngle) == next {
 
                    angle_depth += 1;
 
                    state = State::Open;
 
                } else if Some(TokenKind::CloseAngle) == next {
 
                    angle_depth -= 1;
 
                    state = State::Close;
 
                } else if Some(TokenKind::ShiftRight) == next {
 
                    angle_depth -= 2;
 
                    state = State::Close;
 
                } else if Some(TokenKind::Comma) == next {
 
                    state = State::Comma;
 
                } else if Some(TokenKind::OpenSquare) == next {
 
                    let (start_pos, _) = iter.next_positions();
 
                    iter.consume(); // consume opening square
 
                    if iter.next() != Some(TokenKind::CloseSquare) {
 
                        return Err(ParseError::new_error_str_at_pos(
 
                            source, iter.last_valid_pos(),
 
                            "unexpected token: expected ']'"
 
                        ));
 
                    }
 
                    let (_, end_pos) = iter.next_positions();
 
                    let array_span = InputSpan::from_positions(start_pos, end_pos);
 
                    insert_array_before(&mut elements, angle_depth, array_span);
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        source, iter.last_valid_pos(),
 
                        "unexpected token: expected '<', '>', ',' or '['")
 
                    );
 
                }
 

	
 
                iter.consume();
 
            },
 
            State::Open => {
 
                // Just parsed an opening angle bracket, expecting an identifier
 
                let element = consume_parser_type_ident(source, iter, symbols, heap, poly_vars, cur_scope, wrapping_definition, allow_inference)?;
 
                elements.push(Entry{ element, depth: angle_depth });
 
                state = State::Ident;
 
            },
 
            State::Close => {
 
                // Just parsed 1 or 2 closing angle brackets, expecting comma,
 
                // more closing brackets or the tokens indicating an array
 
                if Some(TokenKind::Comma) == next {
 
                    state = State::Comma;
 
                } else if Some(TokenKind::CloseAngle) == next {
 
                    angle_depth -= 1;
 
                    state = State::Close;
 
                } else if Some(TokenKind::ShiftRight) == next {
 
                    angle_depth -= 2;
 
                    state = State::Close;
 
                } else if Some(TokenKind::OpenSquare) == next {
 
                    let (start_pos, _) = iter.next_positions();
 
                    iter.consume();
 
                    if iter.next() != Some(TokenKind::CloseSquare) {
 
                        return Err(ParseError::new_error_str_at_pos(
 
                            source, iter.last_valid_pos(),
 
                            "unexpected token: expected ']'"
 
                        ));
 
                    }
 
                    let (_, end_pos) = iter.next_positions();
 
                    let array_span = InputSpan::from_positions(start_pos, end_pos);
 
                    insert_array_before(&mut elements, angle_depth, array_span);
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        source, iter.last_valid_pos(),
 
                        "unexpected token: expected ',', '>', or '['")
 
                    );
 
                }
 

	
 
                iter.consume();
 
            },
 
            State::Comma => {
 
                // Just parsed a comma, expecting an identifier or more closing
 
                // braces
 
                if Some(TokenKind::Ident) == next {
 
                    let element = consume_parser_type_ident(source, iter, symbols, heap, poly_vars, cur_scope, wrapping_definition, allow_inference)?;
 
                    elements.push(Entry{ element, depth: angle_depth });
 
                    state = State::Ident;
 
                } else if Some(TokenKind::CloseAngle) == next {
 
                    iter.consume();
 
                    angle_depth -= 1;
 
                    state = State::Close;
 
                } else if Some(TokenKind::ShiftRight) == next {
 
                    iter.consume();
 
                    angle_depth -= 2;
 
                    state = State::Close;
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        source, iter.last_valid_pos(),
 
                        "unexpected token: expected '>' or a type name"
 
                    ));
 
                }
 
            }
 
        }
 

	
 
        if angle_depth < 0 {
 
            return Err(ParseError::new_error_str_at_pos(source, iter.last_valid_pos(), "unmatched '>'"));
 
        } else if angle_depth == 0 {
 
            break;
 
        }
 
    }
 

	
 
    // If here then we found the correct number of angle braces. But we still
 
    // need to make sure that each encountered type has the correct number of
 
    // embedded types.
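    // An element's direct children are the entries that follow it at exactly
    // `depth + 1`, up to (but not including) the first entry at the same or a
    // lower depth; that is what the peeking loop below counts.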
 
    let mut idx = 0;
 
    while idx < elements.len() {
 
        let cur_element = &elements[idx];
 
        let expected_subtypes = cur_element.element.variant.num_embedded();
 
        let mut encountered_subtypes = 0;
 
        for peek_idx in idx + 1..elements.len() {
 
            let peek_element = &elements[peek_idx];
 
            if peek_element.depth == cur_element.depth + 1 {
 
                encountered_subtypes += 1;
 
            } else if peek_element.depth <= cur_element.depth {
 
                break;
 
            }
 
        }
 

	
 
        if expected_subtypes != encountered_subtypes {
 
            if encountered_subtypes == 0 {
 
                // Case where we have elided the embedded types, all of them
 
                // should be inferred.
 
                if !allow_inference {
 
                    return Err(ParseError::new_error_str_at_span(
 
                        source, cur_element.element.full_span,
 
                        "type inference is not allowed here"
 
                    ));
 
                }
 

	
 
                // Insert the missing types
 
                let inserted_span = cur_element.element.full_span;
 
                let inserted_depth = cur_element.depth + 1;
 
                elements.reserve(expected_subtypes);
 
                for _ in 0..expected_subtypes {
 
                    elements.insert(idx + 1, Entry{
 
                        element: ParserTypeElement{ full_span: inserted_span, variant: ParserTypeVariant::Inferred },
 
                        depth: inserted_depth,
 
                    });
 
                }
 
            } else {
 
                // Mismatch in number of embedded types
 
                let expected_args_text = if expected_subtypes == 1 {
 
                    "polymorphic argument"
 
                } else {
 
                    "polymorphic arguments"
 
                };
 

	
 
                let maybe_infer_text = if allow_inference {
 
                    " (or none, to perform implicit type inference)"
 
                } else {
 
                    ""
 
                };
 

	
 
                return Err(ParseError::new_error_at_span(
 
                    source, cur_element.element.full_span,
 
                    format!(
 
                        "expected {} {}{}, but {} were provided",
 
                        expected_subtypes, expected_args_text, maybe_infer_text, encountered_subtypes
 
                    )
 
                ));
 
            }
 
        }
 

	
 
        idx += 1;
 
    }
 

	
 
    let mut constructed_elements = Vec::with_capacity(elements.len());
 
    for element in elements.into_iter() {
 
        constructed_elements.push(element.element);
 
    }
 

	
 
    Ok(ParserType{ elements: constructed_elements })
 
}
 

	
 
/// Consumes an identifier for which we assume that it resolves to some kind of
 
/// type. Once we actually arrive at a type we will stop parsing. Hence there
 
/// may be trailing '::' tokens in the iterator.
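/// As an illustrative example: given `some_module::Foo<T>` this consumes up to
/// and including `Foo`, leaving the `<T>` tokens in the iterator.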
 
fn consume_parser_type_ident(
 
    source: &InputSource, iter: &mut TokenIter, symbols: &SymbolTable, heap: &Heap, poly_vars: &[Identifier],
 
    mut scope: SymbolScope, wrapping_definition: DefinitionId, allow_inference: bool,
 
) -> Result<ParserTypeElement, ParseError> {
 
    use ParserTypeVariant as PTV;
 
    let (mut type_text, mut type_span) = consume_any_ident(source, iter)?;
 

	
 
    let variant = match type_text {
 
        KW_TYPE_MESSAGE => PTV::Message,
 
        KW_TYPE_BOOL => PTV::Bool,
 
        KW_TYPE_UINT8 => PTV::UInt8,
 
        KW_TYPE_UINT16 => PTV::UInt16,
 
        KW_TYPE_UINT32 => PTV::UInt32,
 
        KW_TYPE_UINT64 => PTV::UInt64,
 
        KW_TYPE_SINT8 => PTV::SInt8,
 
        KW_TYPE_SINT16 => PTV::SInt16,
 
        KW_TYPE_SINT32 => PTV::SInt32,
 
        KW_TYPE_SINT64 => PTV::SInt64,
 
        KW_TYPE_IN_PORT => PTV::Input,
 
        KW_TYPE_OUT_PORT => PTV::Output,
 
        KW_TYPE_CHAR => PTV::Character,
 
        KW_TYPE_STRING => PTV::String,
 
        KW_TYPE_INFERRED => {
 
            if !allow_inference {
 
                return Err(ParseError::new_error_str_at_span(source, type_span, "type inference is not allowed here"));
 
            }
 

	
 
            PTV::Inferred
 
        },
 
        _ => {
 
            // Must be some kind of symbolic type
 
            let mut type_kind = None;
 
            for (poly_idx, poly_var) in poly_vars.iter().enumerate() {
 
                if poly_var.value.as_bytes() == type_text {
 
                    type_kind = Some(PTV::PolymorphicArgument(wrapping_definition, poly_idx));
 
                }
 
            }
 

	
 
            if type_kind.is_none() {
 
                // Check symbol table for definition. To be fair, the language
 
                // only allows a single namespace for now. That said:
 
                let last_symbol = symbols.get_symbol_by_name(scope, type_text);
 
                if last_symbol.is_none() {
 
                    return Err(ParseError::new_error_str_at_span(source, type_span, "unknown type"));
 
                }
 
                let mut last_symbol = last_symbol.unwrap();
 

	
 
                loop {
 
                    match &last_symbol.variant {
 
                        SymbolVariant::Module(symbol_module) => {
 
                            // Expecting more identifiers
 
                            if Some(TokenKind::ColonColon) != iter.next() {
 
                                return Err(ParseError::new_error_str_at_span(source, type_span, "expected type but got module"));
 
                            }
 

	
 
                            consume_token(source, iter, TokenKind::ColonColon)?;
 

	
 
                            // Consume next part of type and prepare for next
 
                            // lookup loop
 
                            let (next_text, next_span) = consume_any_ident(source, iter)?;
 
                            let old_text = type_text;
 
                            type_text = next_text;
 
                            type_span.end = next_span.end;
 
                            scope = SymbolScope::Module(symbol_module.root_id);
 

	
 
                            let new_symbol = symbols.get_symbol_by_name_defined_in_scope(scope, type_text);
 
                            if new_symbol.is_none() {
 
                                return Err(ParseError::new_error_at_span(
 
                                    source, next_span,
 
                                    format!(
 
                                        "unknown type '{}' in module '{}'",
 
                                        String::from_utf8_lossy(type_text),
 
                                        String::from_utf8_lossy(old_text)
 
                                    )
 
                                ));
 
                            }
 

	
 
                            last_symbol = new_symbol.unwrap();
 
                        },
 
                        SymbolVariant::Definition(symbol_definition) => {
 
                            let num_poly_vars = heap[symbol_definition.definition_id].poly_vars().len();
 
                            type_kind = Some(PTV::Definition(symbol_definition.definition_id, num_poly_vars));
 
                            break;
 
                        }
 
                    }
 
                }
 
            }
 

	
 
            debug_assert!(type_kind.is_some());
 
            type_kind.unwrap()
 
        },
 
    };
 

	
 
    Ok(ParserTypeElement{ full_span: type_span, variant })
 
}
 

	
 
/// Consumes polymorphic variables and throws them on the floor.
 
fn consume_polymorphic_vars_spilled(source: &InputSource, iter: &mut TokenIter) -> Result<(), ParseError> {
 
    maybe_consume_comma_separated_spilled(
 
        TokenKind::OpenAngle, TokenKind::CloseAngle, source, iter,
 
        |source, iter| {
 
            consume_ident(source, iter)?;
 
            Ok(())
 
        }, "a polymorphic variable"
 
    )?;
 
    Ok(())
 
}
 

	
 
/// Consumes the parameter list to functions/components
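/// Each parameter is a type followed by an identifier, e.g. (illustratively)
/// `(SomeType value, OtherType other)`.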
 
fn consume_parameter_list(
 
    source: &InputSource, iter: &mut TokenIter, ctx: &mut PassCtx, target: &mut ScopedSection<ParameterId>,
 
    poly_vars: &[Identifier], scope: SymbolScope, definition_id: DefinitionId
 
) -> Result<(), ParseError> {
 
    consume_comma_separated(
 
        TokenKind::OpenParen, TokenKind::CloseParen, source, iter,
 
        |source, iter| {
 
            let (start_pos, _) = iter.next_positions();
 
            let parser_type = consume_parser_type(
 
                source, iter, &ctx.symbols, &ctx.heap, poly_vars, scope,
 
                definition_id, false, 0
 
            )?;
 
            let identifier = consume_ident_interned(source, iter, ctx)?;
 
            let parameter_id = ctx.heap.alloc_parameter(|this| Parameter{
 
                this,
 
                span: InputSpan::from_positions(start_pos, identifier.span.end),
 
                parser_type,
 
                identifier
 
            });
 
            Ok(parameter_id)
 
        },
 
        target, "a parameter", "a parameter list", None
 
    )
 
}
 
\ No newline at end of file
src/protocol/parser/pass_imports.rs
 
use crate::protocol::ast::*;
 
use super::symbol_table::*;
 
use super::{Module, ModuleCompilationPhase, PassCtx};
 
use super::tokens::*;
 
use super::token_parsing::*;
 
use crate::protocol::input_source::{InputSource as InputSource, InputSpan, ParseError};
 
use crate::collections::*;
 

	
 
/// Parses all the imports in the module tokens. This pass runs after the
/// definitions and names of all modules are resolved, hence we should be able
/// to resolve all symbols to their appropriate module/definition.
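///
/// Illustratively (exact syntax aside), the forms handled below are a plain
/// module import, a module import with an `as` alias, a single imported symbol
/// (optionally aliased), a `{ ... }` list of imported symbols, and a `*`
/// wildcard import of all symbols in the module.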
 
pub(crate) struct PassImport {
 
    imports: Vec<ImportId>,
 
    found_symbols: Vec<(AliasedSymbol, SymbolDefinition)>,
 
    scoped_symbols: Vec<Symbol>,
 
}
 

	
 
impl PassImport {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            imports: Vec::with_capacity(32),
 
            found_symbols: Vec::with_capacity(32),
 
            scoped_symbols: Vec::with_capacity(32),
 
        }
 
    }
 
    pub(crate) fn parse(&mut self, modules: &mut [Module], module_idx: usize, ctx: &mut PassCtx) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let module_range = &module.tokens.ranges[0];
 
        debug_assert!(modules.iter().all(|m| m.phase >= ModuleCompilationPhase::SymbolsScanned));
 
        debug_assert_eq!(module.phase, ModuleCompilationPhase::SymbolsScanned);
 
        debug_assert_eq!(module_range.range_kind, TokenRangeKind::Module);
 

	
 
        let mut range_idx = module_range.first_child_idx;
 
        loop {
 
            let range_idx_usize = range_idx as usize;
 
            let cur_range = &module.tokens.ranges[range_idx_usize];
 

	
 
            if cur_range.range_kind == TokenRangeKind::Import {
 
                self.visit_import_range(modules, module_idx, ctx, range_idx_usize)?;
 
            }
 

	
 
            match cur_range.next_sibling_idx {
 
                Some(idx) => { range_idx = idx; },
 
                None => { break; }
 
            }
 
        }
 

	
 
        let root = &mut ctx.heap[module.root_id];
 
        root.imports.extend(self.imports.drain(..));
 

	
 
        let module = &mut modules[module_idx];
 
        module.phase = ModuleCompilationPhase::ImportsResolved;
 

	
 
        Ok(())
 
    }
 

	
 
    pub(crate) fn visit_import_range(
 
        &mut self, modules: &mut [Module], module_idx: usize, ctx: &mut PassCtx, range_idx: usize
 
    ) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let import_range = &module.tokens.ranges[range_idx];
 
        debug_assert_eq!(import_range.range_kind, TokenRangeKind::Import);
 

	
 
        let mut iter = module.tokens.iter_range(import_range);
 

	
 
        // Consume "import"
 
        let (_import_ident, import_span) =
 
            consume_any_ident(&module.source, &mut iter)?;
 
        debug_assert_eq!(_import_ident, KW_IMPORT);
 

	
 
        // Consume module name
 
        let (module_name, module_name_span) = consume_domain_ident(&module.source, &mut iter)?;
 
        let target_root_id = ctx.symbols.get_module_by_name(module_name);
 
        if target_root_id.is_none() {
 
            return Err(ParseError::new_error_at_span(
 
                &module.source, module_name_span,
 
                format!("could not resolve module '{}'", String::from_utf8_lossy(module_name))
 
            ));
 
        }
 
        let module_name = ctx.pool.intern(module_name);
 
        let module_identifier = Identifier{ span: module_name_span, value: module_name };
 
        let target_root_id = target_root_id.unwrap();
 

	
 
        // Check for subsequent characters (alias, multiple imported symbols)
 
        let next = iter.next();
 
        let import_id;
 

	
 
        if has_ident(&module.source, &mut iter, b"as") {
 
            // Alias for module
 
            iter.consume();
 
            let alias_identifier = consume_ident_interned(&module.source, &mut iter, ctx)?;
 
            let alias_name = alias_identifier.value.clone();
 

	
 
            import_id = ctx.heap.alloc_import(|this| Import::Module(ImportModule{
 
                this,
 
                span: import_span,
 
                module: module_identifier,
 
                alias: alias_identifier,
 
                module_id: target_root_id
 
            }));
 
            ctx.symbols.insert_symbol(SymbolScope::Module(module.root_id), Symbol{
 
                name: alias_name,
 
                variant: SymbolVariant::Module(SymbolModule{
 
                    root_id: target_root_id,
 
                    introduced_at: import_id,
 
                }),
 
            });
 
        } else if Some(TokenKind::ColonColon) == next {
 
            iter.consume();
 

	
 
            // Helper function to consume symbols, their alias, and the
 
            // definition the symbol is pointing to.
 
            fn consume_symbol_and_maybe_alias<'a>(
 
                source: &'a InputSource, iter: &mut TokenIter, ctx: &mut PassCtx,
 
                module_name: &StringRef<'static>, module_root_id: RootId,
 
            ) -> Result<(AliasedSymbol, SymbolDefinition), ParseError> {
 
                // Consume symbol name and make sure it points to an existing definition
 
                let symbol_identifier = consume_ident_interned(source, iter, ctx)?;
 
                let target = ctx.symbols.get_symbol_by_name_defined_in_scope(
 
                    SymbolScope::Module(module_root_id), symbol_identifier.value.as_bytes()
 
                );
 

	
 
                if target.is_none() {
 
                    return Err(ParseError::new_error_at_span(
 
                        source, symbol_identifier.span,
 
                        format!(
 
                            "could not find symbol '{}' within module '{}'",
 
                            symbol_identifier.value.as_str(), module_name.as_str()
 
                        )
 
                    ));
 
                }
 
                let target = target.unwrap();
 
                debug_assert_ne!(target.class(), SymbolClass::Module);
 
                let target_definition = target.variant.as_definition();
 

	
 
                // Consume alias text if specified
 
                let alias_identifier = if peek_ident(source, iter) == b"as" {
 
                    // Consume alias
 
                    iter.consume();
 
                    Some(consume_ident_interned(source, iter, ctx)?)
 
                } else {
 
                    None
 
                };
 

	
 
                Ok((
 
                    AliasedSymbol{
 
                        name: symbol_identifier,
 
                        alias: alias_identifier,
 
                        definition_id: target_definition.definition_id,
 
                    },
 
                    target_definition.clone()
 
                ))
 
            }
 

	
 
            let next = iter.next();
 

	
 
            if Some(TokenKind::Ident) == next {
 
                // Importing a single symbol
 
                iter.consume();
 
                let (imported_symbol, symbol_definition) = consume_symbol_and_maybe_alias(
 
                    &module.source, &mut iter, ctx, &module_identifier.value, target_root_id
 
                )?;
 
                let alias_identifier = imported_symbol.alias.unwrap_or_else(|| { imported_symbol.name.clone() });
 
                import_id = ctx.heap.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    span: InputSpan::from_positions(import_span.begin, alias_identifier.span.end),
 
                    module: module_identifier,
 
                    module_id: target_root_id,
 
                    symbols: vec![imported_symbol],
 
                }));
 
                if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(
 
                    SymbolScope::Module(module.root_id),
 
                    Symbol{
 
                        name: alias_identifier.value,
 
                        variant: SymbolVariant::Definition(symbol_definition.into_imported(import_id))
 
                    }
 
                ) {
 
                    return Err(construct_symbol_conflict_error(
 
                        modules, module_idx, ctx, &new_symbol, old_symbol
 
                    ));
 
                }
 
            } else if Some(TokenKind::OpenCurly) == next {
 
                // Importing multiple symbols
 
                let mut end_of_list = iter.last_valid_pos();
 
                consume_comma_separated(
 
                    TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, &mut iter,
 
                    |source, iter| consume_symbol_and_maybe_alias(
 
                        source, iter, ctx, &module_identifier.value, target_root_id
 
                    ),
 
                    &mut self.found_symbols, "a symbol", "a list of symbols to import", Some(&mut end_of_list)
 
                )?;
 

	
 
                // Preallocate import
 
                import_id = ctx.heap.alloc_import(|this| Import::Symbols(ImportSymbols {
 
                    this,
 
                    span: InputSpan::from_positions(import_span.begin, end_of_list),
 
                    module: module_identifier,
 
                    module_id: target_root_id,
 
                    symbols: Vec::with_capacity(self.found_symbols.len()),
 
                }));
 

	
 
                // Fill import symbols while inserting symbols in the
 
                // appropriate scope in the symbol table.
 
                let import = ctx.heap[import_id].as_symbols_mut();
 

	
 
                for (imported_symbol, symbol_definition) in self.found_symbols.drain(..) {
 
                    let import_name = imported_symbol.alias.map_or_else(
 
                        || imported_symbol.name.value.clone(),
 
                        |v| v.value.clone()
 
                    );
 
                    import.symbols.push(imported_symbol);
 
                    if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(
 
                        SymbolScope::Module(module.root_id), Symbol{
 
                            name: import_name,
 
                            variant: SymbolVariant::Definition(symbol_definition.into_imported(import_id))
 
                        }
 
                    ) {
 
                        return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, old_symbol));
 
                    }
 
                }
 
            } else if Some(TokenKind::Star) == next {
 
                // Import all symbols from the module
 
                let star_span = iter.next_span();
 

	
 
                iter.consume();
 
                self.scoped_symbols.clear();
 
                let _found = ctx.symbols.get_all_symbols_defined_in_scope(
 
                    SymbolScope::Module(target_root_id),
 
                    &mut self.scoped_symbols
 
                );
 
                debug_assert!(_found); // even modules without symbols should have a scope
 

	
 
                // Preallocate import
 
                import_id = ctx.heap.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    span: InputSpan::from_positions(import_span.begin, star_span.end),
 
                    module: module_identifier,
 
                    module_id: target_root_id,
 
                    symbols: Vec::with_capacity(self.scoped_symbols.len())
 
                }));
 

	
 
                // Fill import AST node and symbol table
 
                let import = ctx.heap[import_id].as_symbols_mut();
 

	
 
                for symbol in self.scoped_symbols.drain(..) {
 
                    let symbol_name = symbol.name;
 
                    match symbol.variant {
 
                        SymbolVariant::Definition(symbol_definition) => {
 
                            import.symbols.push(AliasedSymbol{
 
                                name: Identifier{ span: star_span, value: symbol.name.clone() },
 
                                alias: None,
 
                                definition_id: symbol_definition.definition_id,
 
                            });
 

	
 
                            if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(
 
                                SymbolScope::Module(module.root_id),
 
                                Symbol{
 
                                    name: symbol_name,
 
                                    variant: SymbolVariant::Definition(symbol_definition.into_imported(import_id))
 
                                }
 
                            ) {
 
                                return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, old_symbol));
 
                            }
 
                        },
 
                        _ => unreachable!(),
 
                    }
 
                }
 
            } else {
 
                return Err(ParseError::new_error_str_at_pos(
 
                    &module.source, iter.last_valid_pos(), "expected symbol name, '{' or '*'"
 
                ));
 
            }
 
        } else {
 
            // Assume implicit alias
 
            let module_name_str = module_identifier.value.clone();
 
            let last_ident_start = module_name_str.as_str().rfind('.').map_or(0, |v| v + 1);
 
            let alias_text = &module_name_str.as_bytes()[last_ident_start..];
 
            let alias = ctx.pool.intern(alias_text);
 
            let alias_span = InputSpan::from_positions(
 
                module_name_span.begin.with_offset(last_ident_start as u32),
 
                module_name_span.end
 
            );
 
            let alias_identifier = Identifier{ span: alias_span, value: alias.clone() };
 

	
 
            import_id = ctx.heap.alloc_import(|this| Import::Module(ImportModule{
 
                this,
 
                span: InputSpan::from_positions(import_span.begin, module_identifier.span.end),
 
                module: module_identifier,
 
                alias: alias_identifier,
 
                module_id: target_root_id,
 
            }));
 
            ctx.symbols.insert_symbol(SymbolScope::Module(module.root_id), Symbol{
 
                name: alias,
 
                variant: SymbolVariant::Module(SymbolModule{
 
                    root_id: target_root_id,
 
                    introduced_at: import_id
 
                })
 
            });
 
        }
 

	
 
        // By now the `import_id` is set; we just need to make sure that the
        // import properly ends with a semicolon.
 
        consume_token(&module.source, &mut iter, TokenKind::SemiColon)?;
 
        self.imports.push(import_id);
 

	
 
        Ok(())
 
    }
 
}
src/protocol/parser/pass_symbols.rs
 
use crate::protocol::ast::*;
 
use super::symbol_table::*;
 
use crate::protocol::input_source::{ParseError, InputSpan};
 
use super::tokens::*;
 
use super::token_parsing::*;
 
use super::{Module, ModuleCompilationPhase, PassCtx};
 

	
 
/// Scans the module and finds all module-level type definitions. These will be
 
/// added to the symbol table such that during AST-construction we know which
 
/// identifiers point to types. It also parses all pragmas to determine module
 
/// names.
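///
/// Note that this pass only inspects the `Definition` and `Pragma` token
/// ranges produced by the tokenizer; definition bodies are not parsed here.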
 
pub(crate) struct PassSymbols {
 
    symbols: Vec<Symbol>,
 
    pragmas: Vec<PragmaId>,
 
    imports: Vec<ImportId>,
 
    definitions: Vec<DefinitionId>,
 
    buffer: String,
 
    has_pragma_version: bool,
 
    has_pragma_module: bool,
 
}
 

	
 
impl PassSymbols {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            symbols: Vec::with_capacity(128),
 
            pragmas: Vec::with_capacity(8),
 
            imports: Vec::with_capacity(32),
 
            definitions: Vec::with_capacity(128),
 
            buffer: String::with_capacity(128),
 
            has_pragma_version: false,
 
            has_pragma_module: false,
 
        }
 
    }
 

	
 
    fn reset(&mut self) {
 
        self.symbols.clear();
 
        self.pragmas.clear();
 
        self.imports.clear();
 
        self.definitions.clear();
 
        self.has_pragma_version = false;
 
        self.has_pragma_module = false;
 
    }
 

	
 
    pub(crate) fn parse(&mut self, modules: &mut [Module], module_idx: usize, ctx: &mut PassCtx) -> Result<(), ParseError> {
 
        self.reset();
 

	
 
        let module = &mut modules[module_idx];
 
        let module_range = &module.tokens.ranges[0];
 

	
 
        debug_assert_eq!(module.phase, ModuleCompilationPhase::Tokenized);
 
        debug_assert_eq!(module_range.range_kind, TokenRangeKind::Module);
 
        debug_assert!(module.root_id.is_invalid()); // not set yet,
 

	
 
        // Preallocate root in the heap
 
        let root_id = ctx.heap.alloc_protocol_description(|this| {
 
            Root{
 
                this,
 
                pragmas: Vec::new(),
 
                imports: Vec::new(),
 
                definitions: Vec::new(),
 
            }
 
        });
 
        module.root_id = root_id;
 

	
 
        // Visit token ranges to detect definitions and pragmas
 
        let mut range_idx = module_range.first_child_idx;
 
        loop {
 
            let range_idx_usize = range_idx as usize;
 
            let cur_range = &module.tokens.ranges[range_idx_usize];
 

	
 
            // Parse if it is a definition or a pragma
 
            if cur_range.range_kind == TokenRangeKind::Definition {
 
                self.visit_definition_range(modules, module_idx, ctx, range_idx_usize)?;
 
            } else if cur_range.range_kind == TokenRangeKind::Pragma {
 
                self.visit_pragma_range(modules, module_idx, ctx, range_idx_usize)?;
 
            }
 

	
 
            match cur_range.next_sibling_idx {
 
                Some(idx) => { range_idx = idx; },
 
                None => { break; },
 
            }
 
        }
 

	
 
        // Add the module's symbol scope and the symbols we just parsed
 
        let module_scope = SymbolScope::Module(root_id);
 
        ctx.symbols.insert_scope(None, module_scope);
 
        for symbol in self.symbols.drain(..) {
 
            if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(module_scope, symbol) {
 
                return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, old_symbol))
 
            }
 
        }
 

	
 
        // Modify the preallocated root
 
        let root = &mut ctx.heap[root_id];
 
        root.pragmas.extend(self.pragmas.drain(..));
 
        root.definitions.extend(self.definitions.drain(..));
 
        module.phase = ModuleCompilationPhase::SymbolsScanned;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_pragma_range(&mut self, modules: &[Module], module_idx: usize, ctx: &mut PassCtx, range_idx: usize) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let range = &module.tokens.ranges[range_idx];
 
        let mut iter = module.tokens.iter_range(range);
 

	
 
        // Consume pragma name
 
        let (pragma_section, pragma_start, _) = consume_pragma(&module.source, &mut iter)?;
 

	
 
        // Consume pragma values
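        // (illustratively: a `#module` pragma is followed by a domain-style
        // module name, a `#version` pragma by an integer literal)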
 
        if pragma_section == b"#module" {
 
            // Check if name is defined twice within the same file
 
            if self.has_pragma_module {
 
                return Err(ParseError::new_error_str_at_pos(&module.source, pragma_start, "module name is defined twice"));
 
            }
 

	
 
            // Consume the domain-name
 
            let (module_name, module_span) = consume_domain_ident(&module.source, &mut iter)?;
 
            if iter.next().is_some() {
 
                return Err(ParseError::new_error_str_at_pos(&module.source, iter.last_valid_pos(), "expected end of #module pragma after module name"));
 
            }
 

	
 
            // Add to heap and symbol table
 
            let pragma_span = InputSpan::from_positions(pragma_start, module_span.end);
 
            let module_name = ctx.pool.intern(module_name);
 
            let pragma_id = ctx.heap.alloc_pragma(|this| Pragma::Module(PragmaModule{
 
                this,
 
                span: pragma_span,
 
                value: Identifier{ span: module_span, value: module_name.clone() },
 
            }));
 
            self.pragmas.push(pragma_id);
 

	
 
            if let Err(other_module_root_id) = ctx.symbols.insert_module(module_name, module.root_id) {
 
                // Naming conflict
 
                let this_module = &modules[module_idx];
 
                let other_module = seek_module(modules, other_module_root_id).unwrap();
 
                let (other_module_pragma_id, _) = other_module.name.unwrap();
 
                let other_pragma = ctx.heap[other_module_pragma_id].as_module();
 
                return Err(ParseError::new_error_str_at_span(
 
                    &this_module.source, pragma_span, "conflict in module name"
 
                ).with_info_str_at_span(
 
                    &other_module.source, other_pragma.span, "other module is defined here"
 
                ));
 
            }
 
            self.has_pragma_module = true;
 
        } else if pragma_section == b"#version" {
 
            // Check if version is defined twice within the same file
 
            if self.has_pragma_version {
 
                return Err(ParseError::new_error_str_at_pos(&module.source, pragma_start, "module version is defined twice"));
 
            }
 

	
 
            // Consume the version pragma
 
            let (version, version_span) = consume_integer_literal(&module.source, &mut iter, &mut self.buffer)?;
 
            let pragma_id = ctx.heap.alloc_pragma(|this| Pragma::Version(PragmaVersion{
 
                this,
 
                span: InputSpan::from_positions(pragma_start, version_span.end),
 
                version,
 
            }));
 
            self.pragmas.push(pragma_id);
 
            self.has_pragma_version = true;
 
        } else {
 
            // Custom pragma, maybe we support this in the future, but for now
 
            // we don't.
 
            return Err(ParseError::new_error_str_at_pos(&module.source, pragma_start, "illegal pragma name"));
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_definition_range(&mut self, modules: &[Module], module_idx: usize, ctx: &mut PassCtx, range_idx: usize) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let range = &module.tokens.ranges[range_idx];
 
        let definition_span = InputSpan::from_positions(
 
            module.tokens.start_pos(range),
 
            module.tokens.end_pos(range)
 
        );
 
        let mut iter = module.tokens.iter_range(range);
 

	
 
        // First ident must be type of symbol
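        // i.e. one of the KW_STRUCT/KW_ENUM/KW_UNION/KW_FUNCTION/KW_PRIMITIVE/
        // KW_COMPOSITE keywords matched below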
 
        let (kw_text, _) = consume_any_ident(&module.source, &mut iter).unwrap();
 

	
 
        // Retrieve identifier of definition
 
        let identifier = consume_ident_interned(&module.source, &mut iter, ctx)?;
 
        let mut poly_vars = Vec::new();
 
        maybe_consume_comma_separated(
 
            TokenKind::OpenAngle, TokenKind::CloseAngle, &module.source, &mut iter,
 
            |source, iter| consume_ident_interned(source, iter, ctx),
 
            &mut poly_vars, "a polymorphic variable", None
 
        )?;
 
        let ident_text = identifier.value.clone(); // because we need it later
 
        let ident_span = identifier.span.clone();
 

	
 
        // Reserve space in AST for definition and add it to the symbol table
 
        let definition_class;
 
        let ast_definition_id;
 
        match kw_text {
 
            KW_STRUCT => {
 
                let struct_def_id = ctx.heap.alloc_struct_definition(|this| {
 
                    StructDefinition::new_empty(this, module.root_id, definition_span, identifier, poly_vars)
 
                });
 
                definition_class = DefinitionClass::Struct;
 
                ast_definition_id = struct_def_id.upcast();
 
            },
 
            KW_ENUM => {
 
                let enum_def_id = ctx.heap.alloc_enum_definition(|this| {
 
                    EnumDefinition::new_empty(this, module.root_id, definition_span, identifier, poly_vars)
 
                });
 
                definition_class = DefinitionClass::Enum;
 
                ast_definition_id = enum_def_id.upcast();
 
            },
 
            KW_UNION => {
 
                let union_def_id = ctx.heap.alloc_union_definition(|this| {
 
                    UnionDefinition::new_empty(this, module.root_id, definition_span, identifier, poly_vars)
 
                });
 
                definition_class = DefinitionClass::Union;
 
                ast_definition_id = union_def_id.upcast()
 
            },
 
            KW_FUNCTION => {
 
                let func_def_id = ctx.heap.alloc_function_definition(|this| {
 
                    FunctionDefinition::new_empty(this, module.root_id, definition_span, identifier, poly_vars)
 
                });
 
                definition_class = DefinitionClass::Function;
 
                ast_definition_id = func_def_id.upcast();
 
            },
 
            KW_PRIMITIVE | KW_COMPOSITE => {
 
                let component_variant = if kw_text == KW_PRIMITIVE {
 
                    ComponentVariant::Primitive
 
                } else {
 
                    ComponentVariant::Composite
 
                };
 
                let comp_def_id = ctx.heap.alloc_component_definition(|this| {
 
                    ComponentDefinition::new_empty(this, module.root_id, definition_span, component_variant, identifier, poly_vars)
 
                });
 
                definition_class = DefinitionClass::Component;
 
                ast_definition_id = comp_def_id.upcast();
 
            },
 
            _ => unreachable!("encountered keyword '{}' in definition range", String::from_utf8_lossy(kw_text)),
 
        }
 

	
 
        let symbol = Symbol{
 
            name: ident_text,
 
            variant: SymbolVariant::Definition(SymbolDefinition{
 
                defined_in_module: module.root_id,
 
                defined_in_scope: SymbolScope::Module(module.root_id),
 
                definition_span,
 
                identifier_span: ident_span,
 
                imported_at: None,
 
                class: definition_class,
 
                definition_id: ast_definition_id,
 
            }),
 
        };
 
        self.symbols.push(symbol);
 
        self.definitions.push(ast_definition_id);
 

	
 
        Ok(())
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/pass_tokenizer.rs
 
use crate::protocol::input_source::{
 
    InputSource as InputSource,
 
    ParseError,
 
    InputPosition as InputPosition,
 
    InputSpan
 
};
 

	
 
use super::tokens::*;
 
use super::token_parsing::*;
 

	
 
/// Tokenizer is a reusable parser to tokenize multiple source files using the
 
/// same allocated buffers. In a well-formed program, we produce a consistent
 
/// tree of token ranges such that we may identify tokens that represent a
 
/// definition or an import before producing the entire AST.
 
///
 
/// If the program is not well-formed then the tree may be inconsistent, but we
 
/// will detect this once we transform the tokens into the AST. To ensure a
 
/// consistent AST-producing phase we will require the import to have balanced
 
/// curly braces.
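///
/// Illustratively, a module containing a pragma, an import and a definition
/// produces a single `Module` range with a `Pragma`, an `Import` and a
/// `Definition` child range.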
 
pub(crate) struct PassTokenizer {
 
    // Stack of input positions of opening curly braces, used to detect
 
    // unmatched opening braces; unmatched closing braces are detected
 
    // immediately.
 
    curly_stack: Vec<InputPosition>,
 
    // Points to an element in the `TokenBuffer.ranges` variable.
 
    stack_idx: usize,
 
}
 

	
 
impl PassTokenizer {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            curly_stack: Vec::with_capacity(32),
 
            stack_idx: 0
 
        }
 
    }
 

	
 
    pub(crate) fn tokenize(&mut self, source: &mut InputSource, target: &mut TokenBuffer) -> Result<(), ParseError> {
 
        // Assert source and buffer are at start
 
        debug_assert_eq!(source.pos().offset, 0);
 
        debug_assert!(target.tokens.is_empty());
 
        debug_assert!(target.ranges.is_empty());
 

	
 
        // Set up for tokenization by pushing the first range onto the stack.
 
        // This range may get transformed into the appropriate range kind later,
 
        // see `push_range` and `pop_range`.
 
        self.curly_stack.clear();
 
        self.stack_idx = 0;
 
        target.ranges.push(TokenRange{
 
            parent_idx: 0,
 
            range_kind: TokenRangeKind::Module,
 
            curly_depth: 0,
 
            start: 0,
 
            end: 0,
 
            num_child_ranges: 0,
 
            first_child_idx: 0,
 
            last_child_idx: 0,
 
            next_sibling_idx: None,
 
        });
 

	
 
        // Main tokenization loop
 
        while let Some(c) = source.next() {
 
            let token_index = target.tokens.len() as u32;
 

	
 
            if is_char_literal_start(c) {
 
                self.consume_char_literal(source, target)?;
 
            } else if is_string_literal_start(c) {
 
                self.consume_string_literal(source, target)?;
 
            } else if is_identifier_start(c) {
 
                let ident = self.consume_identifier(source, target)?;
 

	
 
                if demarks_definition(ident) {
 
                    self.push_range(target, TokenRangeKind::Definition, token_index);
 
                } else if demarks_import(ident) {
 
                    self.push_range(target, TokenRangeKind::Import, token_index);
 
                }
 
            } else if is_integer_literal_start(c) {
 
                self.consume_number(source, target)?;
 
            } else if is_pragma_start_or_pound(c) {
 
                let was_pragma = self.consume_pragma_or_pound(c, source, target)?;
 
                if was_pragma {
 
                    self.push_range(target, TokenRangeKind::Pragma, token_index);
 
                }
 
            } else if self.is_line_comment_start(c, source) {
 
                self.consume_line_comment(source, target)?;
 
            } else if self.is_block_comment_start(c, source) {
 
                self.consume_block_comment(source, target)?;
 
            } else if is_whitespace(c) {
 
                let contained_newline = self.consume_whitespace(source);
 
                if contained_newline {
 
                    let range = &target.ranges[self.stack_idx];
 
                    if range.range_kind == TokenRangeKind::Pragma {
 
                        self.pop_range(target, target.tokens.len() as u32);
 
                    }
 
                }
 
            } else {
 
                let was_punctuation = self.maybe_parse_punctuation(c, source, target)?;
 
                if let Some((token, token_pos)) = was_punctuation {
 
                    if token == TokenKind::OpenCurly {
 
                        self.curly_stack.push(token_pos);
 
                    } else if token == TokenKind::CloseCurly {
 
                        // Check if this marks the end of a range we're
 
                        // currently processing
 
                        if self.curly_stack.is_empty() {
 
                            return Err(ParseError::new_error_str_at_pos(
 
                                source, token_pos, "unmatched closing curly brace '}'"
 
                            ));
 
                        }
 

	
 
                        self.curly_stack.pop();
 

	
 
                        let range = &target.ranges[self.stack_idx];
 
                        if range.range_kind == TokenRangeKind::Definition && range.curly_depth == self.curly_stack.len() as u32 {
 
                            self.pop_range(target, target.tokens.len() as u32);
 
                        }
 

	
 
                        // Exit early if we have more closing curly braces than
 
                        // opening curly braces
 
                    } else if token == TokenKind::SemiColon {
 
                        // Check if this marks the end of an import
 
                        let range = &target.ranges[self.stack_idx];
 
                        if range.range_kind == TokenRangeKind::Import {
 
                            self.pop_range(target, target.tokens.len() as u32);
 
                        }
 
                    }
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        source, source.pos(), "unexpected character"
 
                    ));
 
                }
 
            }
 
        }
 

	
 
        // End of file, check if our state is correct
 
        if let Some(error) = source.had_error.take() {
 
            return Err(error);
 
        }
 

	
 
        if !self.curly_stack.is_empty() {
 
            // Let's not add a lot of heuristics and just tell the programmer
 
            // that something is wrong
 
            let last_unmatched_open = self.curly_stack.pop().unwrap();
 
            return Err(ParseError::new_error_str_at_pos(
 
                source, last_unmatched_open, "unmatched opening curly brace '{'"
 
            ));
 
        }
 

	
 
        // TODO: @remove once I'm sure the algorithm works. For now it is better
 
        //  if the debugging is a little more expedient
 
        if cfg!(debug_assertions) {
 
            // For each range make sure its children make sense
 
            for parent_idx in 0..target.ranges.len() {
 
                let cur_range = &target.ranges[parent_idx];
 
                if cur_range.num_child_ranges == 0 {
 
                    assert_eq!(cur_range.first_child_idx, parent_idx as u32);
 
                    assert_eq!(cur_range.last_child_idx, parent_idx as u32);
 
                } else {
 
                    assert_ne!(cur_range.first_child_idx, parent_idx as u32);
 
                    assert_ne!(cur_range.last_child_idx, parent_idx as u32);
 

	
 
                    let mut child_counter = 0u32;
 
                    let mut last_child_idx = cur_range.first_child_idx;
 
                    let mut child_idx = Some(cur_range.first_child_idx);
 
                    while let Some(cur_child_idx) = child_idx {
 
                        let child_range = &target.ranges[cur_child_idx as usize];
 
                        assert_eq!(child_range.parent_idx, parent_idx);
 
                        // Track the last visited child so the check below can
                        // compare it against `cur_range.last_child_idx`.
                        last_child_idx = cur_child_idx;
                        child_idx = child_range.next_sibling_idx;
                        child_counter += 1;
 
                    }
 

	
 
                    assert_eq!(cur_range.last_child_idx, last_child_idx);
 
                    assert_eq!(cur_range.num_child_ranges, child_counter);
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn is_line_comment_start(&self, first_char: u8, source: &InputSource) -> bool {
 
        return first_char == b'/' && Some(b'/') == source.lookahead(1);
 
    }
 

	
 
    fn is_block_comment_start(&self, first_char: u8, source: &InputSource) -> bool {
 
        return first_char == b'/' && Some(b'*') == source.lookahead(1);
 
    }
 

	
 
    fn maybe_parse_punctuation(
 
        &mut self, first_char: u8, source: &mut InputSource, target: &mut TokenBuffer
 
    ) -> Result<Option<(TokenKind, InputPosition)>, ParseError> {
 
        debug_assert!(first_char != b'#', "'#' needs special handling");
 
        debug_assert!(first_char != b'\'', "'\'' needs special handling");
 
        debug_assert!(first_char != b'"', "'\"' needs special handling");
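        // Punctuation is matched greedily: longer sequences (e.g. `<<=`) are
        // checked before their shorter prefixes (`<<`, `<`).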
 

	
 
        let pos = source.pos();
 
        let token_kind;
 
        if first_char == b'!' {
 
            source.consume();
 
            if Some(b'=') == source.next() {
 
                source.consume();
 
                token_kind = TokenKind::NotEqual;
 
            } else {
 
                token_kind = TokenKind::Exclamation;
 
            }
 
        } else if first_char == b'%' {
 
            source.consume();
 
            if Some(b'=') == source.next() {
 
                source.consume();
 
                token_kind = TokenKind::PercentEquals;
 
            } else {
 
                token_kind = TokenKind::Percent;
 
            }
 
        } else if first_char == b'&' {
 
            source.consume();
 
            let next = source.next();
 
            if Some(b'&') == next {
 
                source.consume();
 
                token_kind = TokenKind::AndAnd;
 
            } else if Some(b'=') == next {
 
                source.consume();
 
                token_kind = TokenKind::AndEquals;
 
            } else {
 
                token_kind = TokenKind::And;
 
            }
 
        } else if first_char == b'(' {
 
            source.consume();
 
            token_kind = TokenKind::OpenParen;
 
        } else if first_char == b')' {
 
            source.consume();
 
            token_kind = TokenKind::CloseParen;
 
        } else if first_char == b'*' {
 
            source.consume();
 
            if let Some(b'=') = source.next() {
 
                source.consume();
 
                token_kind = TokenKind::StarEquals;
 
            } else {
 
                token_kind = TokenKind::Star;
 
            }
 
        } else if first_char == b'+' {
 
            source.consume();
 
            let next = source.next();
 
            if Some(b'+') == next {
 
                source.consume();
 
                token_kind = TokenKind::PlusPlus;
 
            } else if Some(b'=') == next {
 
                source.consume();
 
                token_kind = TokenKind::PlusEquals;
 
            } else {
 
                token_kind = TokenKind::Plus;
 
            }
 
        } else if first_char == b',' {
 
            source.consume();
 
            token_kind = TokenKind::Comma;
 
        } else if first_char == b'-' {
 
            source.consume();
 
            let next = source.next();
 
            if Some(b'-') == next {
 
                source.consume();
 
                token_kind = TokenKind::MinusMinus;
 
            } else if Some(b'>') == next {
 
                source.consume();
 
                token_kind = TokenKind::ArrowRight;
 
            } else if Some(b'=') == next {
 
                source.consume();
 
                token_kind = TokenKind::MinusEquals;
 
            } else {
 
                token_kind = TokenKind::Minus;
 
            }
 
        } else if first_char == b'.' {
 
            source.consume();
 
            if let Some(b'.') = source.next() {
 
                source.consume();
 
                token_kind = TokenKind::DotDot;
 
            } else {
 
                token_kind = TokenKind::Dot
 
            }
 
        } else if first_char == b'/' {
 
            source.consume();
 
            debug_assert_ne!(Some(b'/'), source.next());
 
            debug_assert_ne!(Some(b'*'), source.next());
 
            if let Some(b'=') = source.next() {
 
                source.consume();
 
                token_kind = TokenKind::SlashEquals;
 
            } else {
 
                token_kind = TokenKind::Slash;
 
            }
 
        } else if first_char == b':' {
 
            source.consume();
 
            if let Some(b':') = source.next() {
 
                source.consume();
 
                token_kind = TokenKind::ColonColon;
 
            } else {
 
                token_kind = TokenKind::Colon;
 
            }
 
        } else if first_char == b';' {
 
            source.consume();
 
            token_kind = TokenKind::SemiColon;
 
        } else if first_char == b'<' {
 
            source.consume();
 
            let next = source.next();
 
            if let Some(b'<') = next {
 
                source.consume();
 
                if let Some(b'=') = source.next() {
 
                    source.consume();
 
                    token_kind = TokenKind::ShiftLeftEquals;
 
                } else {
 
                    token_kind = TokenKind::ShiftLeft;
 
                }
 
            } else if let Some(b'=') = next {
 
                source.consume();
 
                token_kind = TokenKind::LessEquals;
 
            } else {
 
                token_kind = TokenKind::OpenAngle;
 
            }
 
        } else if first_char == b'=' {
 
            source.consume();
 
            if let Some(b'=') = source.next() {
 
                source.consume();
 
                token_kind = TokenKind::EqualEqual;
 
            } else {
 
                token_kind = TokenKind::Equal;
 
            }
 
        } else if first_char == b'>' {
 
            source.consume();
 
            let next = source.next();
 
            if Some(b'>') == next {
 
                source.consume();
 
                if Some(b'=') == source.next() {
 
                    source.consume();
 
                    token_kind = TokenKind::ShiftRightEquals;
 
                } else {
 
                    token_kind = TokenKind::ShiftRight;
 
                }
 
            } else if Some(b'=') == next {
 
                source.consume();
 
                token_kind = TokenKind::GreaterEquals;
 
            } else {
 
                token_kind = TokenKind::CloseAngle;
 
            }
 
        } else if first_char == b'?' {
 
            source.consume();
 
            token_kind = TokenKind::Question;
 
        } else if first_char == b'@' {
 
            source.consume();
 
            token_kind = TokenKind::At;
 
        } else if first_char == b'[' {
 
            source.consume();
 
            token_kind = TokenKind::OpenSquare;
 
        } else if first_char == b']' {
 
            source.consume();
 
            token_kind = TokenKind::CloseSquare;
 
        } else if first_char == b'^' {
 
            source.consume();
 
            if let Some(b'=') = source.next() {
 
                source.consume();
 
                token_kind = TokenKind::CaretEquals;
 
            } else {
 
                token_kind = TokenKind::Caret;
 
            }
 
        } else if first_char == b'{' {
 
            source.consume();
 
            token_kind = TokenKind::OpenCurly;
 
        } else if first_char == b'|' {
 
            source.consume();
 
            let next = source.next();
 
            if Some(b'|') == next {
 
                source.consume();
 
                token_kind = TokenKind::OrOr;
 
            } else if Some(b'=') == next {
 
                source.consume();
 
                token_kind = TokenKind::OrEquals;
 
            } else {
 
                token_kind = TokenKind::Or;
 
            }
 
        } else if first_char == b'}' {
 
            source.consume();
 
            token_kind = TokenKind::CloseCurly;
 
        } else {
 
            self.check_ascii(source)?;
 
            return Ok(None);
 
        }
 

	
 
        target.tokens.push(Token::new(token_kind, pos));
 
        Ok(Some((token_kind, pos)))
 
    }
 

	
 
    fn consume_char_literal(&mut self, source: &mut InputSource, target: &mut TokenBuffer) -> Result<(), ParseError> {
 
        let begin_pos = source.pos();
 

	
 
        // Consume the leading quote
 
        debug_assert!(source.next().unwrap() == b'\'');
 
        source.consume();
 

	
 
        let mut prev_char = b'\'';
 
        while let Some(c) = source.next() {
 
            if !c.is_ascii() {
 
                return Err(ParseError::new_error(source, source.pos(), "non-ASCII character in char literal"));
 
                return Err(ParseError::new_error_str_at_pos(source, source.pos(), "non-ASCII character in char literal"));
 
            }
 
            source.consume();
 

	
 
            // Make sure ending quote was not escaped
 
            if c == b'\'' && prev_char != b'\\' {
 
                prev_char = c;
 
                break;
 
            }
 

	
 
            prev_char = c;
 
        }
 

	
 
        if prev_char != b'\'' {
 
            // Unterminated character literal, reached end of file.
 
            return Err(ParseError::new_error(source, begin_pos, "encountered unterminated character literal"));
 
            return Err(ParseError::new_error_str_at_pos(source, begin_pos, "encountered unterminated character literal"));
 
        }
 

	
 
        let end_pos = source.pos();
 

	
 
        target.tokens.push(Token::new(TokenKind::Character, begin_pos));
 
        target.tokens.push(Token::new(TokenKind::SpanEnd, end_pos));
 

	
 
        Ok(())
 
    }
 

	
 
    fn consume_string_literal(&mut self, source: &mut InputSource, target: &mut TokenBuffer) -> Result<(), ParseError> {
 
        let begin_pos = source.pos();
 

	
 
        // Consume the leading double quotes
 
        debug_assert!(source.next().unwrap() == b'"');
 
        source.consume();
 

	
 
        let mut prev_char = b'"';
 
        while let Some(c) = source.next() {
 
            if !c.is_ascii() {
 
                return Err(ParseError::new_error(source, source.pos(), "non-ASCII character in string literal"));
 
                return Err(ParseError::new_error_str_at_pos(source, source.pos(), "non-ASCII character in string literal"));
 
            }
 

	
 
            source.consume();
 
            if c == b'"' && prev_char != b'\\' {
 
                prev_char = c;
 
                break;
 
            }
 

	
 
            prev_char = c;
 
        }
 

	
 
        if prev_char != b'"' {
 
            // Unterminated string literal
 
            return Err(ParseError::new_error(source, begin_pos, "encountered unterminated string literal"));
 
            return Err(ParseError::new_error_str_at_pos(source, begin_pos, "encountered unterminated string literal"));
 
        }
 

	
 
        let end_pos = source.pos();
 
        target.tokens.push(Token::new(TokenKind::String, begin_pos));
 
        target.tokens.push(Token::new(TokenKind::SpanEnd, end_pos));
 

	
 
        Ok(())
 
    }
 

	
 
    fn consume_pragma_or_pound(&mut self, first_char: u8, source: &mut InputSource, target: &mut TokenBuffer) -> Result<bool, ParseError> {
 
        let start_pos = source.pos();
 
        debug_assert_eq!(first_char, b'#');
 
        source.consume();
 

	
 
        let next = source.next();
 
        if next.is_none() || !is_identifier_start(next.unwrap()) {
 
            // Just a pound sign
 
            target.tokens.push(Token::new(TokenKind::Pound, start_pos));
 
            Ok(false)
 
        } else {
 
            // Pound sign followed by identifier
 
            source.consume();
 
            while let Some(c) = source.next() {
 
                if !is_identifier_remaining(c) {
 
                    break;
 
                }
 
                source.consume();
 
            }
 

	
 
            self.check_ascii(source)?;
 

	
 
            let end_pos = source.pos();
 
            target.tokens.push(Token::new(TokenKind::Pragma, start_pos));
 
            target.tokens.push(Token::new(TokenKind::SpanEnd, end_pos));
 
            Ok(true)
 
        }
 
    }
 

	
 
    fn consume_line_comment(&mut self, source: &mut InputSource, target: &mut TokenBuffer) -> Result<(), ParseError> {
 
        let begin_pos = source.pos();
 

	
 
        // Consume the leading "//"
 
        debug_assert!(source.next().unwrap() == b'/' && source.lookahead(1).unwrap() == b'/');
 
        source.consume();
 
        source.consume();
 

	
 
        let mut prev_char = b'/';
 
        let mut cur_char = b'/';
 
        while let Some(c) = source.next() {
 
            prev_char = cur_char;
 
            cur_char = c;
 

	
 
            if c == b'\n' {
 
                // End of line, note that the newline is not consumed
 
                break;
 
            }
 

	
 
            source.consume();
 
        }
 

	
 
        let mut end_pos = source.pos();
 
        debug_assert_eq!(begin_pos.line, end_pos.line);
 

	
 
        // Modify offset to not include the newline characters
 
        if cur_char == b'\n' {
 
            if prev_char == b'\r' {
 
                end_pos.offset -= 2;
 
            } else {
 
                end_pos.offset -= 1;
 
            }
 
            // Consume final newline
 
            source.consume();
 
        } else {
 
            // End of comment was due to EOF
 
            debug_assert!(source.next().is_none());
 
        }
 

	
 
        target.tokens.push(Token::new(TokenKind::LineComment, begin_pos));
 
        target.tokens.push(Token::new(TokenKind::SpanEnd, end_pos));
 

	
 
        Ok(())
 
    }
 

	
 
    fn consume_block_comment(&mut self, source: &mut InputSource, target: &mut TokenBuffer) -> Result<(), ParseError> {
 
        let begin_pos = source.pos();
 

	
 
        // Consume the leading "/*"
 
        debug_assert!(source.next().unwrap() == b'/' && source.lookahead(1).unwrap() == b'*');
 
        source.consume();
 
        source.consume();
 

	
 
        // Explicitly do not put prev_char at "*", because then "/*/" would
 
        // represent a valid and closed block comment
 
        let mut prev_char = b' ';
 
        let mut is_closed = false;
 
        while let Some(c) = source.next() {
 
            source.consume();
 
            if prev_char == b'*' && c == b'/' {
 
                // End of block comment
 
                is_closed = true;
 
                break;
 
            }
 
            prev_char = c;
 
        }
 

	
 
        if !is_closed {
 
            return Err(ParseError::new_error(source, source.pos(), "encountered unterminated block comment"));
 
            return Err(ParseError::new_error_str_at_pos(
 
                source, source.pos(), "encountered unterminated block comment")
 
            );
 
        }
 

	
 
        let end_pos = source.pos();
 
        target.tokens.push(Token::new(TokenKind::BlockComment, begin_pos));
 
        target.tokens.push(Token::new(TokenKind::SpanEnd, end_pos));
 

	
 
        Ok(())
 
    }
 

	
 
    fn consume_identifier<'a>(&mut self, source: &'a mut InputSource, target: &mut TokenBuffer) -> Result<&'a [u8], ParseError> {
 
        let begin_pos = source.pos();
 
        debug_assert!(is_identifier_start(source.next().unwrap()));
 
        source.consume();
 

	
 
        // Keep reading until no more identifier
 
        while let Some(c) = source.next() {
 
            if !is_identifier_remaining(c) {
 
                break;
 
            }
 

	
 
            source.consume();
 
        }
 
        self.check_ascii(source)?;
 

	
 
        let end_pos = source.pos();
 
        target.tokens.push(Token::new(TokenKind::Ident, begin_pos));
 
        target.tokens.push(Token::new(TokenKind::SpanEnd, end_pos));
 
        Ok(source.section_at_pos(begin_pos, end_pos))
 
    }
 

	
 
    fn consume_number(&mut self, source: &mut InputSource, target: &mut TokenBuffer) -> Result<(), ParseError> {
 
        let begin_pos = source.pos();
 
        debug_assert!(is_integer_literal_start(source.next().unwrap()));
 
        source.consume();
 

	
 
        // Keep reading until it doesn't look like a number anymore
 
        while let Some(c) = source.next() {
 
            if !maybe_number_remaining(c) {
 
                break;
 
            }
 

	
 
            source.consume();
 
        }
 
        self.check_ascii(source)?;
 

	
 
        let end_pos = source.pos();
 
        target.tokens.push(Token::new(TokenKind::Integer, begin_pos));
 
        target.tokens.push(Token::new(TokenKind::SpanEnd, end_pos));
 

	
 
        Ok(())
 
    }
 

	
 
    // Consumes whitespace and returns whether or not the whitespace contained
 
    // a newline.
 
    fn consume_whitespace(&self, source: &mut InputSource) -> bool {
 
        debug_assert!(is_whitespace(source.next().unwrap()));
 

	
 
        let mut has_newline = false;
 
        while let Some(c) = source.next() {
 
            if !is_whitespace(c) {
 
                break;
 
            }
 

	
 
            if c == b'\n' {
 
                has_newline = true;
 
            }
 
            source.consume();
 
        }
 

	
 
        has_newline
 
    }
 

	
 
    /// Pushes a new token range onto the stack in the buffers.
 
    fn push_range(&mut self, target: &mut TokenBuffer, range_kind: TokenRangeKind, first_token: u32) {
 
        let cur_range = &mut target.ranges[self.stack_idx];
 

	
 
        // If we have just popped a range and then push a new range, then the
 
        // first token is equal to the last token registered on the current
 
        // range. If not, then we had some intermediate tokens that did not
 
        // belong to a particular kind of token range: hence we insert an
 
        // intermediate "code" range.
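        //
        // Illustrative example (hypothetical token indices, not from the
        // original comment): if the enclosing range currently ends at token 10
        // because a child range was just popped, and the new range starts at
        // token 14, then tokens 10..14 are wrapped in an intermediate
        // `TokenRangeKind::Code` child range before the new range is pushed.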
 
        if cur_range.end != first_token {
 
            let code_start = cur_range.end;
 
            let code_range_idx = target.ranges.len() as u32;
 

	
 
            if cur_range.first_child_idx == self.stack_idx as u32 {
 
                // The parent of the new "code" range we're going to push does
 
                // not have any registered children yet.
 
                cur_range.first_child_idx = code_range_idx;
 
            }
 
            cur_range.last_child_idx = code_range_idx + 1;
 

	
 
            cur_range.end = first_token;
 
            cur_range.num_child_ranges += 1;
 
            target.ranges.push(TokenRange{
 
                parent_idx: self.stack_idx,
 
                range_kind: TokenRangeKind::Code,
 
                curly_depth: self.curly_depth,
 
                curly_depth: self.curly_stack.len() as u32,
 
                start: code_start,
 
                end: first_token,
 
                num_child_ranges: 0,
 
                first_child_idx: code_range_idx,
 
                last_child_idx: code_range_idx,
 
                next_sibling_idx: Some(code_range_idx + 1), // we're going to push this thing next
 
            });
 
        } else {
 
            // We're going to push the range in the code below, but while we
 
            // have the `cur_range` borrowed mutably, we fix up its children
 
            // indices.
 
            let new_range_idx = target.ranges.len() as u32;
 
            if cur_range.first_child_idx == self.stack_idx as u32 {
 
                cur_range.first_child_idx = new_range_idx;
 
            }
 
            cur_range.last_child_idx = new_range_idx;
 
        }
 

	
 
        // Insert a new range
 
        let parent_idx = self.stack_idx;
 
        self.stack_idx = target.ranges.len();
 
        let new_range_idx = self.stack_idx as u32;
 
        target.ranges.push(TokenRange{
 
            parent_idx,
 
            range_kind,
 
            curly_depth: self.curly_depth,
 
            curly_depth: self.curly_stack.len() as u32,
 
            start: first_token,
 
            end: first_token,
 
            num_child_ranges: 0,
 
            first_child_idx: new_range_idx,
 
            last_child_idx: new_range_idx,
 
            next_sibling_idx: None,
 
        });
 
    }
 

	
 
    fn pop_range(&mut self, target: &mut TokenBuffer, end_index: u32) {
 
        let last = &mut target.ranges[self.stack_idx];
 
        debug_assert!(self.stack_idx != last.parent_idx, "attempting to pop top-level range");
 

	
 
        // Fix up the current range before going back to parent
 
        last.end = end_index;
 
        debug_assert_ne!(last.start, end_index);
 

	
 
        // Go back to parent
 
        self.stack_idx = last.parent_idx;
 
        let parent = &mut target.ranges[self.stack_idx];
 
        parent.end = end_index;
 
        parent.num_child_ranges += 1;
 
    }
 

	
 

	
 
    fn check_ascii(&self, source: &InputSource) -> Result<(), ParseError> {
 
        match source.next() {
 
            Some(c) if !c.is_ascii() => {
 
                Err(ParseError::new_error(source, source.pos(), "encountered a non-ASCII character"))
 
                Err(ParseError::new_error_str_at_pos(source, source.pos(), "encountered a non-ASCII character"))
 
            },
 
            _else => {
 
                Ok(())
 
            },
 
        }
 
    }
 
}
 

	
 
// Helpers for characters
 
fn demarks_definition(ident: &[u8]) -> bool {
 
    return
 
        ident == KW_STRUCT ||
 
            ident == KW_ENUM ||
 
            ident == KW_UNION ||
 
            ident == KW_FUNCTION ||
 
            ident == KW_PRIMITIVE ||
 
            ident == KW_COMPOSITE
 
}
 

	
 
fn demarks_import(ident: &[u8]) -> bool {
 
    return ident == KW_IMPORT;
 
}
 

	
 
fn is_whitespace(c: u8) -> bool {
 
    c.is_ascii_whitespace()
 
}
 

	
 
fn is_char_literal_start(c: u8) -> bool {
 
    return c == b'\'';
 
}
 

	
 
fn is_string_literal_start(c: u8) -> bool {
 
    return c == b'"';
 
}
 

	
 
fn is_pragma_start_or_pound(c: u8) -> bool {
 
    return c == b'#';
 
}
 

	
 
fn is_identifier_start(c: u8) -> bool {
 
    return
 
        (c >= b'a' && c <= b'z') ||
 
            (c >= b'A' && c <= b'Z') ||
 
            c == b'_'
 
}
 

	
 
fn is_identifier_remaining(c: u8) -> bool {
 
    return
 
        (c >= b'0' && c <= b'9') ||
 
            (c >= b'a' && c <= b'z') ||
 
            (c >= b'A' && c <= b'Z') ||
 
            c == b'_'
 
}
 

	
 
fn is_integer_literal_start(c: u8) -> bool {
 
    return c >= b'0' && c <= b'9';
 
}
 

	
 
fn maybe_number_remaining(c: u8) -> bool {
 
    return
 
        (c == b'b' || c == b'B' || c == b'o' || c == b'O' || c == b'x' || c == b'X') ||
 
            (c >= b'0' && c <= b'9') ||
 
            c == b'_';
 
}
 

	
 
#[cfg(test)]
 
mod tests {
 
    use super::*;
 

	
 
    // TODO: Remove at some point
 
    #[test]
 
    fn test_tokenizer() {
 
        let mut source = InputSource::new_test("
 

	
 
        #version 500
 
        # hello 2
 

	
 
        import std.reo::*;
 

	
 
        struct Thing {
 
            int a: 5,
 
        }
 
        enum Hello {
 
            A,
 
            B
 
        }
 

	
 
        // Hello hello, is it me you are looking for?
 
        // I can seee it in your eeeyes
 

	
 
        func something(int a, int b, int c) -> byte {
 
            int a = 5;
 
            struct Inner {
 
                int a
 
            }
 
            struct City {
 
                int b
 
            }
 
            /* Waza
 
            How are you doing
 
            Things in here yo
 
            /* */ */
 

	
 
            a = a + 5 * 2;
 
            struct Pressure {
 
                int d
 
            }
 
        }
 
        ");
 
        let mut t = PassTokenizer::new();
 
        let mut buffer = TokenBuffer::new();
 
        t.tokenize(&mut source, &mut buffer).expect("tokenize");
 

	
 
        println!("Ranges:\n");
 
        for (idx, range) in buffer.ranges.iter().enumerate() {
 
            println!("[{}] {:?}", idx, range)
 
        }
 

	
 
        println!("Tokens:\n");
 
        let mut iter = buffer.tokens.iter().enumerate();
 
        while let Some((idx, token)) = iter.next() {
 
            match token.kind {
 
                TokenKind::Ident | TokenKind::Pragma | TokenKind::Integer |
 
                TokenKind::String | TokenKind::Character | TokenKind::LineComment |
 
                TokenKind::BlockComment => {
 
                    let (_, end) = iter.next().unwrap();
 
                    println!("[{}] {:?} ......", idx, token.kind);
 
                    assert_eq!(end.kind, TokenKind::SpanEnd);
 
                    let text = source.section_at_pos(token.pos, end.pos);
 
                    println!("{}", String::from_utf8_lossy(text));
 
                },
 
                _ => {
 
                    println!("[{}] {:?}", idx, token.kind);
 
                }
 
            }
 
        }
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/pass_typing.rs
Show inline comments
 
file renamed from src/protocol/parser/type_resolver.rs to src/protocol/parser/pass_typing.rs
 
/// type_resolver.rs
 
/// pass_typing
 
///
 
/// Performs type inference and type checking. Type inference is implemented by
 
/// applying constraints on (sub)trees of types. During this process the
 
/// resolver takes the `ParserType` structs (the representation of the types
 
/// written by the programmer), converts them to `InferenceType` structs (the
 
/// temporary data structure used during type inference) and attempts to arrive
 
/// at `ConcreteType` structs (the representation of a fully checked and
 
/// validated type).
 
///
 
/// The resolver will visit every statement and expression relevant to the
 
/// procedure and insert and determine its initial type based on context (e.g. a
 
/// return statement's expression must match the function's return type, an
 
/// if statement's test expression must evaluate to a boolean). When all are
 
/// visited we attempt to make progress in evaluating the types. Whenever a type
 
/// is progressed we queue the related expressions for further type progression.
 
/// Once no more expressions are in the queue the algorithm is finished. At this
 
/// point either all types are inferred (or can be trivially implicitly
 
/// determined), or we have incomplete types. In the latter case we return an
 
/// error.
 
///
 
/// Inference may be applied on non-polymorphic procedures and on polymorphic
 
/// procedures. When dealing with a non-polymorphic procedure we apply the type
 
/// resolver and annotate the AST with the `ConcreteType`s. When dealing with
 
/// polymorphic procedures we will only annotate the AST once, preserving
 
/// references to polymorphic variables. Any later pass will perform just the
 
/// type checking.
 
///
 
/// TODO: Needs a thorough rewrite:
 
///  0. polymorph_progress is intentionally broken at the moment.
 
///  1. For polymorphic type inference we need to have an extra data structure
 
///     for progressing the polymorphic variables and mapping them back to each
 
///     signature type that uses that polymorphic type. The two types of markers
 
///     became somewhat of a mess.
 
///  2. We're doing a lot of extra work. It seems better to apply the initial
 
///     type based on expression parents, then to apply forced constraints (arg
 
///     to a fires() call must be port-like), only then to start progressing the
 
///     types.
 
///     Furthermore, queueing of expressions can be more intelligent, currently
 
///     every child/parent of an expression is inferred again when queued. Hence
 
///     we need to queue only specific children/parents of expressions.
 
///  3. Remove the `msg` type?
 
///  4. Disallow certain types in certain operations (e.g. `Void`).
 
///  5. Implement implicit and explicit casting.
 
///  6. Investigate different ways of performing the type-on-type inference,
 
///     maybe there is a better way than flattened trees + markers?
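///
/// As an illustrative sketch of that representation (the concrete type names
/// below are only examples): an `InferenceType` stores its parts as a
/// depth-first flattened tree, so `Array<UInt8>` becomes `[Array, UInt8]`, an
/// array with an as-yet-unknown element type becomes `[Array, Unknown]`, and a
/// polymorphic variable used inside a procedure body is preceded by a
/// `MarkerBody(idx)` part so the subtree following the marker can be located
/// again whenever more is learned about that variable.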
 

	
 
macro_rules! debug_log {
 
    ($format:literal) => {
 
        enabled_debug_print!(true, "types", $format);
 
    };
 
    ($format:literal, $($args:expr),*) => {
 
        enabled_debug_print!(true, "types", $format, $($args),*);
 
    };
 
}
 

	
 
use std::collections::{HashMap, HashSet, VecDeque};
 
use std::collections::{HashMap, HashSet};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::input_source2::{InputSource2 as InputSource, ParseError};
 
use crate::protocol::input_source::ParseError;
 
use crate::protocol::parser::ModuleCompilationPhase;
 
use crate::protocol::parser::type_table::*;
 
use super::visitor::{
 
    STMT_BUFFER_INIT_CAPACITY,
 
    EXPR_BUFFER_INIT_CAPACITY,
 
    Ctx,
 
    Visitor2,
 
    VisitorResult
 
};
 
use std::collections::hash_map::Entry;
 

	
 
const MESSAGE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Message, InferenceTypePart::Byte ];
 
const MESSAGE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Message, InferenceTypePart::UInt8 ];
 
const BOOL_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Bool ];
 
const CHARACTER_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Character ];
 
const STRING_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::String ];
 
const NUMBERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::NumberLike ];
 
const INTEGERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::IntegerLike ];
 
const ARRAY_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Array, InferenceTypePart::Unknown ];
 
const ARRAYLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::ArrayLike, InferenceTypePart::Unknown ];
 

	
 
/// TODO: @performance Turn into PartialOrd+Ord to simplify checks
 
/// TODO: @types Remove the Message -> Byte hack at some point...
 
#[derive(Debug, Clone, Eq, PartialEq)]
 
pub(crate) enum InferenceTypePart {
 
    // A marker with an identifier which we can use to retrieve the type subtree
 
    // that follows the marker. This is used to perform type inference on
 
    // polymorphs: an expression may determine the polymorph's type, after which we
 
    // need to apply that information to all other places where the polymorph is
 
    // used.
 
    MarkerDefinition(usize), // marker for polymorph types on a procedure's definition
 
    MarkerBody(usize), // marker for polymorph types within a procedure body
 
    // Completely unknown type, needs to be inferred
 
    Unknown,
 
    // Partially known type, may be inferred to be the appropriate related
 
    // type.
 
    // IndexLike,      // index into array/slice
 
    NumberLike,     // any kind of integer/float
 
    IntegerLike,    // any kind of integer
 
    ArrayLike,      // array or slice. Note that this must have a subtype
 
    PortLike,       // input or output port
 
    // Special types that cannot be instantiated by the user
 
    Void, // For builtin functions that do not return anything
 
    // Concrete types without subtypes
 
    Bool,
 
    UInt8,
 
    UInt16,
 
    UInt32,
 
    UInt64,
 
    SInt8,
 
    SInt16,
 
    SInt32,
 
    SInt64,
 
    Character,
 
    String,
 
    // One subtype
 
    Message,
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // A user-defined type with any number of subtypes
 
    Instance(DefinitionId, usize)
 
}
 

	
 
impl InferenceTypePart {
 
    fn is_marker(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        match self {
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if the type is concrete, markers are interpreted as concrete
 
    /// types.
 
    fn is_concrete(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | 
 
            ITP::ArrayLike | ITP::PortLike => false,
 
            _ => true
 
        }
 
    }
 

	
 
    fn is_concrete_number(&self) -> bool {
 
        // TODO: @float
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_integer(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_msg_array_or_slice(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Array | ITP::Slice | ITP::Message => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_port(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Input | ITP::Output => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if a part is less specific than the argument. Only checks for 
 
    /// single-part inference (i.e. not the replacement of an `Unknown` variant 
 
    /// with the argument)
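    /// (e.g. `PortLike` may be inferred from `Input` or `Output`, and
    /// `NumberLike` may be inferred from `IntegerLike`, but not the other way
    /// around).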
 
    fn may_be_inferred_from(&self, arg: &InferenceTypePart) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        (*self == ITP::IntegerLike && arg.is_concrete_integer()) ||
 
        (*self == ITP::NumberLike && (arg.is_concrete_number() || *arg == ITP::IntegerLike)) ||
 
        (*self == ITP::ArrayLike && arg.is_concrete_msg_array_or_slice()) ||
 
        (*self == ITP::PortLike && arg.is_concrete_port())
 
    }
 

	
 
    /// Returns the change in "iteration depth" when traversing this particular
 
    /// part. The iteration depth is used to traverse the tree in a linear 
 
    /// fashion. It is basically `number_of_subtypes - 1`
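    /// (e.g. `Bool` has no subtypes and yields -1, `Array` has one subtype and
    /// yields 0, and an `Instance` with two type arguments yields +1).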
 
    fn depth_change(&self) -> i32 {
 
        use InferenceTypePart as ITP;
 
        match &self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike |
 
            ITP::Void | ITP::Bool |
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long | 
 
            ITP::String => {
 
                -1
 
            },
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) |
 
            ITP::ArrayLike | ITP::Message | ITP::Array | ITP::Slice |
 
            ITP::PortLike | ITP::Input | ITP::Output => {
 
                // One subtype, so do not modify depth
 
                0
 
            },
 
            ITP::Instance(_, num_args) => {
 
                (*num_args as i32) - 1
 
            }
 
        }
 
    }
 
}
 

	
 
impl From<ConcreteTypePart> for InferenceTypePart {
 
    fn from(v: ConcreteTypePart) -> InferenceTypePart {
 
        use ConcreteTypePart as CTP;
 
        use InferenceTypePart as ITP;
 

	
 
        match v {
 
            CTP::Marker(_) => {
 
                unreachable!("encountered marker while converting concrete type to inferred type");
 
            }
 
            CTP::Void => ITP::Void,
 
            CTP::Message => ITP::Message,
 
            CTP::Bool => ITP::Bool,
 
            CTP::Byte => ITP::Byte,
 
            CTP::Short => ITP::Short,
 
            CTP::Int => ITP::Int,
 
            CTP::Long => ITP::Long,
 
            CTP::String => ITP::String,
 
            CTP::Array => ITP::Array,
 
            CTP::Slice => ITP::Slice,
 
            CTP::Input => ITP::Input,
 
            CTP::Output => ITP::Output,
 
            CTP::Instance(id, num) => ITP::Instance(id, num),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug)]
 
struct InferenceType {
 
    has_body_marker: bool,
 
    is_done: bool,
 
    parts: Vec<InferenceTypePart>,
 
}
 

	
 
impl InferenceType {
 
    /// Generates a new InferenceType. The two boolean flags will be checked in
 
    /// debug mode.
 
    fn new(has_body_marker: bool, is_done: bool, parts: Vec<InferenceTypePart>) -> Self {
 
        if cfg!(debug_assertions) {
 
            debug_assert!(!parts.is_empty());
 
            let parts_body_marker = parts.iter().any(|v| {
 
                if let InferenceTypePart::MarkerBody(_) = v { true } else { false }
 
            });
 
            debug_assert_eq!(has_body_marker, parts_body_marker);
 
            let parts_done = parts.iter().all(|v| v.is_concrete());
 
            debug_assert_eq!(is_done, parts_done, "{:?}", parts);
 
        }
 
        Self{ has_body_marker, is_done, parts }
 
    }
 

	
 
    /// Replaces a type subtree with the provided subtree. The caller must make
 
    /// sure that the replacement is a well-formed type subtree.
 
    fn replace_subtree(&mut self, start_idx: usize, with: &[InferenceTypePart]) {
 
        let end_idx = Self::find_subtree_end_idx(&self.parts, start_idx);
 
        debug_assert_eq!(with.len(), Self::find_subtree_end_idx(with, 0));
 
        self.parts.splice(start_idx..end_idx, with.iter().cloned());
 
        self.recompute_is_done();
 
    }
 

	
 
    // TODO: @performance, might all be done inline in the type inference methods
 
    fn recompute_is_done(&mut self) {
 
        self.is_done = self.parts.iter().all(|v| v.is_concrete());
 
    }
 

	
 
    /// Seeks a body marker starting at the specified position. If a marker is
 
    /// found then its value and the index of the type subtree that follows it
 
    /// is returned.
 
    fn find_body_marker(&self, mut start_idx: usize) -> Option<(usize, usize)> {
 
        while start_idx < self.parts.len() {
 
            if let InferenceTypePart::MarkerBody(marker) = &self.parts[start_idx] {
 
                return Some((*marker, start_idx + 1))
 
            }
 

	
 
            start_idx += 1;
 
        }
 

	
 
        None
 
    }
 

	
 
    /// Returns an iterator over all body markers and the partial type tree that
 
    /// follows those markers. If it is a problem that `InferenceType` is 
 
    /// borrowed by the iterator, then use `find_body_marker`.
 
    fn body_marker_iter(&self) -> InferenceTypeMarkerIter {
 
        InferenceTypeMarkerIter::new(&self.parts)
 
    }
 

	
 
    /// Given that the `parts` are a depth-first serialized tree of types, this
 
    /// function finds the subtree anchored at a specific node. The returned 
 
    /// index is exclusive.
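    /// For example, for a hypothetical `Pair<UInt8, Bool>` serialized as
    /// `[Instance(pair_id, 2), UInt8, Bool]`, the subtree starting at index 0
    /// ends at index 3, while the subtree starting at index 1 ends at index 2.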
 
    fn find_subtree_end_idx(parts: &[InferenceTypePart], start_idx: usize) -> usize {
 
        let mut depth = 1;
 
        let mut idx = start_idx;
 

	
 
        while idx < parts.len() {
 
            depth += parts[idx].depth_change();
 
            if depth == 0 {
 
                return idx + 1;
 
            }
 
            idx += 1;
 
        }
 

	
 
        // If here, then the inference type is malformed
 
        unreachable!();
 
    }
 

	
 
    /// Call that attempts to infer the part at `to_infer.parts[to_infer_idx]` 
 
    /// using the subtree at `template.parts[template_idx]`. Will return 
 
    /// `Some(depth_change_due_to_traversal)` if type inference has been 
 
    /// applied. In this case the indices will also be modified to point to the 
 
    /// next part in both templates. If type inference has not (or: could not) 
 
    /// be applied then `None` will be returned. Note that this might mean that 
 
    /// the types are incompatible.
 
    ///
 
    /// As this is a helper function, some assumptions apply: the parts are not
 
    /// exactly equal, and neither of them contains a marker. Also: only the
 
    /// `to_infer` parts are checked for inference. It might be that this 
 
    /// function returns `None`, but that `template` is still compatible
 
    /// with `to_infer`, e.g. when `template` has an `Unknown` part.
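    ///
    /// For example, inferring an `Unknown` part in `to_infer` against the
    /// template subtree `[Array, Bool]` copies both parts into `to_infer` and
    /// returns `Some(-1)`.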
 
    fn infer_part_for_single_type(
 
        to_infer: &mut InferenceType, to_infer_idx: &mut usize,
 
        template_parts: &[InferenceTypePart], template_idx: &mut usize,
 
    ) -> Option<i32> {
 
        use InferenceTypePart as ITP;
 

	
 
        let to_infer_part = &to_infer.parts[*to_infer_idx];
 
        let template_part = &template_parts[*template_idx];
 

	
 
        // TODO: Maybe do this differently?
 
        let mut template_definition_marker = None;
 
        if *template_idx > 0 {
 
            if let ITP::MarkerDefinition(marker) = &template_parts[*template_idx - 1] {
 
                template_definition_marker = Some(*marker)
 
            }
 
        }
 

	
 
        // Check for programmer mistakes
 
        debug_assert_ne!(to_infer_part, template_part);
 
        debug_assert!(!to_infer_part.is_marker(), "marker encountered in 'infer part'");
 
        debug_assert!(!template_part.is_marker(), "marker encountered in 'template part'");
 

	
 
        // Inference of a somewhat-specified type
 
        if to_infer_part.may_be_inferred_from(template_part) {
 
            let depth_change = to_infer_part.depth_change();
 
            debug_assert_eq!(depth_change, template_part.depth_change());
 

	
 
            if let Some(marker) = template_definition_marker {
 
                to_infer.parts.insert(*to_infer_idx, ITP::MarkerDefinition(marker));
 
                *to_infer_idx += 1;
 
            }
 

	
 
            to_infer.parts[*to_infer_idx] = template_part.clone();
 

	
 
            *to_infer_idx += 1;
 
            *template_idx += 1;
 
            return Some(depth_change);
 
        }
 

	
 
        // Inference of a completely unknown type
 
        if *to_infer_part == ITP::Unknown {
 
            // template part is different, so cannot be unknown, hence copy the
 
            // entire subtree. Make sure to copy definition markers, but not to
 
            // transfer polymorph markers.
 
            let template_end_idx = Self::find_subtree_end_idx(template_parts, *template_idx);
 
            let template_start_idx = if let Some(marker) = template_definition_marker {
 
                to_infer.parts[*to_infer_idx] = ITP::MarkerDefinition(marker);
 
                *template_idx
 
            } else {
 
                to_infer.parts[*to_infer_idx] = template_parts[*template_idx].clone();
 
                *template_idx + 1
 
            };
 
            *to_infer_idx += 1;
 

	
 
            for template_idx in template_start_idx..template_end_idx {
 
                let template_part = &template_parts[template_idx];
 
                if let ITP::MarkerBody(_) = template_part {
 
                    // Do not copy this one
 
                } else {
 
                    to_infer.parts.insert(*to_infer_idx, template_part.clone());
 
                    *to_infer_idx += 1;
 
                }
 
            }
 
            *template_idx = template_end_idx;
 

	
 
            // Note: by definition the LHS was Unknown and the RHS traversed a 
 
            // full subtree.
 
            return Some(-1);
 
        }
 

	
 
        None
 
    }
 

	
 
    /// Call that checks if the `to_check` part is compatible with the `infer`
 
    /// part. This is essentially a copy of `infer_part_for_single_type`, but
 
    /// without actually copying the type parts.
 
    fn check_part_for_single_type(
 
        to_check_parts: &[InferenceTypePart], to_check_idx: &mut usize,
 
        template_parts: &[InferenceTypePart], template_idx: &mut usize
 
    ) -> Option<i32> {
 
        use InferenceTypePart as ITP;
 

	
 
        let to_check_part = &to_check_parts[*to_check_idx];
 
        let template_part = &template_parts[*template_idx];
 

	
 
        // Checking programmer errors
 
        debug_assert_ne!(to_check_part, template_part);
 
        debug_assert!(!to_check_part.is_marker(), "marker encountered in 'to_check part'");
 
        debug_assert!(!template_part.is_marker(), "marker encountered in 'template part'");
 

	
 
        if to_check_part.may_be_inferred_from(template_part) {
 
            let depth_change = to_check_part.depth_change();
 
            debug_assert_eq!(depth_change, template_part.depth_change());
 
            *to_check_idx += 1;
 
            *template_idx += 1;
 
            return Some(depth_change);
 
        }
 

	
 
        if *to_check_part == ITP::Unknown {
 
            *to_check_idx += 1;
 
            *template_idx = Self::find_subtree_end_idx(template_parts, *template_idx);
 

	
 
            // By definition LHS and RHS had depth change of -1
 
            return Some(-1);
 
        }
 

	
 
        None
 
    }
 

	
 
    /// Attempts to infer types between two `InferenceType` instances. This 
 
    /// function is unsafe as it accepts pointers to work around Rust's 
 
    /// borrowing rules. The caller must ensure that the pointers are distinct.
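    ///
    /// A call therefore looks roughly like (a sketch only, the variable names
    /// are hypothetical):
    /// `unsafe { InferenceType::infer_subtrees_for_both_types(expr_type, 0, arg_type, 0) }`,
    /// where `expr_type` and `arg_type` are `*mut InferenceType` pointers to
    /// two distinct entries (e.g. in the `expr_types` map).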
 
    unsafe fn infer_subtrees_for_both_types(
 
        type_a: *mut InferenceType, start_idx_a: usize,
 
        type_b: *mut InferenceType, start_idx_b: usize
 
    ) -> DualInferenceResult {
 
        debug_assert!(!std::ptr::eq(type_a, type_b), "encountered pointers to the same inference type");
 
        let type_a = &mut *type_a;
 
        let type_b = &mut *type_b;
 

	
 
        let mut modified_a = false;
 
        let mut modified_b = false;
 
        let mut idx_a = start_idx_a;
 
        let mut idx_b = start_idx_b;
 
        let mut depth = 1;
 

	
 
        while depth > 0 {
 
            // Advance indices if we encounter markers or equal parts
 
            let part_a = &type_a.parts[idx_a];
 
            let part_b = &type_b.parts[idx_b];
 
            
 
            if part_a == part_b {
 
                let depth_change = part_a.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, part_b.depth_change());
 
                idx_a += 1;
 
                idx_b += 1;
 
                continue;
 
            }
 
            if part_a.is_marker() { idx_a += 1; continue; }
 
            if part_b.is_marker() { idx_b += 1; continue; }
 

	
 
            // Types are not equal and are both not markers
 
            if let Some(depth_change) = Self::infer_part_for_single_type(type_a, &mut idx_a, &type_b.parts, &mut idx_b) {
 
                depth += depth_change;
 
                modified_a = true;
 
                continue;
 
            }
 
            if let Some(depth_change) = Self::infer_part_for_single_type(type_b, &mut idx_b, &type_a.parts, &mut idx_a) {
 
                depth += depth_change;
 
                modified_b = true;
 
                continue;
 
            }
 

	
 
            // And can also not be inferred in any way: types must be incompatible
 
            return DualInferenceResult::Incompatible;
 
        }
 

	
 
        if modified_a { type_a.recompute_is_done(); }
 
        if modified_b { type_b.recompute_is_done(); }
 

	
 
        // If here then we completely inferred the subtrees.
 
        match (modified_a, modified_b) {
 
            (false, false) => DualInferenceResult::Neither,
 
            (false, true) => DualInferenceResult::Second,
 
            (true, false) => DualInferenceResult::First,
 
            (true, true) => DualInferenceResult::Both
 
        }
 
    }
 

	
 
    /// Attempts to infer the first subtree based on the template. Like
 
    /// `infer_subtrees_for_both_types`, but now only applying inference to
 
    /// `to_infer` based on the type information in `template`.
 
    /// Secondary use is to make sure that a type follows a certain template.
 
    fn infer_subtree_for_single_type(
 
        to_infer: &mut InferenceType, mut to_infer_idx: usize,
 
        template: &[InferenceTypePart], mut template_idx: usize,
 
    ) -> SingleInferenceResult {
 
        let mut modified = false;
 
        let mut depth = 1;
 

	
 
        while depth > 0 {
 
            let to_infer_part = &to_infer.parts[to_infer_idx];
 
            let template_part = &template[template_idx];
 

	
 
            if to_infer_part == template_part {
 
                let depth_change = to_infer_part.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, template_part.depth_change());
 
                to_infer_idx += 1;
 
                template_idx += 1;
 
                continue;
 
            }
 
            if to_infer_part.is_marker() { to_infer_idx += 1; continue; }
 
            if template_part.is_marker() { template_idx += 1; continue; }
 

	
 
            // Types are not equal and not markers. So check if we can infer 
 
            // anything
 
            if let Some(depth_change) = Self::infer_part_for_single_type(
 
                to_infer, &mut to_infer_idx, template, &mut template_idx
 
            ) {
 
                depth += depth_change;
 
                modified = true;
 
                continue;
 
            }
 

	
 
            // We cannot infer anything, but the template may still be 
 
            // compatible with the type we're inferring
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                template, &mut template_idx, &to_infer.parts, &mut to_infer_idx
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 

	
 
            return SingleInferenceResult::Incompatible
 
        }
 

	
 
        return if modified {
 
            to_infer.recompute_is_done();
 
            SingleInferenceResult::Modified
 
        } else {
 
            SingleInferenceResult::Unmodified
 
        }
 
    }
 

	
 
    /// Checks if both types are compatible, doesn't perform any inference
 
    fn check_subtrees(
 
        type_parts_a: &[InferenceTypePart], start_idx_a: usize,
 
        type_parts_b: &[InferenceTypePart], start_idx_b: usize
 
    ) -> bool {
 
        let mut depth = 1;
 
        let mut idx_a = start_idx_a;
 
        let mut idx_b = start_idx_b;
 

	
 
        while depth > 0 {
 
            let part_a = &type_parts_a[idx_a];
 
            let part_b = &type_parts_b[idx_b];
 

	
 
            if part_a == part_b {
 
                let depth_change = part_a.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, part_b.depth_change());
 
                idx_a += 1;
 
                idx_b += 1;
 
                continue;
 
            }
 
            
 
            if part_a.is_marker() { idx_a += 1; continue; }
 
            if part_b.is_marker() { idx_b += 1; continue; }
 

	
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                type_parts_a, &mut idx_a, type_parts_b, &mut idx_b
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                type_parts_b, &mut idx_b, type_parts_a, &mut idx_a
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 

	
 
            return false;
 
        }
 

	
 
        true
 
    }
 

	
 
    /// Performs the conversion of the inference type into a concrete type.
 
    /// Before calling this function you must make sure that no unspecified types
 
    /// (e.g. Unknown or IntegerLike) exist in the type.
 
    fn write_concrete_type(&self, concrete_type: &mut ConcreteType) {
 
        use InferenceTypePart as ITP;
 
        use ConcreteTypePart as CTP;
 

	
 
        // Make sure inference type is specified but concrete type is not yet specified
 
        debug_assert!(!self.parts.is_empty());
 
        debug_assert!(concrete_type.parts.is_empty());
 
        concrete_type.parts.reserve(self.parts.len());
 

	
 
        let mut idx = 0;
 
        while idx < self.parts.len() {
 
            let part = &self.parts[idx];
 
            let converted_part = match part {
 
                ITP::MarkerDefinition(marker) => {
 
                    // Outer markers are converted to regular markers, we
 
                    // completely remove the type subtree that follows it
 
                    idx = InferenceType::find_subtree_end_idx(&self.parts, idx + 1);
 
                    concrete_type.parts.push(CTP::Marker(*marker));
 
                    continue;
 
                },
 
                ITP::MarkerBody(_) => {
 
                    // Inner markers are removed when writing to the concrete
 
                    // type.
 
                    idx += 1;
 
                    continue;
 
                },
 
                ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | ITP::ArrayLike | ITP::PortLike => {
 
                    unreachable!("Attempted to convert inference type part {:?} into concrete type", part);
 
                },
 
                ITP::Void => CTP::Void,
 
                ITP::Message => CTP::Message,
 
                ITP::Bool => CTP::Bool,
 
                ITP::Byte => CTP::Byte,
 
                ITP::Short => CTP::Short,
 
                ITP::Int => CTP::Int,
 
                ITP::Long => CTP::Long,
 
                ITP::String => CTP::String,
 
                ITP::Array => CTP::Array,
 
                ITP::Slice => CTP::Slice,
 
                ITP::Input => CTP::Input,
 
                ITP::Output => CTP::Output,
 
                ITP::Instance(id, num) => CTP::Instance(*id, *num),
 
            };
 

	
 
            concrete_type.parts.push(converted_part);
 
            idx += 1;
 
        }
 
    }
 

	
 
    /// Writes a human-readable version of the type to a string. This is used
 
    /// to display error messages
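    /// (e.g. the parts `[Input, Array, Bool]` are written as `in<bool[]>`).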
 
    fn write_display_name(
 
        buffer: &mut String, heap: &Heap, parts: &[InferenceTypePart], mut idx: usize
 
    ) -> usize {
 
        use InferenceTypePart as ITP;
 

	
 
        match &parts[idx] {
 
            ITP::MarkerDefinition(thing) => {
 
                buffer.push_str(&format!("{{D:{}}}", *thing));
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
            }, 
 
            ITP::MarkerBody(thing) => {
 
                buffer.push_str(&format!("{{B:{}}}", *thing));
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
            },
 
            ITP::Unknown => buffer.push_str("?"),
 
            ITP::NumberLike => buffer.push_str("num?"),
 
            ITP::IntegerLike => buffer.push_str("int?"),
 
            ITP::ArrayLike => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[?]");
 
            },
 
            ITP::PortLike => {
 
                buffer.push_str("port?<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            }
 
            ITP::Void => buffer.push_str("void"),
 
            ITP::Bool => buffer.push_str("bool"),
 
            ITP::Byte => buffer.push_str("byte"),
 
            ITP::Short => buffer.push_str("short"),
 
            ITP::Int => buffer.push_str("int"),
 
            ITP::Long => buffer.push_str("long"),
 
            ITP::String => buffer.push_str("str"),
 
            ITP::Message => {
 
                buffer.push_str("msg<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Array => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[]");
 
            },
 
            ITP::Slice => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[..]");
 
            },
 
            ITP::Input => {
 
                buffer.push_str("in<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Output => {
 
                buffer.push_str("out<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Instance(definition_id, num_sub) => {
 
                let definition = &heap[*definition_id];
 
                buffer.push_str(definition.identifier().value.as_str());
 
                if *num_sub > 0 {
 
                    buffer.push('<');
 
                    idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    for _sub_idx in 1..*num_sub {
 
                        buffer.push_str(", ");
 
                        idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    }
 
                    buffer.push('>');
 
                }
 
            },
 
        }
 

	
 
        idx
 
    }
 

	
 
    /// Returns the display name of a (part of) the type tree. Will allocate a
 
    /// string.
 
    fn partial_display_name(heap: &Heap, parts: &[InferenceTypePart]) -> String {
 
        let mut buffer = String::with_capacity(parts.len() * 6);
 
        Self::write_display_name(&mut buffer, heap, parts, 0);
 
        buffer
 
    }
 

	
 
    /// Returns the display name of the full type tree. Will allocate a string.
 
    fn display_name(&self, heap: &Heap) -> String {
 
        Self::partial_display_name(heap, &self.parts)
 
    }
 
}
 

	
 
/// Iterator over the subtrees that follow a marker in an `InferenceType`
 
/// instance. Returns immutable slices over the internal parts
 
struct InferenceTypeMarkerIter<'a> {
 
    parts: &'a [InferenceTypePart],
 
    idx: usize,
 
}
 

	
 
impl<'a> InferenceTypeMarkerIter<'a> {
 
    fn new(parts: &'a [InferenceTypePart]) -> Self {
 
        Self{ parts, idx: 0 }
 
    }
 
}
 

	
 
impl<'a> Iterator for InferenceTypeMarkerIter<'a> {
 
    type Item = (usize, &'a [InferenceTypePart]);
 

	
 
    fn next(&mut self) -> Option<Self::Item> {
 
        // Iterate until we find a marker
 
        while self.idx < self.parts.len() {
 
            if let InferenceTypePart::MarkerBody(marker) = self.parts[self.idx] {
 
                // Found a marker, find the subtree end
 
                let start_idx = self.idx + 1;
 
                let end_idx = InferenceType::find_subtree_end_idx(self.parts, start_idx);
 

	
 
                // Modify internal index, then return items
 
                self.idx = end_idx;
 
                return Some((marker, &self.parts[start_idx..end_idx]));
 
            }
 

	
 
            self.idx += 1;
 
        }
 

	
 
        None
 
    }
 
}
 

	
 
#[derive(Debug, PartialEq, Eq)]
 
enum DualInferenceResult {
 
    Neither,        // neither argument is clarified
 
    First,          // first argument is clarified using the second one
 
    Second,         // second argument is clarified using the first one
 
    Both,           // both arguments are clarified
 
    Incompatible,   // types are incompatible: programmer error
 
}
 

	
 
impl DualInferenceResult {
 
    fn modified_lhs(&self) -> bool {
 
        match self {
 
            DualInferenceResult::First | DualInferenceResult::Both => true,
 
            _ => false
 
        }
 
    }
 
    fn modified_rhs(&self) -> bool {
 
        match self {
 
            DualInferenceResult::Second | DualInferenceResult::Both => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, PartialEq, Eq)]
 
enum SingleInferenceResult {
 
    Unmodified,
 
    Modified,
 
    Incompatible
 
}
 

	
 
enum DefinitionType{
 
    Component(ComponentDefinitionId),
 
    Function(FunctionDefinitionId),
 
}
 

	
 
impl DefinitionType {
 
    fn definition_id(&self) -> DefinitionId {
 
        match self {
 
            DefinitionType::Component(v) => v.upcast(),
 
            DefinitionType::Function(v) => v.upcast(),
 
        }
 
    }
 
}
 

	
 
#[derive(PartialEq, Eq)]
 
pub(crate) struct ResolveQueueElement {
 
    pub(crate) root_id: RootId,
 
    pub(crate) definition_id: DefinitionId,
 
    pub(crate) monomorph_types: Vec<ConcreteType>,
 
}
 

	
 
pub(crate) type ResolveQueue = Vec<ResolveQueueElement>;
 

	
 
/// This particular visitor will recurse depth-first into the AST and ensures
 
/// that all expressions have the appropriate types.
 
pub(crate) struct TypeResolvingVisitor {
 
pub(crate) struct PassTyping {
 
    // Current definition we're typechecking.
 
    definition_type: DefinitionType,
 
    poly_vars: Vec<ConcreteType>,
 

	
 
    // Buffers for iteration over substatements and subexpressions
 
    stmt_buffer: Vec<StatementId>,
 
    expr_buffer: Vec<ExpressionId>,
 

	
 
    // Mapping from parser type to inferred type. We attempt to continue to
 
    // specify these types until we're stuck or we've fully determined the type.
 
    var_types: HashMap<VariableId, VarData>,      // types of variables
 
    expr_types: HashMap<ExpressionId, InferenceType>,   // types of expressions
 
    extra_data: HashMap<ExpressionId, ExtraData>,       // data for polymorph inference
 
    // Keeping track of which expressions need to be reinferred because the
 
    // expressions they're linked to made progression on an associated type
 
    expr_queued: HashSet<ExpressionId>,
 
}
 

	
 
// TODO: @rename used for calls and struct literals, maybe union literals?
 
struct ExtraData {
 
    /// Progression of polymorphic variables (if any)
 
    poly_vars: Vec<InferenceType>,
 
    /// Progression of types of call arguments or struct members
 
    embedded: Vec<InferenceType>,
 
    returned: InferenceType,
 
}
 

	
 
struct VarData {
 
    /// Type of the variable
 
    var_type: InferenceType,
 
    /// VariableExpressions that use the variable
 
    used_at: Vec<ExpressionId>,
 
    /// For channel statements we link to the other variable such that when one
 
    /// channel's interior type is resolved, we can also resolve the other one.
 
    linked_var: Option<VariableId>,
 
}
 

	
 
impl VarData {
 
    fn new_channel(var_type: InferenceType, other_port: VariableId) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: Some(other_port) }
 
    }
 
    fn new_local(var_type: InferenceType) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: None }
 
    }
 
}
 

	
 
impl TypeResolvingVisitor {
 
impl PassTyping {
 
    pub(crate) fn new() -> Self {
 
        TypeResolvingVisitor{
 
        PassTyping {
 
            definition_type: DefinitionType::None,
 
            poly_vars: Vec::new(),
 
            stmt_buffer: Vec::with_capacity(STMT_BUFFER_INIT_CAPACITY),
 
            expr_buffer: Vec::with_capacity(EXPR_BUFFER_INIT_CAPACITY),
 
            var_types: HashMap::new(),
 
            expr_types: HashMap::new(),
 
            extra_data: HashMap::new(),
 
            expr_queued: HashSet::new(),
 
        }
 
    }
 

	
 
    // TODO: @cleanup Unsure about this, maybe a pattern will arise after
 
    //  a while.
 
    pub(crate) fn queue_module_definitions(ctx: &Ctx, queue: &mut ResolveQueue) {
 
        debug_assert_eq!(ctx.module.phase, ModuleCompilationPhase::ValidatedAndLinked);
 
        let root_id = ctx.module.root_id;
 
        let root = &ctx.heap.protocol_descriptions[root_id];
 
        for definition_id in &root.definitions {
 
            let definition = &ctx.heap[*definition_id];
 
            match definition {
 
                Definition::Function(definition) => {
 
                    if definition.poly_vars.is_empty() {
 
                        queue.push(ResolveQueueElement{
 
                            root_id,
 
                            definition_id: *definition_id,
 
                            monomorph_types: Vec::new(),
 
                        })
 
                    }
 
                },
 
                Definition::Component(definition) => {
 
                    if definition.poly_vars.is_empty() {
 
                        queue.push(ResolveQueueElement{
 
                            root_id,
 
                            definition_id: *definition_id,
 
                            monomorph_types: Vec::new(),
 
                        })
 
                    }
 
                },
 
                Definition::Enum(_) | Definition::Struct(_) | Definition::Union(_) => {},
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn handle_module_definition(
 
        &mut self, ctx: &mut Ctx, queue: &mut ResolveQueue, element: ResolveQueueElement
 
    ) -> VisitorResult {
 
        // Visit the definition
 
        debug_assert_eq!(ctx.module.root_id, element.root_id);
 
        self.reset();
 
        self.poly_vars.clear();
 
        self.poly_vars.extend(element.monomorph_types.iter().cloned());
 
        self.visit_definition(ctx, element.definition_id)?;
 

	
 
        // Keep resolving types
 
        self.resolve_types(ctx, queue)?;
 
        Ok(())
 
    }
 

	
 
    fn reset(&mut self) {
 
        self.definition_type = DefinitionType::None;
 
        self.poly_vars.clear();
 
        self.stmt_buffer.clear();
 
        self.expr_buffer.clear();
 
        self.var_types.clear();
 
        self.expr_types.clear();
 
        self.extra_data.clear();
 
        self.expr_queued.clear();
 
    }
 
}
 

	
 
impl Visitor2 for PassTyping {
 
    // Definitions
 

	
 
    fn visit_component_definition(&mut self, ctx: &mut Ctx, id: ComponentDefinitionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Component(id);
 

	
 
        let comp_def = &ctx.heap[id];
 
        debug_assert_eq!(comp_def.poly_vars.len(), self.poly_vars.len(), "component polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting component '{}': {}", &String::from_utf8_lossy(&comp_def.identifier.value), id.0.index);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        for param_id in comp_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, &param.parser_type, true);
 
            debug_assert!(var_type.is_done, "expected component arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData::new_local(var_type));
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionDefinitionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Function(id);
 

	
 
        let func_def = &ctx.heap[id];
 
        debug_assert_eq!(func_def.poly_vars.len(), self.poly_vars.len(), "function polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting function '{}': {}", &String::from_utf8_lossy(&func_def.identifier.value), id.0.index);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        for param_id in func_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, &param.parser_type, true);
 
            debug_assert!(var_type.is_done, "expected function arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData::new_local(var_type));
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    // Statements
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        // Transfer statements for traversal
 
        let block = &ctx.heap[id];
 

	
 
        for stmt_id in block.statements.clone() {
 
            self.visit_stmt(ctx, stmt_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, ctx: &mut Ctx, id: MemoryStatementId) -> VisitorResult {
 
        let memory_stmt = &ctx.heap[id];
 

	
 
        let local = &ctx.heap[memory_stmt.variable];
 
        let var_type = self.determine_inference_type_from_parser_type(ctx, &local.parser_type, true);
 
        self.var_types.insert(memory_stmt.variable.upcast(), VarData::new_local(var_type));
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, ctx: &mut Ctx, id: ChannelStatementId) -> VisitorResult {
 
        let channel_stmt = &ctx.heap[id];
 

	
 
        let from_local = &ctx.heap[channel_stmt.from];
 
        let from_var_type = self.determine_inference_type_from_parser_type(ctx, &from_local.parser_type, true);
 
        self.var_types.insert(from_local.this.upcast(), VarData::new_channel(from_var_type, channel_stmt.to.upcast()));
 

	
 
        let to_local = &ctx.heap[channel_stmt.to];
 
        let to_var_type = self.determine_inference_type_from_parser_type(ctx, &to_local.parser_type, true);
 
        self.var_types.insert(to_local.this.upcast(), VarData::new_channel(to_var_type, channel_stmt.from.upcast()));
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        let labeled_stmt = &ctx.heap[id];
 
        let substmt_id = labeled_stmt.body;
 
        self.visit_stmt(ctx, substmt_id)
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        let if_stmt = &ctx.heap[id];
 

	
 
        let true_body_id = if_stmt.true_body;
 
        let false_body_id = if_stmt.false_body;
 
        let test_expr_id = if_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_block_stmt(ctx, true_body_id)?;
 
        if let Some(false_body_id) = false_body_id {
 
            self.visit_block_stmt(ctx, false_body_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_while_stmt(&mut self, ctx: &mut Ctx, id: WhileStatementId) -> VisitorResult {
 
        let while_stmt = &ctx.heap[id];
 

	
 
        let body_id = while_stmt.body;
 
        let test_expr_id = while_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_block_stmt(ctx, body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        let sync_stmt = &ctx.heap[id];
 
        let body_id = sync_stmt.body;
 

	
 
        self.visit_block_stmt(ctx, body_id)
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        let return_stmt = &ctx.heap[id];
 
        let expr_id = return_stmt.expression;
 

	
 
        self.visit_expr(ctx, expr_id)
 
    }
 

	
 
    fn visit_new_stmt(&mut self, ctx: &mut Ctx, id: NewStatementId) -> VisitorResult {
 
        let new_stmt = &ctx.heap[id];
 
        let call_expr_id = new_stmt.expression;
 

	
 
        self.visit_call_expr(ctx, call_expr_id)
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        let expr_stmt = &ctx.heap[id];
 
        let subexpr_id = expr_stmt.expression;
 

	
 
        self.visit_expr(ctx, subexpr_id)
 
    }
 

	
 
    // Expressions
 

	
 
    fn visit_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let assign_expr = &ctx.heap[id];
 
        let left_expr_id = assign_expr.left;
 
        let right_expr_id = assign_expr.right;
 

	
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.visit_expr(ctx, right_expr_id)?;
 

	
 
        self.progress_assignment_expr(ctx, id)
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let conditional_expr = &ctx.heap[id];
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        self.expr_types.insert(test_expr_id, InferenceType::new(false, true, vec![InferenceTypePart::Bool]));
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_expr(ctx, true_expr_id)?;
 
        self.visit_expr(ctx, false_expr_id)?;
 

	
 
        self.progress_conditional_expr(ctx, id)
 
    }
 

	
 
    fn visit_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let binary_expr = &ctx.heap[id];
 
        let lhs_expr_id = binary_expr.left;
 
        let rhs_expr_id = binary_expr.right;
 

	
 
        self.visit_expr(ctx, lhs_expr_id)?;
 
        self.visit_expr(ctx, rhs_expr_id)?;
 

	
 
        self.progress_binary_expr(ctx, id)
 
    }
 

	
 
    fn visit_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let unary_expr = &ctx.heap[id];
 
        let arg_expr_id = unary_expr.expression;
 

	
 
        self.visit_expr(ctx, arg_expr_id)?;
 

	
 
        self.progress_unary_expr(ctx, id)
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let indexing_expr = &ctx.heap[id];
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, index_expr_id)?;
 

	
 
        self.progress_indexing_expr(ctx, id)
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let slicing_expr = &ctx.heap[id];
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, from_expr_id)?;
 
        self.visit_expr(ctx, to_expr_id)?;
 

	
 
        self.progress_slicing_expr(ctx, id)
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let select_expr = &ctx.heap[id];
 
        let subject_expr_id = select_expr.subject;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        self.progress_select_expr(ctx, id)
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let literal_expr = &ctx.heap[id];
 
        match &literal_expr.value {
 
            Literal::Null | Literal::False | Literal::True |
 
            Literal::Integer(_) | Literal::Character(_) | Literal::String(_) => {
 
                // No subexpressions
 
            },
 
            Literal::Struct(literal) => {
 
                // TODO: @performance
 
                let expr_ids: Vec<_> = literal.fields
 
                    .iter()
 
                    .map(|f| f.value)
 
                    .collect();
 

	
 
                self.insert_initial_struct_polymorph_data(ctx, id);
 

	
 
                for expr_id in expr_ids {
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 
            },
 
            Literal::Enum(_) => {
 
                // Enumerations do not carry any subexpressions, but may still
 
                // have a user-defined polymorphic marker variable. For this 
 
                // reason we may still have to apply inference to this 
 
                // polymorphic variable
 
                self.insert_initial_enum_polymorph_data(ctx, id);
 
            },
 
            Literal::Union(literal) => {
 
                // May carry subexpressions and polymorphic arguments
 
                // TODO: @performance
 
                let expr_ids = literal.values.clone();
 
                self.insert_initial_union_polymorph_data(ctx, id);
 

	
 
                for expr_id in expr_ids {
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 
            },
 
            Literal::Array(expressions) => {
 
                // TODO: @performance
 
                let expr_ids = expressions.clone();
 
                for expr_id in expr_ids {
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 
            }
 
        }
 

	
 
        self.progress_literal_expr(ctx, id)
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        self.insert_initial_call_polymorph_data(ctx, id);
 

	
 
        // TODO: @performance
 
        let call_expr = &ctx.heap[id];
 
        for arg_expr_id in call_expr.arguments.clone() {
 
            self.visit_expr(ctx, arg_expr_id)?;
 
        }
 

	
 
        self.progress_call_expr(ctx, id)
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let var_expr = &ctx.heap[id];
 
        debug_assert!(var_expr.declaration.is_some());
 
        let var_data = self.var_types.get_mut(var_expr.declaration.as_ref().unwrap()).unwrap();
 
        var_data.used_at.push(upcast_id);
 

	
 
        self.progress_variable_expr(ctx, id)
 
    }
 
}
 

	
 
macro_rules! debug_assert_expr_ids_unique_and_known {
 
    // Base case for a single expression ID
 
    ($resolver:ident, $id:ident) => {
 
        if cfg!(debug_assertions) {
 
            debug_assert!($resolver.expr_types.contains_key(&$id), "expression ID not present in expr_types");
 
        }
 
    };
 
    // Base case for two expression IDs
 
    ($resolver:ident, $id1:ident, $id2:ident) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2);
 
    };
 
    // Generic case
 
    ($resolver:ident, $id1:ident, $id2:ident, $($tail:ident),+) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2, $($tail),+);
 
    };
 
}
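// Illustrative usage (not part of the original code): a progress function with
// three known expression IDs could write
//   debug_assert_expr_ids_unique_and_known!(self, upcast_id, arg1_id, arg2_id);
// which, in debug builds, checks that neighbouring IDs in the list are distinct
// and that every ID has an entry in `expr_types`.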
 

	
 
macro_rules! debug_assert_ptrs_distinct {
 
    // Base case
 
    ($ptr1:ident, $ptr2:ident) => {
 
        debug_assert!(!std::ptr::eq($ptr1, $ptr2));
 
    };
 
    // Generic case
 
    ($ptr1:ident, $ptr2:ident, $($tail:ident),+) => {
 
        debug_assert_ptrs_distinct!($ptr1, $ptr2);
 
        debug_assert_ptrs_distinct!($ptr2, $($tail),+);
 
    };
 
}
 

	
 
impl PassTyping {
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError> {
 
        // Keep inferring until we can no longer make any progress
 
        while let Some(next_expr_id) = self.expr_queued.iter().next() {
 
            let next_expr_id = *next_expr_id;
 
            self.expr_queued.remove(&next_expr_id);
 
            self.progress_expr(ctx, next_expr_id)?;
 
        }
 

	
 
        // We check if we have all the types we need. If we're typechecking a 
 
        // polymorphic procedure more than once, then we have already annotated
 
        // the AST and have now performed typechecking for a different 
 
        // monomorph. In that case we just need to perform typechecking, no need
 
        // to annotate the AST again.
 
        let definition_id = match &self.definition_type {
 
            DefinitionType::Component(id) => id.upcast(),
 
            DefinitionType::Function(id) => id.upcast(),
 
            _ => unreachable!(),
 
        };
 

	
 
        let already_checked = ctx.types.get_base_definition(&definition_id).unwrap().has_any_monomorph();
 
        for (expr_id, expr_type) in self.expr_types.iter_mut() {
 
            if !expr_type.is_done {
 
                // Auto-infer numberlike/integerlike types to a regular int
 
                if expr_type.parts.len() == 1 && expr_type.parts[0] == InferenceTypePart::IntegerLike {
 
                    expr_type.parts[0] = InferenceTypePart::Int;
 
                } else {
 
                    let expr = &ctx.heap[*expr_id];
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module.source, expr.span(), format!(
 
                            "could not fully infer the type of this expression (got '{}')",
 
                            expr_type.display_name(&ctx.heap)
 
                        )
 
                    ));
 
                }
 
            }
 

	
 
            if !already_checked {
 
                let concrete_type = ctx.heap[*expr_id].get_type_mut();
 
                expr_type.write_concrete_type(concrete_type);
 
            } else {
 
                if cfg!(debug_assertions) {
 
                    let mut concrete_type = ConcreteType::default();
 
                    expr_type.write_concrete_type(&mut concrete_type);
 
                    debug_assert_eq!(*ctx.heap[*expr_id].get_type(), concrete_type);
 
                }
 
            }
 
        }
 

	
 
        // All types are fine
 
        ctx.types.add_monomorph(&definition_id, self.poly_vars.clone());
 

	
 
        // Check all things we need to monomorphize
 
        // TODO: Struct/enum/union monomorphization
 
        for (expr_id, extra_data) in self.extra_data.iter() {
 
            if extra_data.poly_vars.is_empty() { continue; }
 

	
 
            // Retrieve polymorph variable specification. Those of struct 
 
            // literals and those of procedure calls need to be fully inferred.
 
            // The remaining ones (e.g. select expressions) allow partial 
 
            // inference of types, as long as the accessed field's type is
 
            // fully inferred.
 
            let needs_full_inference = match &ctx.heap[*expr_id] {
 
                Expression::Call(_) => true,
 
                Expression::Literal(_) => true,
 
                _ => false
 
            };
 

	
 
            if needs_full_inference {
 
                let mut monomorph_types = Vec::with_capacity(extra_data.poly_vars.len());
 
                for (poly_idx, poly_type) in extra_data.poly_vars.iter().enumerate() {
 
                    if !poly_type.is_done {
 
                        // TODO: Single clean function for function signatures and polyvars.
 
                        // TODO: Better error message
 
                        let expr = &ctx.heap[*expr_id];
 
                        return Err(ParseError::new_error_at_span(
 
                            &ctx.module.source, expr.span(), format!(
 
                                "could not fully infer the type of polymorphic variable {} of this expression (got '{}')",
 
                                poly_idx, poly_type.display_name(&ctx.heap)
 
                            )
 
                        ))
 
                    }
 

	
 
                    let mut concrete_type = ConcreteType::default();
 
                    poly_type.write_concrete_type(&mut concrete_type);
 
                    monomorph_types.push(concrete_type);
 
                }
 

	
 
                // Resolve to the appropriate expression and instantiate 
 
                // monomorphs.
 
                match &ctx.heap[*expr_id] {
 
                    Expression::Call(call_expr) => {
 
                        // Add to type table if not yet typechecked
 
                        if let Method::Symbolic(symbolic) = &call_expr.method {
 
                            let definition_id = symbolic.definition.unwrap();
 
                            if !ctx.types.has_monomorph(&definition_id, &monomorph_types) {
 
                                let root_id = ctx.types
 
                                    .get_base_definition(&definition_id)
 
                                    .unwrap()
 
                                    .ast_root;
 

	
 
                                // Pre-emptively add the monomorph to the type table, but
 
                                // we still need to perform typechecking on it
 
                                // TODO: Unsure about this, performance wise
 
                                let queue_element = ResolveQueueElement{
 
                                    root_id,
 
                                    definition_id,
 
                                    monomorph_types,
 
                                };
 
                                if !queue.contains(&queue_element) {
 
                                    queue.push(queue_element);
 
                                }
 
                            }
 
                        }
 
                    },
 
                    Expression::Literal(lit_expr) => {
 
                        let definition_id = match &lit_expr.value {
 
                            Literal::Struct(literal) => literal.definition.as_ref().unwrap(),
 
                            Literal::Enum(literal) => literal.definition.as_ref().unwrap(),
 
                            Literal::Union(literal) => literal.definition.as_ref().unwrap(),
 
                            _ => unreachable!("post-inference monomorph for non-struct, non-enum, non-union literal")
 
                        };
 
                        if !ctx.types.has_monomorph(definition_id, &monomorph_types) {
 
                            ctx.types.add_monomorph(definition_id, monomorph_types);
 
                        }
 
                    },
 
                    _ => unreachable!("needs full inference, but not a struct literal or call expression")
 
                }
 
            } // else: was just a helper structure...
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError> {
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let id = expr.this;
 
                self.progress_assignment_expr(ctx, id)
 
            },
 
            Expression::Binding(_expr) => {
 
                unimplemented!("progress binding expression");
 
            },
 
            Expression::Conditional(expr) => {
 
                let id = expr.this;
 
                self.progress_conditional_expr(ctx, id)
 
            },
 
            Expression::Binary(expr) => {
 
                let id = expr.this;
 
                self.progress_binary_expr(ctx, id)
 
            },
 
            Expression::Unary(expr) => {
 
                let id = expr.this;
 
                self.progress_unary_expr(ctx, id)
 
            },
 
            Expression::Indexing(expr) => {
 
                let id = expr.this;
 
                self.progress_indexing_expr(ctx, id)
 
            },
 
            Expression::Slicing(expr) => {
 
                let id = expr.this;
 
                self.progress_slicing_expr(ctx, id)
 
            },
 
            Expression::Select(expr) => {
 
                let id = expr.this;
 
                self.progress_select_expr(ctx, id)
 
            },
 
            Expression::Literal(expr) => {
 
                let id = expr.this;
 
                self.progress_literal_expr(ctx, id)
 
            },
 
            Expression::Call(expr) => {
 
                let id = expr.this;
 
                self.progress_call_expr(ctx, id)
 
            },
 
            Expression::Variable(expr) => {
 
                let id = expr.this;
 
                self.progress_variable_expr(ctx, id)
 
            }
 
        }
 
    }
 

	
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> Result<(), ParseError> {
 
        use AssignmentOperator as AO;
 

	
 
        // TODO: Assignable check
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.left;
 
        let arg2_expr_id = expr.right;
 

	
 
        debug_log!("Assignment expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let progress_base = match expr.operation {
 
            AO::Set =>
 
                false,
 
            AO::Multiplied | AO::Divided | AO::Added | AO::Subtracted =>
 
                self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?,
 
            AO::Remained | AO::ShiftedLeft | AO::ShiftedRight |
 
            AO::BitwiseAnded | AO::BitwiseXored | AO::BitwiseOred =>
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?,
 
        };
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_base || progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 

	
 
        if progress_base || progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> Result<(), ParseError> {
 
        // Note: test expression type is already enforced
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.true_expression;
 
        let arg2_expr_id = expr.false_expression;
 

	
 
        debug_log!("Conditional expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> Result<(), ParseError> {
 
        // Note: our expression type might be fixed by our parent, but we still
 
        // need to make sure it matches the type associated with our operation.
 
        use BinaryOperator as BO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_id = expr.left;
 
        let arg2_id = expr.right;
 

	
 
        debug_log!("Binary expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = match expr.operation {
 
            BO::Concatenate => {
 
                // Arguments may be arrays/slices, output is always an array
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &ARRAYLIKE_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &ARRAYLIKE_TEMPLATE)?;
 

	
 
                // If they're all arraylike, then we want the subtype to match
 
                let (subtype_expr, subtype_arg1, subtype_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 1)?;
 

	
 
                (progress_expr || subtype_expr, progress_arg1 || subtype_arg1, progress_arg2 || subtype_arg2)
 
            },
 
            BO::LogicalOr | BO::LogicalAnd => {
 
                // Forced boolean on all
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &BOOL_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &BOOL_TEMPLATE)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::BitwiseOr | BO::BitwiseXor | BO::BitwiseAnd | BO::Remainder | BO::ShiftLeft | BO::ShiftRight => {
 
                // All equal of integer type
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
            BO::Equality | BO::Inequality => {
 
                // Equal2 on args, forced boolean output
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let (progress_arg1, progress_arg2) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, arg1_id, 0, arg2_id, 0)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::LessThan | BO::GreaterThan | BO::LessThanEqual | BO::GreaterThanEqual => {
 
                // Equal2 on args with numberlike type, forced boolean output
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg_base = self.apply_forced_constraint(ctx, arg1_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_arg1, progress_arg2) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, arg1_id, 0, arg2_id, 0)?;
 

	
 
                (progress_expr, progress_arg_base || progress_arg1, progress_arg_base || progress_arg2)
 
            },
 
            BO::Add | BO::Subtract | BO::Multiply | BO::Divide => {
 
                // All equal of number type
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_id); }
 
        if progress_arg2 { self.queue_expr(arg2_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> Result<(), ParseError> {
 
        use UnaryOperation as UO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg_id = expr.expression;
 

	
 
        debug_log!("Unary expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg  type: {}", self.expr_types.get(&arg_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg) = match expr.operation {
 
            UO::Positive | UO::Negative => {
 
                // Equal types of numeric class
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::BitwiseNot | UO::PreIncrement | UO::PreDecrement | UO::PostIncrement | UO::PostDecrement => {
 
                // Equal types of integer class
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::LogicalNot => {
 
                // Both booleans
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg = self.apply_forced_constraint(ctx, arg_id, &BOOL_TEMPLATE)?;
 
                (progress_expr, progress_arg)
 
            }
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg  type [{}]: {}", progress_arg, self.expr_types.get(&arg_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg { self.queue_expr(arg_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> Result<(), ParseError> {
 
@@ -2650,916 +2652,867 @@ impl TypeResolvingVisitor {
 
        let expr_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(expr_type, start_idx, arg1_type, start_idx)
 
        };
 
        if expr_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_expr_type_error(ctx, expr_id, arg1_id));
 
        }
 

	
 
        let args_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(arg1_type, start_idx, arg2_type, start_idx) };
 
        if args_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        // If all types are compatible, but the second call caused the arg1_type
 
        // to be expanded, then we must also assign this to expr_type.
 
        let mut progress_expr = expr_res.modified_lhs();
 
        let mut progress_arg1 = expr_res.modified_rhs();
 
        let progress_arg2 = args_res.modified_rhs();
 

	
 
        if args_res.modified_lhs() { 
 
            unsafe {
 
                let end_idx = InferenceType::find_subtree_end_idx(&(*arg2_type).parts, start_idx);
 
                let subtree = &((*arg2_type).parts[start_idx..end_idx]);
 
                (*expr_type).replace_subtree(start_idx, subtree);
 
            }
 
            progress_expr = true;
 
            progress_arg1 = true;
 
        }
 

	
 
        Ok((progress_expr, progress_arg1, progress_arg2))
 
    }
 

	
 
    // TODO: @optimize Since we only deal with a single type this might be done
 
    //  a lot more efficiently, methinks (disregarding the allocations here)
 
    fn apply_equal_n_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId, args: &[ExpressionId],
 
    ) -> Result<Vec<bool>, ParseError> {
 
        // Early exit
 
        match args.len() {
 
            0 => return Ok(vec!()),         // nothing to progress
 
            1 => return Ok(vec![false]),    // only one type, so nothing to infer
 
            _ => {}
 
        }
 

	
 
        let mut progress = Vec::new();
 
        progress.resize(args.len(), false);
 

	
 
        // Do pairwise inference, keep track of the last entry we made progress
 
        // on. Once done we need to update everything to the most-inferred type.
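        // Rough illustration (hypothetical types): given argument types
        // [?, ?, u32], the first pass leaves arg0/arg1 unknown, the second
        // pass progresses arg1 (the lhs) to u32, so `last_lhs_progressed`
        // becomes 1 and the loop further below re-infers arg0 from the final
        // type.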
 
        let mut arg_iter = args.iter();
 
        let mut last_arg_id = *arg_iter.next().unwrap();
 
        let mut last_lhs_progressed = 0;
 
        let mut lhs_arg_idx = 0;
 

	
 
        while let Some(next_arg_id) = arg_iter.next() {
 
            let arg1_type: *mut _ = self.expr_types.get_mut(&last_arg_id).unwrap();
 
            let arg2_type: *mut _ = self.expr_types.get_mut(next_arg_id).unwrap();
 

	
 
            let res = unsafe {
 
                InferenceType::infer_subtrees_for_both_types(arg1_type, 0, arg2_type, 0)
 
            };
 

	
 
            if res == DualInferenceResult::Incompatible {
 
                return Err(self.construct_arg_type_error(ctx, expr_id, last_arg_id, *next_arg_id));
 
            }
 

	
 
            if res.modified_lhs() {
 
                // We re-inferred something on the left hand side, so everything
 
                // up until now should be re-inferred.
 
                progress[lhs_arg_idx] = true;
 
                last_lhs_progressed = lhs_arg_idx;
 
            }
 
            progress[lhs_arg_idx + 1] = res.modified_rhs();
 

	
 
            last_arg_id = *next_arg_id;
 
            lhs_arg_idx += 1;
 
        }
 

	
 
        // Re-infer everything. Note that we do not need to re-infer the type
 
        // exactly at `last_lhs_progressed`, but only everything up to it.
 
        let last_type: *mut _ = self.expr_types.get_mut(args.last().unwrap()).unwrap();
 
        for arg_idx in 0..last_lhs_progressed {
 
            let arg_type: *mut _ = self.expr_types.get_mut(&args[arg_idx]).unwrap();
 
            unsafe{
 
                (*arg_type).replace_subtree(0, &(*last_type).parts);
 
            }
 
            progress[arg_idx] = true;
 
        }
 

	
 
        Ok(progress)
 
    }
 

	
 
    /// Determines the `InferenceType` for the expression based on the
 
    /// expression parent. Note that if the parent is another expression, we do
 
    /// not take special action, instead we let parent expressions fix the type
 
    /// of subexpressions before they have a chance to call this function.
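    ///
    /// For example, `visit_conditional_expr` forces its test expression to
    /// `Bool` before visiting it, instead of relying on the parent-based
    /// defaults below.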
 
    fn insert_initial_expr_inference_type(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId
 
    ) -> Result<(), ParseError> {
 
        use ExpressionParent as EP;
 
        use InferenceTypePart as ITP;
 

	
 
        let expr = &ctx.heap[expr_id];
 
        let inference_type = match expr.parent() {
 
            EP::None =>
 
                // Should have been set by linker
 
                unreachable!(),
 
            EP::ExpressionStmt(_) | EP::Expression(_, _) =>
 
                // Determined during type inference
 
                InferenceType::new(false, false, vec![ITP::Unknown]),
 
            EP::If(_) | EP::While(_) =>
 
                // Must be a boolean
 
                InferenceType::new(false, true, vec![ITP::Bool]),
 
            EP::Return(_) =>
 
                // Must match the return type of the function
 
                if let DefinitionType::Function(func_id) = self.definition_type {
 
                    let return_parser_type_id = ctx.heap[func_id].return_type;
 
                    self.determine_inference_type_from_parser_type(ctx, return_parser_type_id, true)
 
                } else {
 
                    // Cannot happen: definition always set upon body traversal
 
                    // and "return" calls in components are illegal.
 
                    unreachable!();
 
                },
 
            EP::New(_) =>
 
                // Must be a component call, which we assign a "Void" return
 
                // type
 
                InferenceType::new(false, true, vec![ITP::Void]),
 
        };
 

	
 
        match self.expr_types.entry(expr_id) {
 
            Entry::Vacant(vacant) => {
 
                vacant.insert(inference_type);
 
            },
 
            Entry::Occupied(mut preexisting) => {
 
                // We already have an entry, this happens if our parent fixed
 
                // our type (e.g. we're used in a conditional expression's test)
 
                // but we have a different type.
 
                // TODO: Is this ever called? Seems like it can't
 
                debug_assert!(false, "I am actually called, my ID is {}", expr_id.index);
 
                let old_type = preexisting.get_mut();
 
                if let SingleInferenceResult::Incompatible = InferenceType::infer_subtree_for_single_type(
 
                    old_type, 0, &inference_type.parts, 0
 
                ) {
 
                    return Err(self.construct_expr_type_error(ctx, expr_id, expr_id))
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn insert_initial_call_polymorph_data(
 
        &mut self, ctx: &mut Ctx, call_id: CallExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Note: the polymorph variables may be partially specified and may
 
        // contain references to the wrapping definition's (i.e. the proctype
 
        // we are currently visiting) polymorphic arguments.
 
        //
 
        // The arguments of the call may refer to polymorphic variables in the
 
        // definition of the function we're calling, not of the wrapping
 
        // definition. We insert markers in these inferred types to be able to
 
        // map them back and forth to the polymorphic arguments of the function
 
        // we are calling.
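        //
        // As a rough illustration using the builtin `get` handled below: the
        // embedded argument type becomes `input<M0>` and the return type `M0`,
        // where `M0` stands for the MarkerBody(0) of the callee's polymorphic
        // variable, so inferring the port's payload type also fixes the call's
        // return type through that marker.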
 
        let call = &ctx.heap[call_id];
 

	
 
        // Handle the polymorphic variables themselves
 
        let mut poly_vars = Vec::with_capacity(call.poly_args.len());
 
        for poly_arg_type_id in call.poly_args.clone() { // TODO: @performance
 
            poly_vars.push(self.determine_inference_type_from_parser_type(ctx, poly_arg_type_id, true));
 
        }
 

	
 
        // Handle the arguments
 
        // TODO: @cleanup: Maybe factor this out for reuse in the validator/linker, should also
 
        //  make the code slightly more robust.
 
        let (embedded_types, return_type) = match &call.method {
 
            Method::Create => {
 
                // Not polymorphic
 
                (
 
                    vec![InferenceType::new(false, true, vec![ITP::Int])],
 
                    InferenceType::new(false, true, vec![ITP::Message, ITP::Byte])
 
                )
 
            },
 
            Method::Fires => {
 
                // bool fires<T>(PortLike<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::PortLike, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(false, true, vec![ITP::Bool])
 
                )
 
            },
 
            Method::Get => {
 
                // T get<T>(input<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::Input, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                )
 
            },
 
            Method::Put => {
 
                // void Put<T>(output<T> port, T msg)
 
                (
 
                    vec![
 
                        InferenceType::new(true, false, vec![ITP::Output, ITP::MarkerBody(0), ITP::Unknown]),
 
                        InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                    ],
 
                    InferenceType::new(false, true, vec![ITP::Void])
 
                )
 
            }
 
            Method::Symbolic(symbolic) => {
 
                let definition = &ctx.heap[symbolic.definition.unwrap()];
 

	
 
                match definition {
 
                    Definition::Component(definition) => {
 
                        debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, &param.parser_type, false));
 
                        }
 

	
 
                        (parameter_types, InferenceType::new(false, true, vec![InferenceTypePart::Void]))
 
                    },
 
                    Definition::Function(definition) => {
 
                        debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, &param.parser_type, false));
 
                        }
 

	
 
                        debug_assert_eq!(definition.return_types.len(), 1, "multiple return types not yet implemented");
 

	
 
                        let return_type = self.determine_inference_type_from_parser_type(ctx, &definition.return_types[0], false);
 
                        (parameter_types, return_type)
 
                    },
 
                    Definition::Struct(_) | Definition::Enum(_) | Definition::Union(_) => {
 
                        unreachable!("insert initial polymorph data for struct/enum/union");
 
                    }
 
                }
 
            }
 
        };
 

	
 
        self.extra_data.insert(call_id.upcast(), ExtraData {
 
            poly_vars,
 
            embedded: embedded_types,
 
            returned: return_type
 
        });
 
    }
 

	
 
    fn insert_initial_struct_polymorph_data(
 
        &mut self, ctx: &mut Ctx, lit_id: LiteralExpressionId,
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_struct();
 

	
 
        // Handle polymorphic arguments
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args2.len());
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args2.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            ); 
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        }
 

	
 
        // Handle parser types on struct definition
 
        let definition = &ctx.heap[literal.definition.unwrap()];
 
        let definition = match definition {
 
            Definition::Struct(definition) => {
 
                debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                definition
 
            },
 
            _ => unreachable!("definition for struct literal does not point to struct definition")
 
        };
 

	
 
        // Note: programmer is capable of specifying fields in a struct literal
 
        // in a different order than on the definition. We take the literal-
 
        // specified order to be leading.
 
        let mut embedded_types = Vec::with_capacity(definition.fields.len());
 
        for lit_field in literal.fields.iter() {
 
            let def_field = &definition.fields[lit_field.field_idx];
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, &def_field.parser_type, false
 
            );
 
            embedded_types.push(inference_type);
 
        }
 

	
 
        // Return type is the struct type itself, with the appropriate 
 
        // polymorphic variables. So:
 
        // - 1 part for definition
 
        // - N_poly_arg marker parts for each polymorphic argument
 
        // - all the parts for the currently known polymorphic arguments 
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(definition.this.upcast(), poly_vars.len()));
 
        let mut return_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
            if !poly_var.is_done { return_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let return_type = InferenceType::new(!poly_vars.is_empty(), return_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars, 
 
            embedded: embedded_types,
 
            returned: return_type,
 
        });
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct for enum expressions. These
 
    /// can never be determined from the enum itself, but may be inferred from
 
    /// the use of the enum.
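    ///
    /// For instance, a bare enum literal (hypothetically `Answer::Unknown`)
    /// says nothing about its polymorphic arguments; only a later use of the
    /// value, e.g. passing it to a function expecting `Answer<u32>`, can pin
    /// them down.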
 
    fn insert_initial_enum_polymorph_data(
 
        &mut self, ctx: &Ctx, lit_id: LiteralExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_enum();
 

	
 
        // Handle polymorphic arguments to the enum
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args2.len());
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args2.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            );
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        }
 

	
 
        // Handle enum type itself
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(literal.definition.unwrap(), poly_vars.len()));
 
        let mut enum_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
            if !poly_var.is_done { enum_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let enum_type = InferenceType::new(!poly_vars.is_empty(), enum_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars,
 
            embedded: Vec::new(),
 
            returned: enum_type,
 
        });
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct for unions. The polymorphic
 
    /// arguments may be partially determined from embedded values in the union.
 
    fn insert_initial_union_polymorph_data(
 
        &mut self, ctx: &Ctx, lit_id: LiteralExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_union();
 

	
 
        // Construct the polymorphic variables
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args2.len());
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args2.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            );
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        }
 

	
 
        // Handle any of the embedded values in the variant, if specified
 
        let definition_id = literal.definition.unwrap();
 
        let union_definition = ctx.types.get_base_definition(&definition_id)
 
            .unwrap()
 
            .definition.as_union();
 
        let variant_definition = &union_definition.variants[literal.variant_idx];
 
        debug_assert_eq!(variant_definition.embedded.len(), literal.values.len());
 

	
 
        let mut embedded = Vec::with_capacity(variant_definition.embedded.len());
 
        for embedded_parser_type in &variant_definition.embedded {
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, embedded_parser_type, false
 
            );
 
            embedded.push(inference_type);
 
        }
 

	
 
        // Handle the type of the union itself
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(definition_id, poly_vars.len()));
 
        let mut union_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
            if !poly_var.is_done { union_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts_reserved, parts.len());
 
        let union_type = InferenceType::new(!poly_vars.is_empty(), union_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars,
 
            embedded,
 
            returned: union_type
 
        });
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct. Assumes that the select
 
    /// expression's referenced (definition_id, field_idx) has been resolved.
 
    fn insert_initial_select_polymorph_data(
 
        &mut self, ctx: &Ctx, select_id: SelectExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Retrieve relevant data
 
        let expr = &ctx.heap[select_id];
 
        let field = expr.field.as_symbolic();
 

	
 
        let definition_id = field.definition.unwrap();
 
        let definition = ctx.heap[definition_id].as_struct();
 
        let field_idx = field.field_idx;
 

	
 
        // Generate initial polyvar types and struct type
 
        let num_poly_vars = definition.poly_vars.len();
 
        let mut poly_vars = Vec::with_capacity(num_poly_vars);
 
        let struct_parts_reserved = 1 + 2 * num_poly_vars;
 
        let mut struct_parts = Vec::with_capacity(struct_parts_reserved);
 
        struct_parts.push(ITP::Instance(definition_id, num_poly_vars));        
 

	
 
        for poly_idx in 0..num_poly_vars {
 
            poly_vars.push(InferenceType::new(true, false, vec![
 
                ITP::MarkerBody(poly_idx), ITP::Unknown,
 
            ]));
 
            struct_parts.push(ITP::MarkerBody(poly_idx));
 
            struct_parts.push(ITP::Unknown);
 
        }
 
        debug_assert_eq!(struct_parts.len(), struct_parts_reserved);
 

	
 
        // Generate initial field type
 
        let field_type = self.determine_inference_type_from_parser_type(
 
            ctx, &definition.fields[field_idx].parser_type, false
 
        );
 

	
 
        self.extra_data.insert(select_id.upcast(), ExtraData{
 
            poly_vars,
 
            embedded: vec![InferenceType::new(num_poly_vars != 0, num_poly_vars == 0, struct_parts)],
 
            returned: field_type
 
        });
 
    }
 

	
 
    /// Determines the initial InferenceType from the provided ParserType. This
 
    /// may be called with two kinds of intentions:
 
    /// 1. To resolve a ParserType within the body of a function, or on
 
    ///     polymorphic arguments to calls/instantiations within that body. This
 
    ///     means that the polymorphic variables are known and can be replaced
 
    ///     with the monomorph we're instantiating.
 
    /// 2. To resolve a ParserType on a called function's definition or on
 
    ///     an instantiated datatype's members. This means that the polymorphic
 
    ///     arguments inside those ParserTypes refer to the polymorphic
 
    ///     variables in the called/instantiated type's definition.
 
    /// In the second case we place InferenceTypePart::Marker instances such
 
    /// that we can perform type inference on the polymorphic variables.
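    ///
    /// As an illustration (hypothetical definition): given `func pick<T>(T a, T b) -> T`,
    /// resolving the parameter type `T` on the callee side (case 2) produces the
    /// parts `[MarkerBody(0), Unknown]`, while resolving a `T[]` written inside the
    /// body we are currently processing (case 1) substitutes the already-known
    /// polymorphic variable, e.g. `[Array, MarkerDefinition(0), UInt32]` once `T`
    /// is known to be `UInt32`.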
 
    fn determine_inference_type_from_parser_type(
 
        &mut self, ctx: &Ctx, parser_type: &ParserType,
 
        parser_type_in_body: bool
 
    ) -> InferenceType {
 
        use ParserTypeVariant as PTV;
 
        use InferenceTypePart as ITP;
 

	
 
        let mut infer_type = Vec::with_capacity(parser_type.elements.len());
 
        let mut has_inferred = false;
 
        let mut has_markers = false;
 

	
 
        for element in &parser_type.elements {
 
            match &element.variant {
 
                PTV::Message => {
 
                    // TODO: @types Remove the Message -> Byte hack at some point...
 
                    infer_type.push(ITP::Message);
 
                    infer_type.push(ITP::UInt8);
 
                },
 
                PTV::Bool => { infer_type.push(ITP::Bool); },
 
                PTV::UInt8 => { infer_type.push(ITP::UInt8); },
 
                PTV::UInt16 => { infer_type.push(ITP::UInt16); },
 
                PTV::UInt32 => { infer_type.push(ITP::UInt32); },
 
                PTV::UInt64 => { infer_type.push(ITP::UInt64); },
 
                PTV::SInt8 => { infer_type.push(ITP::SInt8); },
 
                PTV::SInt16 => { infer_type.push(ITP::SInt16); },
 
                PTV::SInt32 => { infer_type.push(ITP::SInt32); },
 
                PTV::SInt64 => { infer_type.push(ITP::SInt64); },
 
                PTV::Character => { infer_type.push(ITP::Character); },
 
                PTV::String => { infer_type.push(ITP::String); },
 
                PTV::IntegerLiteral => { unreachable!("integer literal type on variable type"); },
 
                PTV::Inferred => {
 
                    infer_type.push(ITP::Unknown);
 
                    has_inferred = true;
 
                },
 
                PTV::Array => { infer_type.push(ITP::Array); },
 
                PTV::Input => { infer_type.push(ITP::Input); },
 
                PTV::Output => { infer_type.push(ITP::Output); },
 
                PTV::PolymorphicArgument(belongs_to_definition, poly_arg_idx) => {
 
                    let poly_arg_idx = *poly_arg_idx;
 
                    if parser_type_in_body {
 
                        // Refers to polymorphic argument on procedure we're currently processing.
 
                        // This argument is already known.
 
                        debug_assert_eq!(belongs_to_definition, self.definition_type.definition_id());
 
                        debug_assert!((poly_arg_idx as usize) < self.poly_vars.len());
 

	
 
                        infer_type.push(ITP::MarkerDefinition(poly_arg_idx as usize));
 
                        for concrete_part in &self.poly_vars[poly_arg_idx].parts {
 
                            infer_type.push(ITP::from(*concrete_part));
 
                        }
 
                    } else {
 
                        // Polymorphic argument has to be inferred
 
                        has_markers = true;
 
                        has_inferred = true;
 
                        infer_type.push(ITP::MarkerBody(poly_arg_idx));
 
                        infer_type.push(ITP::Unknown)
 
                    }
 
                },
 
                PTV::Definition(definition_id, num_embedded) => {
 
                    infer_type.push(ITP::Instance(*definition_id, *num_embedded));
 
                }
 
            }
 
        }
 

	
 
        InferenceType::new(has_markers, !has_inferred, infer_type)
 
    }
 

	
 
    /// Construct an error when an expression's type does not match. This
 
    /// happens if we infer the expression type from its arguments (e.g. the
 
    /// expression type of an addition operator is the type of its arguments),
    /// but the expression type was already set due to our parent (e.g. an
 
    /// "if statement" or a "logical not" always expecting a boolean)
 
    fn construct_expr_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, arg_id: ExpressionId
 
    ) -> ParseError {
 
        // TODO: Expand and provide more meaningful information for humans
 
        let expr = &ctx.heap[expr_id];
 
        let arg_expr = &ctx.heap[arg_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 
        let arg_type = self.expr_types.get(&arg_id).unwrap();
 

	
 
        return ParseError::new_error_at_span(
 
            &ctx.module.source, expr.span(), format!(
 
                "incompatible types: this expression expected a '{}'",
 
                expr_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg_expr.span(), format!(
 
                "but this expression yields a '{}'",
 
                arg_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_arg_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let arg1 = &ctx.heap[arg1_id];
 
        let arg2 = &ctx.heap[arg2_id];
 

	
 
        let arg1_type = self.expr_types.get(&arg1_id).unwrap();
 
        let arg2_type = self.expr_types.get(&arg2_id).unwrap();
 

	
 
        return ParseError::new_error_str_at_span(
 
            &ctx.module.source, expr.span(),
 
            "incompatible types: cannot apply this expression"
 
        ).with_info_at_span(
 
            &ctx.module.source, arg1.span(), format!(
 
                "Because this expression has type '{}'",
 
                arg1_type.display_name(&ctx.heap)
 
            )
 
        ).with_info_at_span(
 
            &ctx.module.source, arg2.span(), format!(
 
                "But this expression has type '{}'",
 
                arg2_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_template_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 

	
 
        return ParseError::new_error_at_span(
 
            &ctx.module.source, expr.span(), format!(
 
                "incompatible types: got a '{}' but expected a '{}'",
 
                expr_type.display_name(&ctx.heap), 
 
                InferenceType::partial_display_name(&ctx.heap, template)
 
            )
 
        )
 
    }
 

	
 
    /// Constructs a human interpretable error in the case that type inference
 
    /// on a polymorphic variable to a function call or literal construction 
 
    /// failed. This may only be caused by a pair of inference types (which may 
 
    /// come from arguments or the return type) having two different inferred 
 
    /// values for that polymorphic variable.
 
    ///
 
    /// So we find this pair and construct the error using it.
 
    ///
 
    /// We assume that the expression is a function call or a struct literal,
 
    /// and that an actual error has occurred.
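    ///
    /// For instance (hypothetical call): with `func pick<T>(T a, T b) -> T`, the
    /// call `pick(0, "hi")` makes the first argument infer `T` as a number-like
    /// type and the second as a string, and the constructed error points at both
    /// argument spans with the conflicting inferred types.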
 
    fn construct_poly_arg_error(
 
        ctx: &Ctx, poly_data: &ExtraData, expr_id: ExpressionId
 
    ) -> ParseError {
 
        // Helper function to check for polymorph mismatch between two inference
 
        // types.
 
        fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(usize, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            if !type_a.has_body_marker || !type_b.has_body_marker {
 
                return None
 
            }
 

	
 
            for (marker_a, section_a) in type_a.body_marker_iter() {
 
                for (marker_b, section_b) in type_b.body_marker_iter() {
 
                    if marker_a != marker_b {
 
                        // Not the same polymorphic variable
 
                        continue;
 
                    }
 

	
 
                    if !InferenceType::check_subtrees(section_a, 0, section_b, 0) {
 
                        // Not compatible
 
                        return Some((marker_a, section_a, section_b))
 
                    }
 
                }
 
            }
 

	
 
            None
 
        }
 

	
 
        // Helper function to retrieve polyvar name and definition name
 
        fn get_poly_var_and_func_name(ctx: &Ctx, poly_var_idx: usize, expr: &CallExpression) -> (String, String) {
 
            match &expr.method {
 
                Method::Create => unreachable!(),
 
                Method::Fires => (String::from('T'), String::from("fires")),
 
                Method::Get => (String::from('T'), String::from("get")),
 
                Method::Put => (String::from('T'), String::from("put")),
 
                Method::Symbolic(symbolic) => {
 
                    let definition = &ctx.heap[symbolic.definition.unwrap()];
 
                    let poly_var = match definition {
 
                        Definition::Struct(_) | Definition::Enum(_) | Definition::Union(_) => unreachable!(),
 
                        Definition::Function(definition) => {
 
                            String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string()
 
                        },
 
                        Definition::Component(definition) => {
 
                            String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string()
 
                        }
 
                    };
 
                    let func_name = String::from_utf8_lossy(&symbolic.identifier.value).to_string();
 
                    (poly_var, func_name)
 
                }
 
            }
 
        }
 

	
 
        fn get_poly_var_and_definition_name<'a>(ctx: &'a Ctx, poly_var_idx: usize, definition_id: DefinitionId) -> (&'a str, &'a str) {
            let definition = &ctx.heap[definition_id];
            let poly_var = definition.poly_vars()[poly_var_idx].value.as_str();
            let func_name = definition.identifier().value.as_str();

            (poly_var, func_name)
 
        }
 

	
 
        // Helper function to construct initial error
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &Expression) -> ParseError {
 
            match expr {
 
                Expression::Call(expr) => {
 
                    let (poly_var, func_name) = get_poly_var_and_definition_name(ctx, poly_var_idx, expr.definition);
                    return ParseError::new_error_at_span(
                        &ctx.module.source, expr.span, format!(
 
                            "Conflicting type for polymorphic variable '{}' of '{}'",
 
                            poly_var, func_name
 
                        )
 
                    )
 
                },
 
                Expression::Literal(expr) => {
 
                    let definition_id = match &expr.value {
 
                        Literal::Struct(v) => v.definition.unwrap(),
 
                        Literal::Enum(v) => v.definition.unwrap(),
 
                        Literal::Union(v) => v.definition.unwrap(),
 
                        _ => unreachable!(),
 
                    };
 

	
 
                    let (poly_var, type_name) = get_poly_var_and_definition_name(ctx, poly_var_idx, definition_id);
                    return ParseError::new_error_at_span(
                        &ctx.module.source, expr.span, format!(
                            "Conflicting type for polymorphic variable '{}' of instantiation of '{}'",
                            poly_var, type_name
 
                        )
 
                    );
 
                },
 
                Expression::Select(expr) => {
 
                    let field = expr.field.as_symbolic();
 
                    let (poly_var, struct_name) = get_poly_var_and_definition_name(ctx, poly_var_idx, field.definition.unwrap());
                    return ParseError::new_error_at_span(
                        &ctx.module.source, expr.position(), format!(
                            "Conflicting type for polymorphic variable '{}' while accessing field '{}' of '{}'",
                            poly_var, field.identifier.value.as_str(), struct_name
 
                        )
 
                    )
 
                }
 
                _ => unreachable!("called construct_poly_arg_error without a call/literal expression")
 
                _ => unreachable!("called construct_poly_arg_error without an expected expression, got: {:?}", expr)
 
            }
 
        }
 

	
 
        // Actual checking
 
        let expr = &ctx.heap[expr_id];
 
        let (expr_args, expr_return_name) = match expr {
 
            Expression::Call(expr) => 
 
                (
 
                    expr.arguments.clone(),
 
                    "return type"
 
                ),
 
            Expression::Literal(expr) => {
 
                let expressions = match &expr.value {
 
                    Literal::Struct(v) => v.fields.iter()
 
                        .map(|f| f.value)
 
                        .collect(),
 
                    Literal::Enum(_) => Vec::new(),
 
                    Literal::Union(v) => v.values.clone(),
 
                    _ => unreachable!()
 
                };
 

	
 
                ( expressions, "literal" )
 
            },
 
            Expression::Select(expr) =>
 
                // Select expression uses the polymorphic variables of the 
 
                // struct it is accessing, so get the subject expression.
 
                (
 
                    vec![expr.subject],
 
                    "selected field"
 
                ),
 
            _ => unreachable!(),
 
        };
 

	
 
        // - check return type with itself
 
        if let Some((poly_idx, section_a, section_b)) = has_poly_mismatch(
 
            &poly_data.returned, &poly_data.returned
 
        ) {
 
            return construct_main_error(ctx, poly_idx, expr)
 
                .with_info_at_span(
                    &ctx.module.source, expr.span(), format!(
 
                        "The {} inferred the conflicting types '{}' and '{}'",
 
                        expr_return_name,
 
                        InferenceType::partial_display_name(&ctx.heap, section_a),
 
                        InferenceType::partial_display_name(&ctx.heap, section_b)
 
                    )
 
                );
 
        }
 

	
 
        // - check arguments with each other argument and with return type
 
        for (arg_a_idx, arg_a) in poly_data.embedded.iter().enumerate() {
 
            for (arg_b_idx, arg_b) in poly_data.embedded.iter().enumerate() {
 
                if arg_b_idx > arg_a_idx {
 
                    break;
 
                }
 

	
 
                if let Some((poly_idx, section_a, section_b)) = has_poly_mismatch(&arg_a, &arg_b) {
 
                    let error = construct_main_error(ctx, poly_idx, expr);
 
                    if arg_a_idx == arg_b_idx {
 
                        // Same argument
 
                        let arg = &ctx.heap[expr_args[arg_a_idx]];
 
                        return error.with_info_at_span(
                            &ctx.module.source, arg.span(), format!(
 
                                "This argument inferred the conflicting types '{}' and '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_a),
 
                                InferenceType::partial_display_name(&ctx.heap, section_b)
 
                            )
 
                        );
 
                    } else {
 
                        let arg_a = &ctx.heap[expr_args[arg_a_idx]];
 
                        let arg_b = &ctx.heap[expr_args[arg_b_idx]];
 
                        return error.with_info_at_span(
                            &ctx.module.source, arg_a.span(), format!(
 
                                "This argument inferred it to '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_a)
 
                            )
 
                        ).with_info_at_span(
                            &ctx.module.source, arg_b.span(), format!(
 
                                "While this argument inferred it to '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_b)
 
                            )
 
                        )
 
                    }
 
                }
 
            }
 

	
 
            // Check with return type
 
            if let Some((poly_idx, section_arg, section_ret)) = has_poly_mismatch(arg_a, &poly_data.returned) {
 
                let arg = &ctx.heap[expr_args[arg_a_idx]];
 
                return construct_main_error(ctx, poly_idx, expr)
 
                    .with_info_at_span(
                        &ctx.module.source, arg.span(), format!(
 
                            "This argument inferred it to '{}'",
 
                            InferenceType::partial_display_name(&ctx.heap, section_arg)
 
                        )
 
                    )
 
                    .with_postfixed_info(
 
                        &ctx.module.source, expr.span(), format!(
 
                            "While the {} inferred it to '{}'",
 
                            expr_return_name,
 
                            InferenceType::partial_display_name(&ctx.heap, section_ret)
 
                        )
 
                    );
 
            }
 
        }
 

	
 
        unreachable!("construct_poly_arg_error without actual error found?")
 
    }
 
}
 

	
 
#[cfg(test)]
 
mod tests {
 
    use super::*;
 
    use crate::protocol::arena::Id;
 
    use InferenceTypePart as ITP;
 
    use InferenceType as IT;
 

	
 
    #[test]
 
    fn test_single_part_inference() {
 
        // lhs argument inferred from rhs
 
        let pairs = [
 
            (ITP::NumberLike, ITP::UInt8),
 
            (ITP::IntegerLike, ITP::SInt32),
 
            (ITP::Unknown, ITP::UInt64),
 
            (ITP::Unknown, ITP::String)
 
        ];
 
        for (lhs, rhs) in pairs.iter() {
 
            // Using infer-both
 
            let mut lhs_type = IT::new(false, false, vec![lhs.clone()]);
 
            let mut rhs_type = IT::new(false, true, vec![rhs.clone()]);
 
            let result = unsafe{ IT::infer_subtrees_for_both_types(
 
                &mut lhs_type, 0, &mut rhs_type, 0
 
            ) };
 
            assert_eq!(DualInferenceResult::First, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 

	
 
            // Using infer-single
 
            let mut lhs_type = IT::new(false, false, vec![lhs.clone()]);
 
            let rhs_type = IT::new(false, true, vec![rhs.clone()]);
 
            let result = IT::infer_subtree_for_single_type(
 
                &mut lhs_type, 0, &rhs_type.parts, 0
 
            );
 
            assert_eq!(SingleInferenceResult::Modified, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 
        }
 
    }
 

	
 
    #[test]
 
    fn test_multi_part_inference() {
 
        let pairs = [
 
            (vec![ITP::ArrayLike, ITP::NumberLike], vec![ITP::Slice, ITP::SInt8]),
 
            (vec![ITP::Unknown], vec![ITP::Input, ITP::Array, ITP::String]),
 
            (vec![ITP::PortLike, ITP::SInt32], vec![ITP::Input, ITP::SInt32]),
 
            (vec![ITP::Unknown], vec![ITP::Output, ITP::SInt32]),
 
            (
 
                vec![ITP::Instance(Id::new(0), 2), ITP::Input, ITP::Unknown, ITP::Output, ITP::Unknown],
 
                vec![ITP::Instance(Id::new(0), 2), ITP::Input, ITP::Array, ITP::SInt32, ITP::Output, ITP::SInt32]
 
            )
 
        ];
 

	
 
        for (lhs, rhs) in pairs.iter() {
 
            let mut lhs_type = IT::new(false, false, lhs.clone());
 
            let mut rhs_type = IT::new(false, true, rhs.clone());
 
            let result = unsafe{ IT::infer_subtrees_for_both_types(
 
                &mut lhs_type, 0, &mut rhs_type, 0
 
            ) };
 
            assert_eq!(DualInferenceResult::First, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 

	
 
            let mut lhs_type = IT::new(false, false, lhs.clone());
 
            let rhs_type = IT::new(false, true, rhs.clone());
 
            let result = IT::infer_subtree_for_single_type(
 
                &mut lhs_type, 0, &rhs_type.parts, 0
 
            );
 
            assert_eq!(SingleInferenceResult::Modified, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts)
 
        }
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/pass_validation_linking.rs
Show inline comments
 
use crate::collections::{ScopedBuffer};
 
use crate::protocol::ast::*;
 
use crate::protocol::input_source2::{InputSource2 as InputSource, InputSpan, ParseError};
 
use crate::protocol::parser::{
 
    symbol_table2::*,
 
    type_table::*,
 
    utils::*,
 
};
 
use crate::protocol::input_source::*;
 
use crate::protocol::parser::symbol_table::*;
 
use crate::protocol::parser::type_table::*;
 

	
 
use super::visitor::{
 
    STMT_BUFFER_INIT_CAPACITY,
 
    EXPR_BUFFER_INIT_CAPACITY,
 
    TYPE_BUFFER_INIT_CAPACITY,
 
    Ctx, 
 
    Visitor2, 
 
    VisitorResult
 
};
 

	
 
#[derive(PartialEq, Eq)]
 
enum DefinitionType {
 
    Primitive(ComponentDefinitionId),
 
    Composite(ComponentDefinitionId),
 
    Function(FunctionDefinitionId)
 
}
 

	
 
impl DefinitionType {
 
    fn is_primitive(&self) -> bool { if let Self::Primitive(_) = self { true } else { false } }
 
    fn is_composite(&self) -> bool { if let Self::Composite(_) = self { true } else { false } }
 
    fn is_function(&self) -> bool { if let Self::Function(_) = self { true } else { false } }
 
    fn definition_id(&self) -> DefinitionId {
 
        match self {
 
            DefinitionType::Primitive(v) => v.upcast(),
 
            DefinitionType::Composite(v) => v.upcast(),
 
            DefinitionType::Function(v) => v.upcast(),
 
        }
 
    }
 
}
 

	
 
/// This particular visitor will go through the entire AST in a recursive manner
 
/// and check if all statements and expressions are legal (e.g. no "return"
 
/// statements in component definitions), and will link certain AST nodes to
 
/// their appropriate targets (e.g. goto statements, or function calls).
 
///
 
/// This visitor will not perform control-flow analysis (e.g. making sure that
 
/// each function actually returns) and will also not perform type checking. So
 
/// the linking of function calls and component instantiations will be checked
 
/// and linked to the appropriate definitions, but the return types and/or
 
/// arguments will not be checked for validity.
 
///
 
/// The statements in each block are traversed twice: a breadth-first pass that
/// registers local variables, labeled statements and the surrounding sync
/// scope, followed by a depth-first pass over the statements themselves.
/// Because of this scheme, expressions will not be visited in the breadth-first
 
/// pass.
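///
/// As an illustration of the checks performed here: a `return` statement
/// outside of a function body is rejected, a `goto` that would jump out of the
/// surrounding `synchronous` block is reported as an error, and a call to a
/// component that is not wrapped in a `new` statement is refused.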
 
pub(crate) struct PassValidationLinking {
 
    /// `in_sync` is `Some(id)` if the visitor is visiting the children of a
 
    /// synchronous statement. A single value is sufficient as nested
 
    /// synchronous statements are not allowed
 
    in_sync: Option<SynchronousStatementId>,
 
    /// `in_while` contains the last encountered `While` statement. This is used
 
    /// to resolve unlabeled `Continue`/`Break` statements.
 
    in_while: Option<WhileStatementId>,
 
    // Traversal state: current scope (which can be used to find the parent
 
    // scope), the definition variant we are considering, and whether the
 
    // visitor is performing breadthwise block statement traversal.
 
    cur_scope: Option<Scope>,
 
    def_type: DefinitionType,
 
    // Parent expression (the previous stmt/expression we visited that could be
 
    // used as an expression parent)
 
    expr_parent: ExpressionParent,
 
    // Keeping track of relative position in block in the breadth-first pass.
 
    // May not correspond to block.statement[index] if any statements are
 
    // inserted after the breadth-pass
 
    relative_pos_in_block: u32,
 
    // Single buffer of statement IDs that we want to traverse in a block.
 
    // Required to work around Rust borrowing rules and to prevent constant
 
    // cloning of vectors.
 
    statement_buffer: ScopedBuffer<StatementId>,
 
    // Another buffer, now with expression IDs, to prevent constant cloning of
 
    // vectors while working around rust's borrowing rules
 
    expression_buffer: ScopedBuffer<ExpressionId>,
 
}
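// A minimal sketch of how the scoped buffers above are used by the visitors
// below: a section is started (optionally pre-filled), indexed while visiting,
// and explicitly forgotten before leaving the scope so that the shared buffer
// shrinks back to its previous length.
//
//     let section = self.expression_buffer.start_section_initialized(&call_expr.arguments);
//     for idx in 0..section.len() {
//         let expr_id = section[idx];
//         self.visit_expr(ctx, expr_id)?; // may start nested sections on the same buffer
//     }
//     section.forget();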
 

	
 
impl PassValidationLinking {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            in_sync: None,
 
            in_while: None,
 
            cur_scope: None,
 
            expr_parent: ExpressionParent::None,
 
            def_type: DefinitionType::Function(FunctionDefinitionId::new_invalid()),
 
            relative_pos_in_block: 0,
 
            statement_buffer: ScopedBuffer::new_reserved(STMT_BUFFER_INIT_CAPACITY),
 
            expression_buffer: ScopedBuffer::new_reserved(EXPR_BUFFER_INIT_CAPACITY),
 
        }
 
    }
 

	
 
    fn reset_state(&mut self) {
 
        self.in_sync = None;
 
        self.in_while = None;
 
        self.cur_scope = None;
 
        self.expr_parent = ExpressionParent::None;
 
        self.def_type = DefinitionType::Function(FunctionDefinitionId::new_invalid());
 
        self.relative_pos_in_block = 0;
 
        self.statement_buffer.clear();
 
        self.expression_buffer.clear();
 
    }
 
}
 

	
 
impl Visitor2 for PassValidationLinking {
 
    //--------------------------------------------------------------------------
 
    // Definition visitors
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_component_definition(&mut self, ctx: &mut Ctx, id: ComponentDefinitionId) -> VisitorResult {
 
        self.reset_state();
 

	
 
        self.def_type = match &ctx.heap[id].variant {
 
            ComponentVariant::Primitive => DefinitionType::Primitive(id),
 
            ComponentVariant::Composite => DefinitionType::Composite(id),
 
        };
 
        self.cur_scope = Some(Scope::Definition(id.upcast()));
 
        self.expr_parent = ExpressionParent::None;
 

	
 
        // Visit statements in component body
 
        let body_id = ctx.heap[id].body;
 
        self.visit_block_stmt(ctx, body_id)?;
 

	
 
        self.check_post_definition_state();
 
        Ok(())
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionDefinitionId) -> VisitorResult {
 
        self.reset_state();
 

	
 
        // Set internal statement indices
 
        self.def_type = DefinitionType::Function(id);
 
        self.cur_scope = Some(Scope::Definition(id.upcast()));
 
        self.expr_parent = ExpressionParent::None;
 

	
 
        // Visit statements in function body
 
        let body_id = ctx.heap[id].body;
 
        self.visit_block_stmt(ctx, body_id)?;
 

	
 
        self.check_post_definition_state();
 
        Ok(())
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Statement visitors
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        self.visit_block_stmt_with_hint(ctx, id, None)
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, _ctx: &mut Ctx, _id: MemoryStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, _ctx: &mut Ctx, _id: ChannelStatementId) -> VisitorResult {
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        let body_id = ctx.heap[id].body;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        // Traverse expression and bodies
 
        let (test_id, true_id, false_id) = {
 
            let stmt = &ctx.heap[id];
 
            (stmt.test, stmt.true_body, stmt.false_body)
 
        };
 

	
 
        debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
        self.expr_parent = ExpressionParent::If(id);
 
        self.visit_expr(ctx, test_id)?;
 
        self.expr_parent = ExpressionParent::None;
 

	
 
        self.visit_block_stmt(ctx, true_id)?;
 
        if let Some(false_id) = false_id {
 
            self.visit_block_stmt(ctx, false_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_while_stmt(&mut self, ctx: &mut Ctx, id: WhileStatementId) -> VisitorResult {
 
        let (test_id, body_id) = {
 
            let stmt = &ctx.heap[id];
 
            (stmt.test, stmt.body)
 
        };
 
        let old_while = self.in_while.replace(id);
 
        debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
        self.expr_parent = ExpressionParent::While(id);
 
        self.visit_expr(ctx, test_id)?;
 
        self.expr_parent = ExpressionParent::None;
 

	
 
        self.visit_block_stmt(ctx, body_id)?;
 
        self.in_while = old_while;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_break_stmt(&mut self, ctx: &mut Ctx, id: BreakStatementId) -> VisitorResult {
 
        // Resolve break target
 
        let target_end_while = {
 
            let stmt = &ctx.heap[id];
 
            let target_while_id = self.resolve_break_or_continue_target(ctx, stmt.span, &stmt.label)?;
 
            let target_while = &ctx.heap[target_while_id];
 
            debug_assert!(target_while.end_while.is_some());
 
            target_while.end_while.unwrap()
 
        };
 

	
 
        let stmt = &mut ctx.heap[id];
 
        stmt.target = Some(target_end_while);
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_continue_stmt(&mut self, ctx: &mut Ctx, id: ContinueStatementId) -> VisitorResult {
 
        // Resolve continue target
 
        let target_while_id = {
 
            let stmt = &ctx.heap[id];
 
            self.resolve_break_or_continue_target(ctx, stmt.span, &stmt.label)?
 
        };
 

	
 
        let stmt = &mut ctx.heap[id];
 
        stmt.target = Some(target_while_id);
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        // Check for validity of synchronous statement
 
        let cur_sync_span = ctx.heap[id].span;
 
        if self.in_sync.is_some() {
 
            // Nested synchronous statement
 
            let old_sync_span = ctx.heap[self.in_sync.unwrap()].span;
 
            return Err(ParseError::new_error_str_at_span(
 
                &ctx.module.source, cur_sync_span, "Illegal nested synchronous statement"
 
            ).with_info_str_at_span(
 
                &ctx.module.source, old_sync_span, "It is nested in this synchronous statement"
 
            ));
 
        }
 

	
 
        if !self.def_type.is_primitive() {
 
            return Err(ParseError::new_error_str_at_span(
 
                &ctx.module.source, cur_sync_span,
 
                "synchronous statements may only be used in primitive components"
 
            ));
 
        }
 

	
 
        let sync_body = ctx.heap[id].body;
 
        let old = self.in_sync.replace(id);
 
        self.visit_block_stmt_with_hint(ctx, sync_body, Some(id))?;
 
        self.in_sync = old;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        // Check if "return" occurs within a function
 
        let stmt = &ctx.heap[id];
 
        if !self.def_type.is_function() {
 
            return Err(ParseError::new_error_str_at_span(
 
                &ctx.module.source, stmt.span,
 
                "return statements may only appear in function bodies"
 
            ));
 
        }
 

	
 
        // If here then we are within a function
 
        debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
        self.expr_parent = ExpressionParent::Return(id);
 
        self.visit_expr(ctx, ctx.heap[id].expression)?;
 
        self.expr_parent = ExpressionParent::None;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_goto_stmt(&mut self, ctx: &mut Ctx, id: GotoStatementId) -> VisitorResult {
 
        let target_id = self.find_label(ctx, &ctx.heap[id].label)?;
 
        ctx.heap[id].target = Some(target_id);
 

	
 
        let target = &ctx.heap[target_id];
 
        if self.in_sync != target.in_sync {
 
            // We can only goto the current scope or outer scopes. Because
 
            // nested sync statements are not allowed so if the value does
 
            // not match, then we must be inside a sync scope
 
            debug_assert!(self.in_sync.is_some());
 
            let goto_stmt = &ctx.heap[id];
 
            let sync_stmt = &ctx.heap[self.in_sync.unwrap()];
 
            return Err(
 
                ParseError::new_error_str_at_span(&ctx.module.source, goto_stmt.span, "goto may not escape the surrounding synchronous block")
 
                .with_postfixed_info(&ctx.module.source, target.label.span, "this is the target of the goto statement")
 
                .with_postfixed_info(&ctx.module.source, sync_stmt.span, "which will jump past this statement")
 
            );
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_new_stmt(&mut self, ctx: &mut Ctx, id: NewStatementId) -> VisitorResult {
 
        // Make sure the new statement occurs inside a composite component
 
        if !self.def_type.is_composite() {
 
            let new_stmt = &ctx.heap[id];
 
            return Err(ParseError::new_error_str_at_span(
 
                &ctx.module.source, new_stmt.span,
 
                "instantiating components may only be done in composite components"
 
            ));
 
        }
 

	
 
        // Recurse into call expression (which will check the expression parent
 
        // to ensure that the "new" statment instantiates a component)
 
        let call_expr_id = ctx.heap[id].expression;
 

	
 
        debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
        self.expr_parent = ExpressionParent::New(id);
 
        self.visit_call_expr(ctx, call_expr_id)?;
 
        self.expr_parent = ExpressionParent::None;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        let expr_id = ctx.heap[id].expression;
 

	
 
        debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
        self.expr_parent = ExpressionParent::ExpressionStmt(id);
 
        self.visit_expr(ctx, expr_id)?;
 
        self.expr_parent = ExpressionParent::None;
 

	
 
        Ok(())
 
    }
 

	
 

	
 
    //--------------------------------------------------------------------------
 
    // Expression visitors
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        let assignment_expr = &mut ctx.heap[id];
 

	
 
        let left_expr_id = assignment_expr.left;
 
        let right_expr_id = assignment_expr.right;
 
        let old_expr_parent = self.expr_parent;
 
        assignment_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, right_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 
        Ok(())
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        let conditional_expr = &mut ctx.heap[id];
 

	
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        conditional_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, true_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 2);
 
        self.visit_expr(ctx, false_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        let binary_expr = &mut ctx.heap[id];
 
        let left_expr_id = binary_expr.left;
 
        let right_expr_id = binary_expr.right;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        binary_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, right_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> VisitorResult {
 
        let unary_expr = &mut ctx.heap[id];
 
        let expr_id = unary_expr.expression;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        unary_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(id.upcast(), 0);
 
        self.visit_expr(ctx, expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        let indexing_expr = &mut ctx.heap[id];
 

	
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        indexing_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, index_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        let slicing_expr = &mut ctx.heap[id];
 

	
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        slicing_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, from_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 2);
 
        self.visit_expr(ctx, to_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitorResult {
 
        let select_expr = &mut ctx.heap[id];
 
        let expr_id = select_expr.subject;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        select_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(id.upcast(), 0);
 
        self.visit_expr(ctx, expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitorResult {
 
        let literal_expr = &mut ctx.heap[id];
 
        let old_expr_parent = self.expr_parent;
 
        literal_expr.parent = old_expr_parent;
 

	
 
        match &mut literal_expr.value {
 
            Literal::Null | Literal::True | Literal::False |
 
            Literal::Character(_) | Literal::String(_) | Literal::Integer(_) => {
 
                // Just the parent has to be set, done above
 
            },
 
            Literal::Struct(literal) => {
 
                let upcast_id = id.upcast();
 
                // Retrieve type definition
 
                let type_definition = ctx.types.get_base_definition(&literal.definition).unwrap();
 
                let struct_definition = type_definition.definition.as_struct();
 

	
 
                // Make sure all fields are specified, none are specified twice
 
                // and all fields exist on the struct definition
 
                let mut specified = Vec::new(); // TODO: @performance
 
                specified.resize(struct_definition.fields.len(), false);
 

	
 
                for field in &mut literal.fields {
 
                    // Find field in the struct definition
 
                    let field_idx = struct_definition.fields.iter().position(|v| v.identifier == field.identifier);
 
                    if field_idx.is_none() {
 
                        let ast_definition = &ctx.heap[literal.definition];
 
                        return Err(ParseError::new_error_at_span(
 
                            &ctx.module.source, field.identifier.span, format!(
 
                                "This field does not exist on the struct '{}'",
 
                                ast_definition.identifier().value.as_str()
 
                            )
 
                        ));
 
                    }
 
                    field.field_idx = field_idx.unwrap();
 

	
 
                    // Check if specified more than once
 
                    if specified[field.field_idx] {
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, field.identifier.position,
 
                            "This field is specified more than once"
 
                        ));
 
                    }
 

	
 
                    specified[field.field_idx] = true;
 
                }
 

	
 
                if !specified.iter().all(|v| *v) {
 
                    // Some fields were not specified
 
                    let mut not_specified = String::new();
 
                    let mut num_not_specified = 0;
 
                    for (def_field_idx, is_specified) in specified.iter().enumerate() {
 
                        if !is_specified {
 
                            if !not_specified.is_empty() { not_specified.push_str(", ") }
 
                            let field_ident = &struct_definition.fields[def_field_idx].identifier;
 
                            not_specified.push_str(field_ident.value.as_str());
 
                            num_not_specified += 1;
 
                        }
 
                    }
 

	
 
                    debug_assert!(num_not_specified > 0);
 
                    let msg = if num_not_specified == 1 {
 
                        format!("not all fields are specified, '{}' is missing", not_specified)
 
                    } else {
 
                        format!("not all fields are specified, [{}] are missing", not_specified)
 
                    };
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module.source, literal.parser_type.elements[0].full_span, msg
 
                    ));
 
                }
 

	
 
                // Need to traverse fields expressions in struct and evaluate
 
                // the poly args
 
                let mut expr_section = self.expression_buffer.start_section();
 
                for field in &literal.fields {
 
                    expr_section.push(field.value);
 
                }
 

	
 
                for expr_idx in 0..expr_section.len() {
 
                    let expr_id = expr_section[expr_idx];
 
                    self.expr_parent = ExpressionParent::Expression(upcast_id, expr_idx as u32);
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 

	
 
                expr_section.forget();
 
            },
 
            Literal::Enum(literal) => {
 
                // Make sure the variant exists
 
                let type_definition = ctx.types.get_base_definition(&literal.definition).unwrap();
 
                let enum_definition = type_definition.definition.as_enum();
 

	
 
                let variant_idx = enum_definition.variants.iter().position(|v| {
 
                    v.identifier == literal.variant
 
                });
 

	
 
                if variant_idx.is_none() {
 
                    let ast_definition = ctx.heap[literal.definition].as_enum();
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module.source, literal.parser_type.elements[0].full_span, format!(
 
                            "the variant '{}' does not exist on the enum '{}'",
 
                            literal.variant.value.as_str(), ast_definition.identifier.value.as_str()
 
                        )
 
                    ));
 
                }
 

	
 
                literal.variant_idx = variant_idx.unwrap();
 
            },
 
            Literal::Union(literal) => {
 
                // Make sure the variant exists
 
                let type_definition = ctx.types.get_base_definition(&literal.definition).unwrap();
 
                let union_definition = type_definition.definition.as_union();
 

	
 
                let variant_idx = union_definition.variants.iter().position(|v| {
 
                    v.identifier == literal.variant
 
                });
 
                if variant_idx.is_none() {
 
                    let ast_definition = ctx.heap[literal.definition].as_union();
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module.source, literal.parser_type.elements[0].full_span, format!(
 
                            "the variant does '{}' does not exist on the union '{}'",
 
                            literal.variant.value.as_str(), ast_definition.identifier.value.as_str()
 
                        )
 
                    ));
 
                }
 

	
 
                literal.variant_idx = variant_idx.unwrap();
 

	
 
                // Make sure the number of specified values matches the expected
 
                // number of embedded values in the union variant.
 
                let union_variant = &union_definition.variants[literal.variant_idx];
 
                if union_variant.embedded.len() != literal.values.len() {
 
                    let ast_definition = ctx.heap[literal.definition].as_union();
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module.source, literal.parser_type.elements[0].full_span, format!(
 
                            "The variant '{}' of union '{}' expects {} embedded values, but {} were specified",
 
                            literal.variant.value.as_str(), ast_definition.identifier.value.as_str(),
 
                            union_variant.embedded.len(), literal.values.len()
 
                        ),
 
                    ))
 
                }
 

	
 
                // Traverse embedded values of union (if any) and evaluate the
 
                // polymorphic arguments
 
                let upcast_id = id.upcast();
 
                let mut expr_section = self.expression_buffer.start_section();
 
                for value in &literal.values {
 
                    expr_section.push(*value);
 
                }
 

	
 
                for expr_idx in 0..expr_section.len() {
 
                    let expr_id = expr_section[expr_idx];
 
                    self.expr_parent = ExpressionParent::Expression(upcast_id, expr_idx as u32);
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 

	
 
                expr_section.forget();
 
            },
 
            Literal::Array(literal) => {
 
                // Visit all expressions in the array
 
                let upcast_id = id.upcast();
 
                let expr_section = self.expression_buffer.start_section_initialized(literal);
 
                for expr_idx in 0..expr_section.len() {
 
                    let expr_id = expr_section[expr_idx];
 
                    self.expr_parent = ExpressionParent::Expression(upcast_id, expr_idx as u32);
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 

	
 
                expr_section.forget();
 
            }
 
        }
 

	
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitorResult {
 
        let call_expr = &mut ctx.heap[id];
 

	
 
        // Check whether the method is allowed to be called within the code's
 
        // context (in sync, definition type, etc.)
 
        let mut expected_wrapping_new_stmt = false;
 
        match &mut call_expr.method {
 
            Method::Get => {
 
                if !self.def_type.is_primitive() {
 
                    return Err(ParseError::new_error_str_at_span(
 
                        &ctx.module.source, call_expr.span,
 
                        "a call to 'get' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError::new_error_str_at_span(
 
                        &ctx.module.source, call_expr.span,
 
                        "a call to 'get' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
            },
 
            Method::Put => {
 
                if !self.def_type.is_primitive() {
 
                    return Err(ParseError::new_error_str_at_span(
 
                        &ctx.module.source, call_expr.span,
 
                        "a call to 'put' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError::new_error_str_at_span(
 
                        &ctx.module.source, call_expr.span,
 
                        "a call to 'put' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
            },
 
            Method::Fires => {
 
                if !self.def_type.is_primitive() {
 
                    return Err(ParseError::new_error_str_at_span(
 
                        &ctx.module.source, call_expr.span,
 
                        "a call to 'fires' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError::new_error_str_at_span(
 
                        &ctx.module.source, call_expr.span,
 
                        "a call to 'fires' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
            },
 
            Method::Create => {},
 
            Method::Length => {},
 
            Method::Assert => {
 
                if self.def_type.is_function() {
 
                    return Err(ParseError::new_error_str_at_span(
 
                        &ctx.module.source, call_expr.span,
 
                        "assert statement may only occur in components"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError::new_error_str_at_span(
 
                        &ctx.module.source, call_expr.span,
 
                        "assert statements may only occur inside synchronous blocks"
 
                    ));
 
                }
 
            },
 
            Method::UserFunction => {},
 
            Method::UserComponent => {
 
                expected_wrapping_new_stmt = true;
 
            },
 
        }
 

	
 
        if expected_wrapping_new_stmt {
 
            if self.expr_parent != ExpressionParent::New {
 
                return Err(ParseError::new_error_str_at_span(
 
                    &ctx.module.source, call_expr.span,
 
                    "cannot call a component, it can only be instantiated by using 'new'"
 
                ));
 
            }
 
        } else {
 
            if self.expr_parent == ExpressionParent::New {
 
                return Err(ParseError::new_error_str_at_span(
 
                    &ctx.module.source, call_expr.span,
 
                    "only components can be instantiated"
 
                ));
 
            }
 
        }
 

	
 
        // Check the number of arguments
 
        let call_definition = ctx.types.get_base_definition(&call_expr.definition).unwrap();
 
        let num_expected_args = match &call_definition.definition {
 
            DefinedTypeVariant::Function(definition) => definition.arguments.len(),
 
            DefinedTypeVariant::Component(definition) => definition.arguments.len(),
 
            v => unreachable!("encountered {:?} type in call expression", v),
 
        };
 

	
 
        let num_provided_args = call_expr.arguments.len();
 
        if num_provided_args != num_expected_args {
 
            let argument_text = if num_expected_args == 1 { "argument" } else { "arguments" };
 
            return Err(ParseError::new_error_at_span(
 
                &ctx.module.source, call_expr.span, format!(
 
                    "expected {} {}, but {} were provided",
 
                    num_expected_args, argument_text, num_provided_args
 
                )
 
            ));
 
        }
 

	
 
        // Recurse into all of the arguments and set the expression's parent
 
        let upcast_id = id.upcast();
 

	
 
        let section = self.expression_buffer.start_section_initialized(&call_expr.arguments);
 
        let old_expr_parent = self.expr_parent;
 
        call_expr.parent = old_expr_parent;
 

	
 
        for arg_expr_idx in 0..section.len() {
 
            let arg_expr_id = section[arg_expr_idx];
 
            self.expr_parent = ExpressionParent::Expression(upcast_id, arg_expr_idx as u32);
 
            self.visit_expr(ctx, arg_expr_id)?;
 
        }
 

	
 
        section.forget();
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitorResult {
 
        let var_expr = &ctx.heap[id];
 
        let variable_id = self.find_variable(ctx, self.relative_pos_in_block, &var_expr.identifier)?;
 
        let var_expr = &mut ctx.heap[id];
 
        var_expr.declaration = Some(variable_id);
 
        var_expr.parent = self.expr_parent;
 

	
 
        Ok(())
 
    }
 
}
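
// A compact stand-alone model of the parent-tracking scheme used by the expression
// visitors above: before recursing into a node's children the visitor swaps its
// `expr_parent` for a value naming the current node and the child index, and
// restores the old value afterwards. The types below are simplified stand-ins
// invented for this sketch; they are not the compiler's AST.
#[allow(dead_code)]
fn illustrate_expression_parent_tracking() {
    struct Node { children: Vec<usize>, parent: Option<(usize, u32)> }
    struct Visitor { expr_parent: Option<(usize, u32)> }

    impl Visitor {
        fn visit(&mut self, nodes: &mut Vec<Node>, id: usize) {
            // Annotate the node with the parent under which it was reached.
            nodes[id].parent = self.expr_parent;

            let children = nodes[id].children.clone();
            let old_parent = self.expr_parent;
            for (index, child_id) in children.into_iter().enumerate() {
                self.expr_parent = Some((id, index as u32));
                self.visit(nodes, child_id);
            }
            self.expr_parent = old_parent;
        }
    }

    // Node 0 plays the role of the call expression, nodes 1 and 2 of its arguments.
    let mut nodes = vec![
        Node{ children: vec![1, 2], parent: None },
        Node{ children: Vec::new(), parent: None },
        Node{ children: Vec::new(), parent: None },
    ];
    let mut visitor = Visitor{ expr_parent: None };
    visitor.visit(&mut nodes, 0);
    assert_eq!(nodes[1].parent, Some((0, 0)));
    assert_eq!(nodes[2].parent, Some((0, 1)));
}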
 

	
 
impl PassValidationLinking {
 
    //--------------------------------------------------------------------------
 
    // Special traversal
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_block_stmt_with_hint(&mut self, ctx: &mut Ctx, id: BlockStatementId, hint: Option<SynchronousStatementId>) -> VisitorResult {
 
        // Set parent scope and relative position in the parent scope. Remember
 
        // these values to set them back to the old values when we're done with
 
        // the traversal of the block's statements.
 
        let body = &mut ctx.heap[id];
 
        body.parent_scope = self.cur_scope.clone();
 
        body.relative_pos_in_parent = self.relative_pos_in_block;
 

	
 
        let old_scope = self.cur_scope.replace(match hint {
 
            Some(sync_id) => Scope::Synchronous((sync_id, id)),
 
            None => Scope::Regular(id),
 
        });
 
        let old_relative_pos = self.relative_pos_in_block;
 

	
 
        // Copy statement IDs into buffer
 
        let statement_section = self.statement_buffer.start_section_initialized(&body.statements);
 

	
 
        // Perform the breadth-first pass. Its main purpose is to find labeled
 
        // statements such that we can find the `goto`-targets immediately when
 
        // performing the depth-first pass
 
        for stmt_idx in 0..statement_section.len() {
 
            self.relative_pos_in_block = stmt_idx as u32;
 
            self.visit_statement_for_locals_labels_and_in_sync(ctx, self.relative_pos_in_block, statement_section[stmt_idx])?;
 
        }
 

	
 
        for stmt_idx in 0..statement_section.len() {
 
            self.relative_pos_in_block = stmt_idx as u32;
 
            self.visit_stmt(ctx, statement_section[stmt_idx])?;
 
        }
 

	
 
        self.cur_scope = old_scope;
 
        self.relative_pos_in_block = old_relative_pos;
 
        statement_section.forget();
 

	
 
        Ok(())
 
    }
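
    // A compressed model of the two-pass traversal above, using simplified stand-in
    // types invented for this sketch: the first (breadth) pass only records label
    // names, so that the second (depth) pass can resolve a `goto` even when it
    // appears before the label it targets.
    #[allow(dead_code)]
    fn illustrate_two_pass_label_resolution() {
        enum Stmt { Labeled(&'static str), Goto(&'static str) }
        let statements = [Stmt::Goto("end"), Stmt::Labeled("end")];

        // Breadth pass: collect all labels in the block.
        let mut labels = Vec::new();
        for stmt in &statements {
            if let Stmt::Labeled(name) = stmt { labels.push(*name); }
        }

        // Depth pass: every goto can now be resolved, including forward references.
        for stmt in &statements {
            if let Stmt::Goto(target) = stmt {
                assert!(labels.contains(target));
            }
        }
    }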
 

	
 
    fn visit_statement_for_locals_labels_and_in_sync(&mut self, ctx: &mut Ctx, relative_pos: u32, id: StatementId) -> VisitorResult {
 
        let statement = &mut ctx.heap[id];
 
        match statement {
 
            Statement::Local(stmt) => {
 
                match stmt {
 
                    LocalStatement::Memory(local) => {
 
                        let variable_id = local.variable;
 
                        self.checked_local_add(ctx, relative_pos, variable_id)?;
 
                    },
 
                    LocalStatement::Channel(local) => {
 
                        let from_id = local.from;
 
                        let to_id = local.to;
 
                        self.checked_local_add(ctx, relative_pos, from_id)?;
 
                        self.checked_local_add(ctx, relative_pos, to_id)?;
 
                    }
 
                }
 
            }
 
            Statement::Labeled(stmt) => {
 
                let stmt_id = stmt.this;
 
                self.checked_label_add(ctx, relative_pos, self.in_sync, stmt_id)?;
 
                self.visit_statement_for_locals_labels_and_in_sync(ctx, relative_pos, stmt.body)?;
 
            },
 
            Statement::While(stmt) => {
 
                stmt.in_sync = self.in_sync;
 
            },
 
            _ => {},
 
        }
 

	
 
        return Ok(())
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Utilities
 
    //--------------------------------------------------------------------------
 

	
 
    /// Adds a local variable to the current scope. It will also annotate the
 
    /// `Local` in the AST with its relative position in the block.
 
    fn checked_local_add(&mut self, ctx: &mut Ctx, relative_pos: u32, id: LocalId) -> Result<(), ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        // Make sure we do not conflict with any global symbols
 
        let cur_scope = SymbolScope::Definition(self.def_type.definition_id());
 
        {
 
            let ident = &ctx.heap[id].identifier;
 
            if let Some(symbol) = ctx.symbols.get_symbol_by_name(cur_scope, &ident.value.as_bytes()) {
 
                return Err(ParseError::new_error_str_at_span(
 
                    &ctx.module.source, symbol.variant.span_of_introduction(&ctx.heap),
 
                    "local variable declaration conflicts with symbol"
 
                ).with_postfixed_info(
 
                    &ctx.module.source, symbol.position, "the conflicting symbol is introduced here"
 
                ));
 
            }
 
        }
 

	
 
        let local = &mut ctx.heap[id];
 
        local.relative_pos_in_block = relative_pos;
 

	
 
        // Make sure we do not shadow any variables in any of the enclosing scopes.

        // Note that a variable declared later in a parent scope does not conflict.
 
        let local = &ctx.heap[id];
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        let mut local_relative_pos = self.relative_pos_in_block;
 

	
 
        loop {
 
            debug_assert!(scope.is_block(), "scope is not a block");
 
            let block = &ctx.heap[scope.to_block()];
 
            for other_local_id in &block.locals {
 
                let other_local = &ctx.heap[*other_local_id];
 
                // The position check handles the case where a variable with the

                // same name is defined in an enclosing scope, but only after the

                // point at which the current block appears; that is not a conflict.
 
                if local.this != *other_local_id &&
 
                    local_relative_pos >= other_local.relative_pos_in_block &&
 
                    local.identifier == other_local.identifier {
 
                    // Collision within this scope
 
                    return Err(
 
                        ParseError::new_error(&ctx.module.source, local.position, "Local variable name conflicts with another variable")
 
                            .with_postfixed_info(&ctx.module.source, other_local.position, "Previous variable is found here")
 
                    );
 
                }
 
            }
 

	
 
            // Current scope is fine, move to parent scope if any
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if let Scope::Definition(definition_id) = scope {
 
                // At outer scope, check parameters of function/component
 
                for parameter_id in ctx.heap[*definition_id].parameters() {
 
                    let parameter = &ctx.heap[*parameter_id];
 
                    if local.identifier == parameter.identifier {
 
                        return Err(
 
                            ParseError::new_error(&ctx.module.source, local.position, "Local variable name conflicts with parameter")
 
                                .with_postfixed_info(&ctx.module.source, parameter.position, "Parameter definition is found here")
 
                        );
 
                    }
 
                }
 

	
 
                break;
 
            }
 

	
 
            // If we get here, the parent is another block-like scope
 
            local_relative_pos = ctx.heap[scope.to_block()].relative_pos_in_parent;
 
        }
 

	
 
        // No collisions at all
 
        let block = &mut ctx.heap[self.cur_scope.as_ref().unwrap().to_block()];
 
        block.locals.push(id);
 

	
 
        Ok(())
 
    }
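
    // A condensed model of the rule enforced above, with simplified types invented
    // for this sketch: a new local only conflicts with an existing local that was
    // declared at or before the new local's relative position; a local declared
    // later in an enclosing block does not conflict.
    #[allow(dead_code)]
    fn illustrate_shadowing_rule() {
        struct Local { name: &'static str, relative_pos: u32 }
        // Locals already registered in the enclosing block.
        let existing = [
            Local{ name: "x", relative_pos: 1 },
            Local{ name: "y", relative_pos: 5 },
        ];
        let conflicts = |name: &str, pos: u32| existing.iter()
            .any(|other| pos >= other.relative_pos && other.name == name);
        assert!(conflicts("x", 3));  // "x" was declared earlier: shadowing is rejected
        assert!(!conflicts("y", 3)); // "y" is only declared later: no conflict
    }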
 

	
 
    /// Finds a variable in the visitor's scope that must appear before the
 
    /// specified relative position within that block.
 
    fn find_variable(&self, ctx: &Ctx, mut relative_pos: u32, identifier: &Identifier) -> Result<VariableId, ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        // TODO: May still refer to an alias of a global symbol using a single
 
        //  identifier in the namespace.
 
        // No need to use an iterator over namespaces here
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        
 
        loop {
 
            debug_assert!(scope.is_block());
 
            let block = &ctx.heap[scope.to_block()];
 
            
 
            for local_id in &block.locals {
 
                let local = &ctx.heap[*local_id];
 
                
 
                if local.relative_pos_in_block < relative_pos && identifier.matches_identifier(&local.identifier) {
 
                    return Ok(local_id.upcast());
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some());
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                // Definition scope, need to check arguments to definition
 
                match scope {
 
                    Scope::Definition(definition_id) => {
 
                        let definition = &ctx.heap[*definition_id];
 
                        for parameter_id in definition.parameters() {
 
                            let parameter = &ctx.heap[*parameter_id];
 
                            if identifier.matches_identifier(&parameter.identifier) {
 
                                return Ok(parameter_id.upcast());
 
                            }
 
                        }
 
                    },
 
                    _ => unreachable!(),
 
                }
 

	
 
                // Variable could not be found
 
                return Err(ParseError::new_error_str_at_span(
 
                    &ctx.module.source, identifier.span, "unresolved variable"
 
                ));
 
            } else {
 
                relative_pos = block.relative_pos_in_parent;
 
            }
 
        }
 
    }
 

	
 
    /// Adds a particular label to the current scope. Will return an error if
 
    /// there is another label with the same name visible in the current scope.
 
    fn checked_label_add(&mut self, ctx: &mut Ctx, relative_pos: u32, in_sync: Option<SynchronousStatementId>, id: LabeledStatementId) -> Result<(), ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        // Make sure the label is not defined within the current scope or any of the

        // parent scopes.
 
        let label = &mut ctx.heap[id];
 
        label.relative_pos_in_block = relative_pos;
 
        label.in_sync = in_sync;
 

	
 
        let label = &*label;
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 

	
 
        loop {
 
            debug_assert!(scope.is_block(), "scope is not a block");
 
            let block = &ctx.heap[scope.to_block()];
 
            for other_label_id in &block.labels {
 
                let other_label = &ctx.heap[*other_label_id];
 
                if other_label.label == label.label {
 
                    // Collision
 
                    return Err(ParseError::new_error_str_at_span(
 
                        &ctx.module.source, label.label.span, "label name is used more than once"
 
                    ).with_postfixed_info(
 
                        &ctx.module.source, other_label.label.span, "the other label is found here"
 
                    ));
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                break;
 
            }
 
        }
 

	
 
        // No collisions
 
        let block = &mut ctx.heap[self.cur_scope.as_ref().unwrap().to_block()];
 
        block.labels.push(id);
 

	
 
        Ok(())
 
    }
 

	
 
    /// Finds a particular labeled statement by its identifier. Once found it
 
    /// will make sure that the target label does not skip over any variable
 
    /// declarations within the scope in which the label was found.
 
    fn find_label(&self, ctx: &Ctx, identifier: &Identifier) -> Result<LabeledStatementId, ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        loop {
 
            debug_assert!(scope.is_block(), "scope is not a block");
 
            let relative_scope_pos = ctx.heap[scope.to_block()].relative_pos_in_parent;
 

	
 
            let block = &ctx.heap[scope.to_block()];
 
            for label_id in &block.labels {
 
                let label = &ctx.heap[*label_id];
 
                if label.label == *identifier {
 
                    for local_id in &block.locals {
 
                        // TODO: Better to do this in control flow analysis, it
 
                        //  is legal to skip over a variable declaration if it
 
                        //  is not actually being used. I might be missing
 
                        //  something here when laying out the bytecode...
 
                        let local = &ctx.heap[*local_id];
 
                        if local.relative_pos_in_block > relative_scope_pos && local.relative_pos_in_block < label.relative_pos_in_block {
 
                            return Err(
 
                                ParseError::new_error_str_at_span(&ctx.module.source, identifier.span, "this target label skips over a variable declaration")
 
                                .with_postfixed_info(&ctx.module.source, label.label.span, "because it jumps to this label")
 
                                .with_postfixed_info(&ctx.module.source, local.identifier.span, "which skips over this variable")
 
                            );
 
                        }
 
                    }
 
                    return Ok(*label_id);
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                return Err(ParseError::new_error_str_at_span(
 
                    &ctx.module.source, identifier.span, "could not find this label"
 
                ));
 
            }
 

	
 
        }
 
    }
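
    // A small model of the goto-target rule checked above, with invented values:
    // a jump is rejected when some local is declared after the start of the scope
    // holding the label, but before the label itself, because jumping there would
    // skip the local's declaration.
    #[allow(dead_code)]
    fn illustrate_label_skip_rule() {
        let relative_scope_pos = 0u32;
        let label_pos = 4u32;
        let local_positions = [2u32, 6u32];
        let skips_declaration = local_positions.iter()
            .any(|&local| local > relative_scope_pos && local < label_pos);
        assert!(skips_declaration); // the local at position 2 would be skipped
    }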
 

	
 
    /// Checks whether the body of the provided while statement is a block that

    /// is one of our current parent scopes.
 
    fn has_parent_while_scope(&self, ctx: &Ctx, id: WhileStatementId) -> bool {
 
        debug_assert!(self.cur_scope.is_some());
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        let while_stmt = &ctx.heap[id];
 
        loop {
 
            debug_assert!(scope.is_block());
 
            let block = scope.to_block();
 
            if while_stmt.body == block.upcast() {
 
                return true;
 
            }
src/protocol/parser/symbol_table.rs
 
// TODO: Maybe allow namespaced-aliased imports. It is currently not possible
 
//  to express the following:
 
//      import Module.Submodule as SubMod
 
//      import SubMod::{Symbol}
 
//  And it is especially not possible to express the following:
 
//      import SubMod::{Symbol}
 
//      import Module.Submodule as SubMod
 
/// symbol_table.rs
 
///
 
/// The datastructure used to lookup symbols within particular scopes. Scopes
 
/// may be module-level or definition level, although imports and definitions
 
/// within definitions are currently not allowed.
 
///
 
/// TODO: Once the compiler has matured, find out ways to optimize to prevent
 
///     the repeated HashMap lookup.
 

	
 
use std::collections::HashMap;
 
use std::collections::hash_map::Entry;
 

	
 
use crate::protocol::input_source::*;
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::collections::*;
 

	
 
use std::collections::{HashMap, hash_map::Entry};
 
use crate::protocol::parser::LexedModule;
 
const RESERVED_SYMBOLS: usize = 32;
 

	
 
#[derive(PartialEq, Eq, Hash)]
 
struct SymbolKey {
 
    module_id: RootId,
 
    symbol_name: Vec<u8>,
 
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
 
pub enum SymbolScope {
 
    Global,
 
    Module(RootId),
 
    Definition(DefinitionId),
 
}
 

	
 
impl SymbolKey {
 
    fn from_identifier(module_id: RootId, symbol: &Identifier) -> Self {
 
        Self{ module_id, symbol_name: symbol.value.clone() }
 
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 
pub enum SymbolClass {
 
    Module,
 
    Struct,
 
    Enum,
 
    Union,
 
    Function,
 
    Component
 
}
 

	
 
    fn from_namespaced_identifier(module_id: RootId, symbol: &NamespacedIdentifier) -> Self {
 
        Self{ module_id, symbol_name: symbol.strip_poly_args() }
 
    }
 
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 
pub enum DefinitionClass {
 
    Struct,
 
    Enum,
 
    Union,
 
    Function,
 
    Component,
 
}
 

	
 
pub(crate) enum Symbol {
 
    Namespace(RootId),
 
    Definition((RootId, DefinitionId)),
 
impl DefinitionClass {
 
    fn as_symbol_class(&self) -> SymbolClass {
 
        match self {
 
            DefinitionClass::Struct => SymbolClass::Struct,
 
            DefinitionClass::Enum => SymbolClass::Enum,
 
            DefinitionClass::Union => SymbolClass::Union,
 
            DefinitionClass::Function => SymbolClass::Function,
 
            DefinitionClass::Component => SymbolClass::Component,
 
        }
 
    }
 
}
 

	
 
pub(crate) struct SymbolValue {
 
    // Position is the place where the symbol is introduced to a module (this
 
    // position always corresponds to the module whose RootId is stored in the
 
    // `SymbolKey` associated with this `SymbolValue`). For a definition this
 
    // is the position where the symbol is defined, for an import this is the
 
    // position of the import statement.
 
    pub(crate) position: InputPosition,
 
    pub(crate) symbol: Symbol,
 
struct ScopedSymbols {
 
    scope: SymbolScope,
 
    parent_scope: Option<SymbolScope>,
 
    child_scopes: Vec<SymbolScope>,
 
    symbols: Vec<Symbol>,
 
}
 

	
 
impl SymbolValue {
 
    pub(crate) fn is_namespace(&self) -> bool {
 
        match &self.symbol {
 
            Symbol::Namespace(_) => true,
 
            _ => false
 
impl ScopedSymbols {
 
    fn get_symbol<'a>(&'a self, name: &StringRef) -> Option<&'a Symbol> {
 
        for symbol in self.symbols.iter() {
 
            if symbol.name == *name {
 
                return Some(symbol);
 
            }
 
        }
 
    pub(crate) fn as_namespace(&self) -> Option<RootId> {
 
        match &self.symbol {
 
            Symbol::Namespace(root_id) => Some(*root_id),
 
            _ => None,
 

	
 
        None
 
    }
 
}
 

	
 
    pub(crate) fn as_definition(&self) -> Option<(RootId, DefinitionId)> {
 
        match &self.symbol {
 
            Symbol::Definition((root_id, definition_id)) => Some((*root_id, *definition_id)),
 
            _ => None,
 
#[derive(Debug, Clone)]
 
pub struct SymbolModule {
 
    pub root_id: RootId,
 
    pub introduced_at: ImportId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SymbolDefinition {
 
    // Definition location (not necessarily the place where the symbol
 
    // is introduced, as it may be imported). Builtin symbols will have invalid
 
    // spans and module IDs
 
    pub defined_in_module: RootId,
 
    pub defined_in_scope: SymbolScope,
 
    pub definition_span: InputSpan, // full span of definition
 
    pub identifier_span: InputSpan, // span of just the identifier
 
    // Location where the symbol is introduced in its scope
 
    pub imported_at: Option<ImportId>,
 
    // Definition in the heap, with a utility enum to determine its
 
    // class if the ID is not needed.
 
    pub class: DefinitionClass,
 
    pub definition_id: DefinitionId,
 
}
 

	
 
impl SymbolDefinition {
 
    /// Clones the entire data structure, but replaces the `imported_at` field
 
    /// with the supplied `ImportId`.
 
    pub(crate) fn into_imported(mut self, imported_at: ImportId) -> Self {
 
        self.imported_at = Some(imported_at);
 
        self
 
    }
 
/// `SymbolTable` is responsible for two parts of the parsing process: firstly
 
/// it ensures that there are no clashing symbol definitions within each file,
 
/// and secondly it will resolve all symbols within a module to their
 
/// appropriate definitions (in case of enums, functions, etc.) and namespaces
 
/// (currently only external modules can act as namespaces). If a symbol clashes
 
/// or if a symbol cannot be resolved this will be an error.
 
///
 
/// Within the compilation process the symbol table is responsible for resolving
 
/// namespaced identifiers (e.g. Module::Enum::EnumVariant) to the appropriate
 
/// definition (i.e. not namespaces; as the language has no way to use
 
/// namespaces except for using them in namespaced identifiers).
 
pub(crate) struct SymbolTable {
 
    // Lookup from module name (not any aliases) to the root id
 
    module_lookup: HashMap<Vec<u8>, RootId>,
 
    // Lookup from within a module, to a particular imported (potentially
 
    // aliased) or defined symbol. Basically speaking: if the source code of a
 
    // module contains correctly imported/defined symbols, then this lookup
 
    // will always return the corresponding definition
 
    symbol_lookup: HashMap<SymbolKey, SymbolValue>,
 
}
 

	
 
impl SymbolTable {
 
    pub(crate) fn new() -> Self {
 
        Self{ module_lookup: HashMap::new(), symbol_lookup: HashMap::new() }
 
    }
 

	
 
    pub(crate) fn build(&mut self, heap: &Heap, modules: &[LexedModule]) -> Result<(), ParseError> {
 
        // Sanity checks
 
        debug_assert!(self.module_lookup.is_empty());
 
        debug_assert!(self.symbol_lookup.is_empty());
 
        if cfg!(debug_assertions) {
 
            for (index, module) in modules.iter().enumerate() {
 
                debug_assert_eq!(
 
                    index, module.root_id.index as usize,
 
                    "module RootId does not correspond to LexedModule index"
 
                )
 
            }
 
        }
 

	
 
        // Preparation: create a lookup from module name to root id. This does
 
        // not take aliasing into account.
 
        self.module_lookup.reserve(modules.len());
 
        for module in modules {
 
            // TODO: Maybe put duplicate module name checking here?
 
            // TODO: @string
 
            self.module_lookup.insert(module.module_name.clone(), module.root_id);
 
        }
 

	
 
        // Preparation: determine total number of imports we will be inserting
 
        // into the lookup table. We could just iterate over the arena, but then
 
        // we don't know the source file the import belongs to.
 
        let mut lookup_reserve_size = 0;
 
        for module in modules {
 
            let module_root = &heap[module.root_id];
 
            for import_id in &module_root.imports {
 
                match &heap[*import_id] {
 
                    Import::Module(_) => lookup_reserve_size += 1,
 
                    Import::Symbols(import) => {
 
                        if import.symbols.is_empty() {
 
                            // Add all symbols from the other module
 
                            match self.module_lookup.get(&import.module) {
 
                                Some(target_module_id) => {
 
                                    lookup_reserve_size += heap[*target_module_id].definitions.len()
 
                                },
 
                                None => {
 
                                    return Err(
 
                                        ParseError::new_error(&module.source, import.position, "Cannot resolve module")
 
                                    );
 
                                }
 
#[derive(Debug, Clone)]
 
pub enum SymbolVariant {
 
    Module(SymbolModule),
 
    Definition(SymbolDefinition),
 
}
 

	
 
impl SymbolVariant {
 
    /// Returns the span at which the item was introduced. For an imported
 
    /// item (all modules, and imported types) this returns the span of the
 
    /// import. For a defined type this returns the span of the identifier
 
    pub(crate) fn span_of_introduction(&self, heap: &Heap) -> InputSpan {
 
        match self {
 
            SymbolVariant::Module(v) => heap[v.introduced_at].span(),
 
            SymbolVariant::Definition(v) => if let Some(import_id) = v.imported_at {
 
                heap[import_id].span()
 
            } else {
 
                            lookup_reserve_size += import.symbols.len();
 
                v.identifier_span
 
            },
 
        }
 
    }
 

	
 
    pub(crate) fn as_module(&self) -> &SymbolModule {
 
        match self {
 
            SymbolVariant::Module(v) => v,
 
            SymbolVariant::Definition(_) => unreachable!("called 'as_module' on {:?}", self),
 
        }
 
    }
 

	
 
            lookup_reserve_size += module_root.definitions.len();
 
    pub(crate) fn as_definition(&self) -> &SymbolDefinition {
 
        match self {
 
            SymbolVariant::Module(v) => unreachable!("called 'as_definition' on {:?}", self),
 
            SymbolVariant::Definition(v) => v,
 
        }
 
    }
 

	
 
        self.symbol_lookup.reserve(lookup_reserve_size);
 

	
 
        // First pass: we go through all of the modules and add lookups to
 
        // symbols that are defined within that module. Cross-module imports are
 
        // not yet resolved
 
        for module in modules {
 
            let root = &heap[module.root_id];
 
            for definition_id in &root.definitions {
 
                let definition = &heap[*definition_id];
 
                let identifier = definition.identifier();
 
                if let Err(previous_position) = self.add_definition_symbol(
 
                    identifier.position, SymbolKey::from_identifier(module.root_id, &identifier),
 
                    module.root_id, *definition_id
 
                ) {
 
                    return Err(
 
                        ParseError::new_error(&module.source, definition.position(), "Symbol is multiply defined")
 
                            .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                    )
 
    pub(crate) fn as_definition_mut(&mut self) -> &mut SymbolDefinition {
 
        match self {
 
            SymbolVariant::Module(v) => unreachable!("called 'as_definition_mut' on {:?}", self),
 
            SymbolVariant::Definition(v) => v,
 
        }
 
    }
 
}
 

	
 
        // Second pass: now that we can find symbols in modules, we can resolve
 
        // all imports (if they're correct, that is)
 
        for module in modules {
 
            let root = &heap[module.root_id];
 
            for import_id in &root.imports {
 
                let import = &heap[*import_id];
 
                match import {
 
                    Import::Module(import) => {
 
                        // Find the module using its name
 
                        let target_root_id = self.resolve_module(&import.module);
 
                        if target_root_id.is_none() {
 
                            return Err(ParseError::new_error(&module.source, import.position, "Could not resolve module"));
 
                        }
 
                        let target_root_id = target_root_id.unwrap();
 
                        if target_root_id == module.root_id {
 
                            return Err(ParseError::new_error(&module.source, import.position, "Illegal import of self"));
 
/// TODO: @Cleanup - remove clone everywhere
 
#[derive(Clone)]
 
pub struct Symbol {
 
    pub name: StringRef<'static>,
 
    pub variant: SymbolVariant,
 
}
 

	
 
                        // Add the target module under its alias
 
                        if let Err(previous_position) = self.add_namespace_symbol(
 
                            import.position, SymbolKey::from_identifier(module.root_id, &import.alias),
 
                            target_root_id
 
                        ) {
 
                            return Err(
 
                                ParseError::new_error(&module.source, import.position, "Symbol is multiply defined")
 
                                    .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                            );
 
impl Symbol {
 
    pub(crate) fn class(&self) -> SymbolClass {
 
        match &self.variant {
 
            SymbolVariant::Module(_) => SymbolClass::Module,
 
            SymbolVariant::Definition(data) => data.class.as_symbol_class(),
 
        }
 
                    },
 
                    Import::Symbols(import) => {
 
                        // Find the target module using its name
 
                        let target_root_id = self.resolve_module(&import.module);
 
                        if target_root_id.is_none() {
 
                            return Err(ParseError::new_error(&module.source, import.position, "Could not resolve module of symbol imports"));
 
                        }
 
                        let target_root_id = target_root_id.unwrap();
 
                        if target_root_id == module.root_id {
 
                            return Err(ParseError::new_error(&module.source, import.position, "Illegal import of self"));
 
                        }
 

	
 
                        // Determine which symbols to import
 
                        if import.symbols.is_empty() {
 
                            // Import of all symbols, not using any aliases
 
                            for definition_id in &heap[target_root_id].definitions {
 
                                let definition = &heap[*definition_id];
 
                                let identifier = definition.identifier();
 
                                if let Err(previous_position) = self.add_definition_symbol(
 
                                    import.position, SymbolKey::from_identifier(module.root_id, identifier),
 
                                    target_root_id, *definition_id
 
                                ) {
 
                                    return Err(
 
                                        ParseError::new_error(
 
                                            &module.source, import.position,
 
                                            &format!("Imported symbol '{}' is already defined", String::from_utf8_lossy(&identifier.value))
 
                                        )
 
                                        .with_postfixed_info(
 
                                            &modules[target_root_id.index as usize].source,
 
                                            definition.position(),
 
                                            "The imported symbol is defined here"
 
                                        )
 
                                        .with_postfixed_info(
 
                                            &module.source, previous_position, "And is previously defined here"
 
                                        )
 
                                    )
 
    }
 
}
 
                        } else {
 
                            // Import of specific symbols, optionally using aliases
 
                            for symbol in &import.symbols {
 
                                // Because we have already added per-module definitions, we can use
 
                                // the table to lookup this particular symbol. Note: within a single
 
                                // module a namespace-import and a symbol-import may not collide.
 
                                // Hence per-module symbols are unique.
 
                                // However: if we import a symbol from another module, we don't want
 
                                // to "import a module's imported symbol". And so if we do find
 
                                // a symbol match, we need to make sure it is a definition from
 
                                // within that module by checking `source_root_id == target_root_id`
 
                                let key = SymbolKey::from_identifier(target_root_id, &symbol.name);
 
                                let target_symbol = self.symbol_lookup.get(&key);
 
                                let symbol_definition_id = match target_symbol {
 
                                    Some(target_symbol) => {
 
                                        match target_symbol.symbol {
 
                                            Symbol::Definition((symbol_root_id, symbol_definition_id)) => {
 
                                                if symbol_root_id == target_root_id {
 
                                                    Some(symbol_definition_id)
 
                                                } else {
 
                                                    // This is imported within the target module, and not
 
                                                    // defined within the target module
 
                                                    None
 
                                                }
 
                                            },
 
                                            Symbol::Namespace(_) => {
 
                                                // We don't import a module's "module import"
 
                                                None
 
                                            }
 
                                        }
 
                                    },
 
                                    None => None
 
                                };
 

	
 
                                if symbol_definition_id.is_none() {
 
                                    return Err(
 
                                        ParseError::new_error(&module.source, symbol.position, "Could not resolve symbol")
 
                                    )
 
pub struct SymbolTable {
 
    module_lookup: HashMap<StringRef<'static>, RootId>,
 
    scope_lookup: HashMap<SymbolScope, ScopedSymbols>,
 
}
 
                                let symbol_definition_id = symbol_definition_id.unwrap();
 

	
 
                                if let Err(previous_position) = self.add_definition_symbol(
 
                                    symbol.position, SymbolKey::from_identifier(module.root_id, &symbol.alias),
 
                                    target_root_id, symbol_definition_id
 
                                ) {
 
                                    return Err(
 
                                        ParseError::new_error(&module.source, symbol.position, "Symbol is multiply defined")
 
                                            .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                                    )
 
impl SymbolTable {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            module_lookup: HashMap::new(),
 
            scope_lookup: HashMap::new(),
 
        }
 
    }
 
    /// Inserts a new module by its name. Upon module naming conflict the
 
    /// previously associated `RootId` will be returned.
 
    pub(crate) fn insert_module(&mut self, module_name: StringRef<'static>, root_id: RootId) -> Result<(), RootId> {
 
        match self.module_lookup.entry(module_name) {
 
            Entry::Occupied(v) => {
 
                Err(*v.get())
 
            },
 
            Entry::Vacant(v) => {
 
                v.insert(root_id);
 
                Ok(())
 
            }
 
        }
 
    }
 

	
 
    /// Retrieves module `RootId` by name
 
    pub(crate) fn get_module_by_name(&mut self, name: &[u8]) -> Option<RootId> {
 
        let string_ref = StringRef::new(name);
 
        self.module_lookup.get(&string_ref).map(|v| *v)
 
    }
 

	
 
    /// Inserts a new symbol scope. The parent must have been added to the
 
    /// symbol table before.
 
    pub(crate) fn insert_scope(&mut self, parent_scope: Option<SymbolScope>, new_scope: SymbolScope) {
 
        debug_assert!(
 
            parent_scope.is_none() || self.scope_lookup.contains_key(parent_scope.as_ref().unwrap()),
 
            "inserting scope {:?} but parent {:?} does not exist", new_scope, parent_scope
 
        );
 
        debug_assert!(!self.scope_lookup.contains_key(&new_scope), "inserting scope {:?}, but it already exists", new_scope);
 

	
 
        if let Some(parent_scope) = parent_scope {
 
            let parent = self.scope_lookup.get_mut(&parent_scope).unwrap();
 
            parent.child_scopes.push(new_scope);
 
        }
 
        fn find_name(heap: &Heap, root_id: RootId) -> String {
 
            let root = &heap[root_id];
 
            for pragma_id in &root.pragmas {
 
                match &heap[*pragma_id] {
 
                    Pragma::Module(module) => {
 
                        return String::from_utf8_lossy(&module.value).to_string()
 
                    },
 
                    _ => {},
 

	
 
        let scope = ScopedSymbols {
 
            scope: new_scope,
 
            parent_scope,
 
            child_scopes: Vec::with_capacity(RESERVED_SYMBOLS),
 
            symbols: Vec::with_capacity(RESERVED_SYMBOLS)
 
        };
 
        self.scope_lookup.insert(new_scope, scope);
 
    }
 

	
 
    /// Inserts a symbol into a particular scope. The symbol's name may not

    /// already exist in the scope or any of its parents. If it does collide then

    /// the rejected symbol is returned, together with a reference to the existing

    /// symbol that has the same name.
 
    pub(crate) fn insert_symbol(&mut self, in_scope: SymbolScope, symbol: Symbol) -> Result<(), (Symbol, &Symbol)> {
 
        debug_assert!(self.scope_lookup.contains_key(&in_scope), "inserting symbol {}, but scope {:?} does not exist", symbol.name.as_str(), in_scope);
 
        let mut seek_scope = in_scope;
 
        loop {
 
            let scoped_symbols = self.scope_lookup.get(&seek_scope).unwrap();
 
            for existing_symbol in scoped_symbols.symbols.iter() {
 
                if symbol.name == existing_symbol.name {
 
                    return Err((symbol, existing_symbol))
 
                }
 
            }
 

	
 
            return String::from("Unknown")
 
            match scoped_symbols.parent_scope {
 
                Some(parent_scope) => { seek_scope = parent_scope; },
 
                None => { break; }
 
            }
 
        }
 

	
 
        debug_assert_eq!(
 
            self.symbol_lookup.len(), lookup_reserve_size,
 
            "miscalculated reserved size for symbol lookup table"
 
        );
 
        // If here, then there is no collision
 
        let scoped_symbols = self.scope_lookup.get_mut(&in_scope).unwrap();
 
        scoped_symbols.symbols.push(symbol);
 
        Ok(())
 
    }
 

	
 
    /// Resolves a module by its defined name
 
    pub(crate) fn resolve_module(&self, identifier: &Vec<u8>) -> Option<RootId> {
 
        self.module_lookup.get(identifier).map(|v| *v)
 
    }
 

	
 
    pub(crate) fn resolve_symbol<'t>(
 
        &'t self, root_module_id: RootId, identifier: &[u8]
 
    ) -> Option<&'t SymbolValue> {
 
        let lookup_key = SymbolKey{ module_id: root_module_id, symbol_name: Vec::from(identifier) };
 
        self.symbol_lookup.get(&lookup_key)
 
    }
 

	
 
    pub(crate) fn resolve_identifier<'t>(
 
        &'t self, root_module_id: RootId, identifier: &Identifier
 
    ) -> Option<&'t SymbolValue> {
 
        let lookup_key = SymbolKey::from_identifier(root_module_id, identifier);
 
        self.symbol_lookup.get(&lookup_key)
 
    }
 

	
 
    /// Resolves a namespaced symbol. This method resolves as many parts of the

    /// identifier as possible. It will halt the search when:
 
    /// 1. Polymorphic arguments are encountered on the identifier.
 
    /// 2. A non-namespace symbol is encountered.
 
    /// 3. A part of the identifier couldn't be resolved to anything
 
    /// The returned iterator will always point to the next symbol (even if 
 
    /// nothing was found)
 
    pub(crate) fn resolve_namespaced_identifier<'t, 'i>(
 
        &'t self, root_module_id: RootId, identifier: &'i NamespacedIdentifier
 
    ) -> (Option<&'t SymbolValue>, NamespacedIdentifierIter<'i>) {
 
        let mut iter = identifier.iter();
 
        let mut symbol: Option<&SymbolValue> = None;
 
        let mut within_module_id = root_module_id;
 

	
 
        while let Some((partial, poly_args)) = iter.next() {
 
            // Lookup the symbol within the currently iterated upon module
 
            let lookup_key = SymbolKey{ module_id: within_module_id, symbol_name: Vec::from(partial) };
 
            let new_symbol = self.symbol_lookup.get(&lookup_key);
 
            
 
            match new_symbol {
 
                None => {
 
                    // Can't find anything
 
                    symbol = None;
 
                    break;
 
                },
 
                Some(new_symbol) => {
 
                    // Found something, but if we already moved to another
 
                    // module then we don't want to keep jumping across modules,
 
                    // we're only interested in symbols defined within that
 
                    // module.
 
                    match &new_symbol.symbol {
 
                        Symbol::Namespace(new_root_id) => {
 
                            if root_module_id != within_module_id {
 
                                // This new symbol is imported by a foreign
 
                                // module, so this is an error
 
                                debug_assert!(symbol.is_some());
 
                                debug_assert!(symbol.unwrap().is_namespace());
 
                                debug_assert!(iter.num_returned() > 1);
 
                                symbol = None;
 
                                break;
 
                            }
 
                            within_module_id = *new_root_id;
 
                            symbol = Some(new_symbol);
 
                        },
 
                        Symbol::Definition((definition_root_id, _)) => {
 
                            // Found a definition, but if we already jumped
 
                            // modules, then this must be defined within that
 
                            // module.
 
                            if root_module_id != within_module_id && within_module_id != *definition_root_id {
 
                                // This is an imported definition within the module
 
                                // So keep the old 
 
                                debug_assert!(symbol.is_some());
 
                                debug_assert!(symbol.unwrap().is_namespace());
 
                                debug_assert!(iter.num_returned() > 1);
 
                                symbol = None;
 
                                break;
 
    /// Retrieves a symbol by name by searching in a particular scope and that scope's parents. The
 
    /// returned symbol may be either imported or defined within any of the searched scopes.
 
    pub(crate) fn get_symbol_by_name(
 
        &self, mut in_scope: SymbolScope, name: &[u8]
 
    ) -> Option<&Symbol> {
 
        let string_ref = StringRef::new(name);
 
        loop {
 
            let scope = self.scope_lookup.get(&in_scope);
 
            if scope.is_none() {
 
                return None;
 
            }
 
                            symbol = Some(new_symbol);
 
                            break;
 
            let scope = scope.unwrap();
 

	
 
            if let Some(symbol) = scope.get_symbol(&string_ref) {
 
                return Some(symbol);
 
            } else {
 
                // Could not find symbol in current scope, seek in the parent scope if it exists
 
                match &scope.parent_scope {
 
                    Some(parent_scope) => { in_scope = *parent_scope; },
 
                    None => return None,
 
                }
 
            }
 
        }
 
    }
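
    // A minimal self-contained sketch of the parent-scope search that both
    // `insert_symbol` and `get_symbol_by_name` above rely on, written with
    // simplified stand-in types (the real code uses `SymbolScope` and
    // `ScopedSymbols`): search the starting scope, then walk towards the root,
    // returning the first hit or `None` once the chain is exhausted.
    #[allow(dead_code)]
    fn illustrate_scope_chain_search() {
        use std::collections::HashMap;

        #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
        enum Scope { Global, Module(u32), Definition(u32) }
        struct Symbols { parent: Option<Scope>, names: Vec<&'static str> }

        let mut lookup: HashMap<Scope, Symbols> = HashMap::new();
        lookup.insert(Scope::Global, Symbols{ parent: None, names: vec!["builtin"] });
        lookup.insert(Scope::Module(0), Symbols{ parent: Some(Scope::Global), names: vec!["SomeStruct"] });
        lookup.insert(Scope::Definition(0), Symbols{ parent: Some(Scope::Module(0)), names: Vec::new() });

        let find = |mut scope: Scope, name: &str| -> Option<Scope> {
            loop {
                let symbols = lookup.get(&scope)?;
                if symbols.names.iter().any(|n| *n == name) {
                    return Some(scope);
                }
                scope = symbols.parent?;
            }
        };

        assert_eq!(find(Scope::Definition(0), "SomeStruct"), Some(Scope::Module(0)));
        assert_eq!(find(Scope::Definition(0), "missing"), None);
    }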
 

	
 
            if poly_args.is_some() {
 
                // Polymorphic argument specification should also be a fully 
 
                // resolved result.
 
                break;
 
    /// Retrieves a symbol by name by searching in a particular scope and that scope's parents. The
 
    /// returned symbol must be defined within one of the searched scopes and may not be imported.

    /// If only an imported symbol with that name exists then this function still returns `None`.
 
    pub(crate) fn get_symbol_by_name_defined_in_scope(
 
        &self, in_scope: SymbolScope, name: &[u8]
 
    ) -> Option<&Symbol> {
 
        match self.get_symbol_by_name(in_scope, name) {
 
            Some(symbol) => {
 
                match &symbol.variant {
 
                    SymbolVariant::Module(_) => {
 
                        None // in-scope modules are always imported
 
                    },
 
                    SymbolVariant::Definition(variant) => {
 
                        if variant.imported_at.is_some() || variant.defined_in_scope == SymbolScope::Global {
 
                            // Symbol is imported or lives in the global scope.
 
                            // Things in the global scope are defined by the
 
                            // compiler.
 
                            None
 
                        } else {
 
                            Some(symbol)
 
                        }
 
                    }
 

	
 
        match symbol {
 
            None => (None, iter),
 
            Some(symbol) => (Some(symbol), iter)
 
                }
 
            },
 
            None => None,
 
        }
 
    }
 

	
 
    /// Attempts to add a namespace symbol. Returns `Ok` if the symbol was
 
    /// inserted. If the symbol already exists then `Err` will be returned
 
    /// together with the previous definition's source position (in the origin
 
    /// module's source file).
 
    // Note: I would love to return a reference to the value, but Rust is
 
    // preventing me from doing so... That, or I'm not smart enough...
 
    fn add_namespace_symbol(
 
        &mut self, origin_position: InputPosition, key: SymbolKey, target_module_id: RootId
 
    ) -> Result<(), InputPosition> {
 
        match self.symbol_lookup.entry(key) {
 
            Entry::Occupied(o) => Err(o.get().position),
 
            Entry::Vacant(v) => {
 
                v.insert(SymbolValue{
 
                    position: origin_position,
 
                    symbol: Symbol::Namespace(target_module_id)
 
                });
 
                Ok(())
 
    /// Retrieves all symbols that are defined within a particular scope. Imported symbols are
 
    /// ignored. Returns `true` if the scope was found (which may contain 0 defined symbols) and
 
    /// `false` if the scope was not found.
 
    pub(crate) fn get_all_symbols_defined_in_scope(&self, in_scope: SymbolScope, target: &mut Vec<Symbol>) -> bool {
 
        match self.scope_lookup.get(&in_scope) {
 
            Some(scope) => {
 
                for symbol in &scope.symbols {
 
                    if let SymbolVariant::Definition(definition) = &symbol.variant {
 
                        if definition.imported_at.is_some() {
 
                            continue;
 
                        }
 

	
 
                        // Defined in scope, so push onto target
 
                        target.push(symbol.clone());
 
                    }
 
                }
 

	
 
    /// Attempts to add a definition symbol. Returns `Ok` if the symbol was
 
    /// inserted. If the symbol already exists then `Err` will be returned
 
    /// together with the previous definition's source position (in the origin
 
    /// module's source file).
 
    fn add_definition_symbol(
 
        &mut self, origin_position: InputPosition, key: SymbolKey,
 
        target_module_id: RootId, target_definition_id: DefinitionId,
 
    ) -> Result<(), InputPosition> {
 
        match self.symbol_lookup.entry(key) {
 
            Entry::Occupied(o) => Err(o.get().position),
 
            Entry::Vacant(v) => {
 
                v.insert(SymbolValue {
 
                    position: origin_position,
 
                    symbol: Symbol::Definition((target_module_id, target_definition_id))
 
                });
 
                Ok(())
 
            }
 
                true
 
            },
 
            None => false,
 
        }
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/symbol_table2.rs
 
deleted file
src/protocol/parser/token_parsing.rs
 
use crate::collections::{StringRef, ScopedSection};
 
use crate::protocol::ast::*;
 
use crate::protocol::input_source2::{
 
    InputSource2 as InputSource,
 
    InputPosition2 as InputPosition,
 
use crate::protocol::input_source::{
 
    InputSource as InputSource,
 
    InputPosition as InputPosition,
 
    InputSpan,
 
    ParseError,
 
};
 
use super::tokens::*;
 
use super::symbol_table2::*;
 
use super::symbol_table::*;
 
use super::{Module, ModuleCompilationPhase, PassCtx};
 

	
 
// Keywords
 
pub(crate) const KW_LET:       &'static [u8] = b"let";
 
pub(crate) const KW_AS:        &'static [u8] = b"as";
 
pub(crate) const KW_STRUCT:    &'static [u8] = b"struct";
 
pub(crate) const KW_ENUM:      &'static [u8] = b"enum";
 
pub(crate) const KW_UNION:     &'static [u8] = b"union";
 
pub(crate) const KW_FUNCTION:  &'static [u8] = b"function";
 
pub(crate) const KW_PRIMITIVE: &'static [u8] = b"primitive";
 
pub(crate) const KW_COMPOSITE: &'static [u8] = b"composite";
 
pub(crate) const KW_IMPORT:    &'static [u8] = b"import";
 

	
 
// Keywords - literals
 
pub(crate) const KW_LIT_TRUE:  &'static [u8] = b"true";
 
pub(crate) const KW_LIT_FALSE: &'static [u8] = b"false";
 
pub(crate) const KW_LIT_NULL:  &'static [u8] = b"null";
 

	
 
// Keywords - functions
 
pub(crate) const KW_FUNC_GET:    &'static [u8] = b"get";
 
pub(crate) const KW_FUNC_PUT:    &'static [u8] = b"put";
 
pub(crate) const KW_FUNC_FIRES:  &'static [u8] = b"fires";
 
pub(crate) const KW_FUNC_CREATE: &'static [u8] = b"create";
 
pub(crate) const KW_FUNC_LENGTH: &'static [u8] = b"length";
 
pub(crate) const KW_FUNC_ASSERT: &'static [u8] = b"assert";
 

	
 
// Keywords - statements
 
pub(crate) const KW_STMT_CHANNEL:  &'static [u8] = b"channel";
 
pub(crate) const KW_STMT_IF:       &'static [u8] = b"if";
 
pub(crate) const KW_STMT_ELSE:     &'static [u8] = b"else";
 
pub(crate) const KW_STMT_WHILE:    &'static [u8] = b"while";
 
pub(crate) const KW_STMT_BREAK:    &'static [u8] = b"break";
 
pub(crate) const KW_STMT_CONTINUE: &'static [u8] = b"continue";
 
pub(crate) const KW_STMT_GOTO:     &'static [u8] = b"goto";
 
pub(crate) const KW_STMT_RETURN:   &'static [u8] = b"return";
 
pub(crate) const KW_STMT_SYNC:     &'static [u8] = b"synchronous";
 
pub(crate) const KW_STMT_NEW:      &'static [u8] = b"new";
 

	
 
// Keywords - types
 
pub(crate) const KW_TYPE_IN_PORT:  &'static [u8] = b"in";
 
pub(crate) const KW_TYPE_OUT_PORT: &'static [u8] = b"out";
 
pub(crate) const KW_TYPE_MESSAGE:  &'static [u8] = b"msg";
 
pub(crate) const KW_TYPE_BOOL:     &'static [u8] = b"bool";
 
pub(crate) const KW_TYPE_UINT8:    &'static [u8] = b"u8";
 
pub(crate) const KW_TYPE_UINT16:   &'static [u8] = b"u16";
 
pub(crate) const KW_TYPE_UINT32:   &'static [u8] = b"u32";
 
pub(crate) const KW_TYPE_UINT64:   &'static [u8] = b"u64";
 
pub(crate) const KW_TYPE_SINT8:    &'static [u8] = b"s8";
 
pub(crate) const KW_TYPE_SINT16:   &'static [u8] = b"s16";
 
pub(crate) const KW_TYPE_SINT32:   &'static [u8] = b"s32";
 
pub(crate) const KW_TYPE_SINT64:   &'static [u8] = b"s64";
 
pub(crate) const KW_TYPE_CHAR:     &'static [u8] = b"char";
 
pub(crate) const KW_TYPE_STRING:   &'static [u8] = b"string";
 
pub(crate) const KW_TYPE_INFERRED: &'static [u8] = b"auto";
 

	
 
/// A special trait for consuming comma-separated items such that we can push

/// them onto either a `Vec` or a `ScopedSection`. As we only monomorphize for

/// very specific comma-separated cases I don't expect much polymorphization bloat.
 
/// Also, I really don't like this solution.
 
pub(crate) trait Extendable {
 
    type Value;
 

	
 
    #[inline]
 
    fn push(&mut self, v: Self::Value);
 
}
 

	
 
impl<T> Extendable for Vec<T> {
 
    type Value = T;
 

	
 
    #[inline]
 
    fn push(&mut self, v: Self::Value) {
 
        (self as Vec<T>).push(v);
 
        (self as &mut Vec<T>).push(v);
 
    }
 
}
 

	
 
impl<T: Sized + Copy> Extendable for ScopedSection<T> {
 
    type Value = T;
 

	
 
    #[inline]
 
    fn push(&mut self, v: Self::Value) {
 
        (self as ScopedSection<T>).push(v);
 
        (self as &mut ScopedSection<T>).push(v);
 
    }
 
}
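
// A small illustration of why the `Extendable` indirection above exists: the same
// consumer code can fill either a `Vec` or a `ScopedSection` without knowing which
// one it was handed. The function below is an invented example, not part of the
// parser itself; `consume_comma_separated_until` below relies on the same bound.
#[allow(dead_code)]
fn push_three_zeroes<E: Extendable<Value = u8>>(target: &mut E) {
    for _ in 0..3 {
        target.push(0);
    }
}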
 

	
 
/// Consumes a domain-name identifier: identifiers separated by a dot. For
 
/// simplification of later parsing and span identification the domain-name may
 
/// contain whitespace, but must reside on the same line.
 
pub(crate) fn consume_domain_ident<'a>(
 
    source: &'a InputSource, iter: &mut TokenIter
 
) -> Result<(&'a [u8], InputSpan), ParseError> {
 
    let (_, mut span) = consume_ident(source, iter)?;
 
    while let Some(TokenKind::Dot) = iter.next() {
 
        iter.consume();
 
        let (_, new_span) = consume_ident(source, iter)?;
 
        span.end = new_span.end;
 
    }
 

	
 
    // Not strictly necessary, but probably a reasonable restriction: this
 
    // simplifies parsing of module naming and imports.
 
    if span.begin.line != span.end.line {
 
        return Err(ParseError::new_error_str_at_span(source, span, "module names may not span multiple lines"));
 
    }
 

	
 
    // If the module name consists of a single identifier, then it may not match any
 
    // of the reserved keywords
 
    let section = source.section_at_pos(span.begin, span.end);
 
    if is_reserved_keyword(section) {
 
        return Err(ParseError::new_error_str_at_span(source, span, "encountered reserved keyword"));
 
    }
 

	
 
    Ok((source.section_at_pos(span.begin, span.end), span))
 
}
 

	
 
/// Consumes a specific expected token. Be careful to only call this with tokens
 
/// that do not have a variable length.
 
pub(crate) fn consume_token(source: &InputSource, iter: &mut TokenIter, expected: TokenKind) -> Result<InputSpan, ParseError> {
 
    if Some(expected) != iter.next() {
 
        return Err(ParseError::new_error_at_pos(
 
            source, iter.last_valid_pos(),
 
            format!("expected '{}'", expected.token_chars())
 
        ));
 
    }
 
    let span = iter.next_span();
 
    iter.consume();
 
    Ok(span)
 
}
 

	
 
/// Consumes a comma separated list until the closing delimiter is encountered
 
pub(crate) fn consume_comma_separated_until<T, F, E>(
 
    close_delim: TokenKind, source: &InputSource, iter: &mut TokenIter,
 
    consumer_fn: F, target: &mut E, item_name_and_article: &'static str,
 
    close_pos: Option<&mut InputPosition>
 
) -> Result<(), ParseError>
 
    where F: Fn(&InputSource, &mut TokenIter) -> Result<T, ParseError>,
 
          E: Extendable<Value=T>
 
{
 
    let mut had_comma = true;
 
    let mut next;
 
    loop {
 
        next = iter.next();
 
        if Some(close_delim) == next {
 
            if let Some(close_pos) = close_pos {
 
                // If requested return the position of the closing delimiter
 
                let (_, new_close_pos) = iter.next_positions();
 
                *close_pos = new_close_pos;
 
            }
 
            iter.consume();
 
            break;
 
        } else if !had_comma || next.is_none() {
 
            return Err(ParseError::new_error_at_pos(
 
                source, iter.last_valid_pos(),
 
                format!("expected a '{}', or {}", close_delim.token_chars(), item_name_and_article)
 
            ));
 
        }
 

	
 
        let new_item = consumer_fn(source, iter)?;
 
        target.push(new_item);
 

	
 
        next = iter.next();
 
        had_comma = next == Some(TokenKind::Comma);
 
        if had_comma {
 
            iter.consume();
 
        }
 
    }
 

	
 
    Ok(())
 
}
 

	
 
/// Consumes a comma-separated list of items if the opening delimiting token is
 
/// encountered. If not, then the iterator will remain at its current position.
 
/// Note that the potential cases may be:
 
/// - No opening delimiter encountered, then we return `false`.
 
/// - Both opening and closing delimiter encountered, but no items.
 
/// - Opening and closing delimiter encountered, and items were processed.
 
/// - Found an opening delimiter, but processing an item failed.
 
pub(crate) fn maybe_consume_comma_separated<T, F, E>(
 
    open_delim: TokenKind, close_delim: TokenKind, source: &InputSource, iter: &mut TokenIter,
 
    consumer_fn: F, target: &mut E, item_name_and_article: &'static str,
 
    close_pos: Option<&mut InputPosition>
 
) -> Result<bool, ParseError>
 
    where F: Fn(&InputSource, &mut TokenIter) -> Result<T, ParseError>,
 
          E: Extendable<Value=T>
 
{
 
    let mut next = iter.next();
 
    if Some(open_delim) != next {
 
        return Ok(false);
 
    }
 

	
 
    // Opening delimiter encountered, so must parse the comma-separated list.
 
    iter.consume();
 
    consume_comma_separated_until(close_delim, source, iter, consumer_fn, target, item_name_and_article, close_pos)?;
 

	
 
    Ok(true)
 
}
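
// Minimal usage sketch (illustrative, not part of the changeset): optionally
// parse a parenthesized, comma-separated list of identifiers, keeping only
// their spans. The helper name is hypothetical.
fn maybe_consume_ident_spans(
    source: &InputSource, iter: &mut TokenIter, spans: &mut Vec<InputSpan>
) -> Result<bool, ParseError> {
    maybe_consume_comma_separated(
        TokenKind::OpenParen, TokenKind::CloseParen, source, iter,
        |source, iter| consume_ident(source, iter).map(|(_, span)| span),
        spans, "an identifier", None
    )
}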
 

	
 
pub(crate) fn maybe_consume_comma_separated_spilled<F: Fn(&InputSource, &mut TokenIter) -> Result<(), ParseError>>(
 
    open_delim: TokenKind, close_delim: TokenKind, source: &InputSource, iter: &mut TokenIter,
 
    consumer_fn: F, item_name_and_article: &'static str
 
) -> Result<bool, ParseError> {
 
    let mut next = iter.next();
 
    if Some(open_delim) != next {
 
        return Ok(false);
 
    }
 

	
 
    iter.consume();
 
    let mut had_comma = true;
 
    loop {
 
        next = iter.next();
 
        if Some(close_delim) == next {
 
            iter.consume();
 
            break;
 
        } else if !had_comma {
 
            return Err(ParseError::new_error_at_pos(
 
                source, iter.last_valid_pos(),
 
                format!("expected a '{}', or {}", close_delim.token_chars(), item_name_and_article)
 
            ));
 
        }
 

	
 
        consumer_fn(source, iter)?;
 
        next = iter.next();
 
        had_comma = next == Some(TokenKind::Comma);
 
        if had_comma {
 
            iter.consume();
 
        }
 
    }
 

	
 
    Ok(true)
 
}
 

	
 
/// Consumes a comma-separated list and expects the opening and closing
 
/// characters to be present. The returned array may still be empty
 
pub(crate) fn consume_comma_separated<T, F, E>(
 
    open_delim: TokenKind, close_delim: TokenKind, source: &InputSource, iter: &mut TokenIter,
 
    consumer_fn: F, target: &mut Vec<T>, item_name_and_article: &'static str,
 
    consumer_fn: F, target: &mut E, item_name_and_article: &'static str,
 
    list_name_and_article: &'static str, close_pos: Option<&mut InputPosition>
 
) -> Result<(), ParseError>
 
    where F: Fn(&InputSource, &mut TokenIter) -> Result<T, ParseError>,
 
          E: Extendable<Value=T>
 
{
 
    let first_pos = iter.last_valid_pos();
 
    match maybe_consume_comma_separated(
 
        open_delim, close_delim, source, iter, consumer_fn, target,
 
        item_name_and_article, close_pos
 
    ) {
 
        Ok(true) => Ok(()),
 
        Ok(false) => {
 
            return Err(ParseError::new_error_at_pos(
 
                source, first_pos,
 
                format!("expected {}", list_name_and_article)
 
            ));
 
        },
 
        Err(err) => Err(err)
 
    }
 
}
 

	
 
/// Consumes an integer literal, may be binary, octal, hexadecimal or decimal,
 
/// and may have separating '_'-characters.
 
pub(crate) fn consume_integer_literal(source: &InputSource, iter: &mut TokenIter, buffer: &mut String) -> Result<(u64, InputSpan), ParseError> {
 
    if Some(TokenKind::Integer) != iter.next() {
 
        return Err(ParseError::new_error_str_at_pos(source, iter.last_valid_pos(), "expected an integer literal"));
 
    }
 
    let integer_span = iter.next_span();
 
    iter.consume();
 

	
 
    let integer_text = source.section_at_span(integer_span);
 

	
 
    // Determine radix and offset from prefix
 
    let (radix, input_offset, radix_name) =
 
        if integer_text.starts_with(b"0b") || integer_text.starts_with(b"0B") {
 
            // Binary number
 
            (2, 2, "binary")
 
        } else if integer_text.starts_with(b"0o") || integer_text.starts_with(b"0O") {
 
            // Octal number
 
            (8, 2, "octal")
 
        } else if integer_text.starts_with(b"0x") || integer_text.starts_with(b"0X") {
 
            // Hexadecimal number
 
            (16, 2, "hexadecimal")
 
        } else {
 
            (10, 0, "decimal")
 
        };
 

	
 
    // Take out any of the separating '_' characters
 
    buffer.clear();
 
    for char_idx in input_offset..integer_text.len() {
 
        let char = integer_text[char_idx];
 
        if char == b'_' {
 
            continue;
 
        }
 
        if !char.is_ascii_hexdigit() { // let `from_str_radix` below reject digits invalid for the radix
 
            return Err(ParseError::new_error_at_span(
 
                source, integer_span,
 
                format!("incorrectly formatted {} number", radix_name)
 
            ));
 
        }
 
        buffer.push(char::from(char));
 
    }
 

	
 
    // Use the cleaned up string to convert to integer
 
    match u64::from_str_radix(&buffer, radix) {
 
        Ok(number) => Ok((number, integer_span)),
 
        Err(_) => Err(ParseError::new_error_at_span(
 
            source, integer_span,
 
            format!("incorrectly formatted {} number", radix_name)
 
        )),
 
    }
 
}
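
// Standalone sketch of the cleanup-and-convert step above (illustrative, not
// part of the changeset): strip the '_' separators, then let `from_str_radix`
// validate the digits for the detected radix.
fn parse_cleaned_radix(digits: &str, radix: u32) -> Option<u64> {
    let cleaned: String = digits.chars().filter(|c| *c != '_').collect();
    u64::from_str_radix(&cleaned, radix).ok()
}
// parse_cleaned_radix("10_10", 2) == Some(10)
// parse_cleaned_radix("FF", 16) == Some(255)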
 

	
 
/// Consumes a character literal. We currently support a limited number of
 
/// backslash-escaped characters
 
pub(crate) fn consume_character_literal(
 
    source: &InputSource, iter: &mut TokenIter
 
) -> Result<(char, InputSpan), ParseError> {
 
    if Some(TokenKind::Character) != iter.next() {
 
        return Err(ParseError::new_error_str_at_pos(source, iter.last_valid_pos(), "expected a character literal"));
 
    }
 
    let span = iter.next_span();
 
    iter.consume();
 

	
 
    let char_text = source.section_at_span(span);
 
    if !char_text.is_ascii() {
 
        return Err(ParseError::new_error_str_at_span(
 
            source, span, "expected an ASCII character literal"
 
        ));
 
    }
 

	
 
    match char_text.len() {
 
        0 => return Err(ParseError::new_error_str_at_span(source, span, "too few characters in character literal")),
 
        1 => {
 
            // We already know the text is ascii, so just throw an error if we have the escape
 
            // character.
 
            if char_text[0] == b'\\' {
 
                return Err(ParseError::new_error_str_at_span(source, span, "escape character without subsequent character"));
 
            }
 
            return Ok((char_text[0] as char, span));
 
        },
 
        2 => {
 
            if char_text[0] == b'\\' {
 
                let result = parse_escaped_character(char_text[1])?;
 
                let result = parse_escaped_character(source, iter.last_valid_pos(), char_text[1])?;
 
                return Ok((result, span))
 
            }
 
        },
 
        _ => {}
 
    }
 

	
 
    return Err(ParseError::new_error_str_at_span(source, span, "too many characters in character literal"))
 
}
 

	
 
/// Consumes a string literal. We currently support a limited number of
 
/// backslash-escaped characters. Note that the result is stored in the
 
/// buffer.
 
pub(crate) fn consume_string_literal(
 
    source: &InputSource, iter: &mut TokenIter, buffer: &mut String
 
) -> Result<InputSpan, ParseError> {
 
    if Some(TokenKind::String) != iter.next() {
 
        return Err(ParseError::new_error_str_at_pos(source, iter.last_valid_pos(), "expected a string literal"));
 
    }
 

	
 
    buffer.clear();
 
    let span = iter.next_span();
 
    iter.consume();
 

	
 
    let text = source.section_at_span(span);
 
    if !text.is_ascii() {
 
        return Err(ParseError::new_error_str_at_span(source, span, "expected an ASCII string literal"));
 
    }
 
    buffer.reserve(text.len());
 

	
 
    let mut was_escape = false;
 
    for idx in 0..text.len() {
 
        let cur = text[idx];
 
        if cur != b'\\' || was_escape { // an escaped backslash is itself an ordinary character
 
            if was_escape {
 
                let to_push = parse_escaped_character(cur)?;
 
                let to_push = parse_escaped_character(source, iter.last_valid_pos(), cur)?;
 
                buffer.push(to_push);
 
            } else {
 
                buffer.push(cur as char);
 
            }
 
            was_escape = false;
 
        } else {
 
            was_escape = true;
 
        }
 
    }
 

	
 
    debug_assert!(!was_escape); // because otherwise we couldn't have ended the string literal
 

	
 
    Ok(span)
 
}
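
// For illustration: given the source characters `a`, `\`, `t`, `b` inside a
// string literal, the loop above pushes 'a', notes the backslash, pushes the
// unescaped '\t', and finally pushes 'b', so `buffer` ends up holding "a\tb".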
 

	
 
fn parse_escaped_character(v: u8) -> Result<char, ParseError> {
 
fn parse_escaped_character(source: &InputSource, pos: InputPosition, v: u8) -> Result<char, ParseError> {
 
    let result = match v {
 
        b'r' => '\r',
 
        b'n' => '\n',
 
        b't' => '\t',
 
        b'0' => '\0',
 
        b'\\' => '\\',
 
        b'\'' => '\'',
 
        b'"' => '"',
 
        v => return Err(ParseError::new_error_at_span(
 
            source, span, format!("unexpected escaped character '{}'", v)
 
        v => return Err(ParseError::new_error_at_pos(
 
            source, pos, format!("unexpected escaped character '{}'", v)
 
        )),
 
    };
 
    Ok(result)
 
}
 

	
 
pub(crate) fn consume_pragma<'a>(source: &'a InputSource, iter: &mut TokenIter) -> Result<(&'a [u8], InputPosition, InputPosition), ParseError> {
 
    if Some(TokenKind::Pragma) != iter.next() {
 
        return Err(ParseError::new_error_str_at_pos(source, iter.last_valid_pos(), "expected a pragma"));
 
    }
 
    let (pragma_start, pragma_end) = iter.next_positions();
 
    iter.consume();
 
    Ok((source.section_at_pos(pragma_start, pragma_end), pragma_start, pragma_end))
 
}
 

	
 
pub(crate) fn has_ident(source: &InputSource, iter: &mut TokenIter, expected: &[u8]) -> bool {
 
    peek_ident(source, iter).map_or(false, |section| section == expected)
 
}
 

	
 
pub(crate) fn peek_ident<'a>(source: &'a InputSource, iter: &mut TokenIter) -> Option<&'a [u8]> {
 
    if Some(TokenKind::Ident) == iter.next() {
 
        let (start, end) = iter.next_positions();
 
        return Some(source.section_at_pos(start, end))
 
    }
 

	
 
    None
 
}
 

	
 
/// Consumes any identifier and returns it together with its span. Does not
 
/// check if the identifier is a reserved keyword.
 
pub(crate) fn consume_any_ident<'a>(
 
    source: &'a InputSource, iter: &mut TokenIter
 
) -> Result<(&'a [u8], InputSpan), ParseError> {
 
    if Some(TokenKind::Ident) != iter.next() {
 
        return Err(ParseError::new_error_str_at_pos(source, iter.last_valid_pos(), "expected an identifier"));
 
    }
 
    let (ident_start, ident_end) = iter.next_positions();
 
    iter.consume();
 
    Ok((source.section_at_pos(ident_start, ident_end), InputSpan::from_positions(ident_start, ident_end)))
 
}
 

	
 
/// Consumes a specific identifier. May or may not be a reserved keyword.
 
pub(crate) fn consume_exact_ident(source: &InputSource, iter: &mut TokenIter, expected: &[u8]) -> Result<InputSpan, ParseError> {
 
    let (ident, pos) = consume_any_ident(source, iter)?;
 
    if ident != expected {
 
        debug_assert!(expected.is_ascii());
 
        return Err(ParseError::new_error_at_pos(
 
            source, iter.last_valid_pos(),
 
            format!("expected the text '{}'", &String::from_utf8_lossy(expected))
 
        ));
 
    }
 
    Ok(pos)
 
}
 

	
 
/// Consumes an identifier that is not a reserved keyword and returns it
 
/// together with its span.
 
pub(crate) fn consume_ident<'a>(
 
    source: &'a InputSource, iter: &mut TokenIter
 
) -> Result<(&'a [u8], InputSpan), ParseError> {
 
    let (ident, span) = consume_any_ident(source, iter)?;
 
    if is_reserved_keyword(ident) {
 
        return Err(ParseError::new_error_str_at_span(source, span, "encountered reserved keyword"));
 
    }
 

	
 
    Ok((ident, span))
 
}
 

	
 
/// Consumes an identifier and immediately interns it into the `StringPool`
 
pub(crate) fn consume_ident_interned(
 
    source: &InputSource, iter: &mut TokenIter, ctx: &mut PassCtx
 
) -> Result<Identifier, ParseError> {
 
    let (value, span) = consume_ident(source, iter)?;
 
    let value = ctx.pool.intern(value);
 
    Ok(Identifier{ span, value })
 
}
 

	
 
fn is_reserved_definition_keyword(text: &[u8]) -> bool {
 
    match text {
 
        KW_STRUCT | KW_ENUM | KW_UNION | KW_FUNCTION | KW_PRIMITIVE | KW_COMPOSITE => true,
 
        _ => false,
 
    }
 
}
 

	
 
fn is_reserved_statement_keyword(text: &[u8]) -> bool {
 
    match text {
 
        KW_IMPORT | KW_AS |
 
        KW_STMT_CHANNEL | KW_STMT_IF | KW_STMT_WHILE |
 
        KW_STMT_BREAK | KW_STMT_CONTINUE | KW_STMT_GOTO | KW_STMT_RETURN |
 
        KW_STMT_SYNC | KW_STMT_NEW => true,
 
        _ => false,
 
    }
 
}
 

	
 
fn is_reserved_expression_keyword(text: &[u8]) -> bool {
 
    match text {
 
        KW_LET |
 
        KW_LIT_TRUE | KW_LIT_FALSE | KW_LIT_NULL |
 
        KW_FUNC_GET | KW_FUNC_PUT | KW_FUNC_FIRES | KW_FUNC_CREATE | KW_FUNC_ASSERT | KW_FUNC_LENGTH => true,
 
        _ => false,
 
    }
 
}
 

	
 
fn is_reserved_type_keyword(text: &[u8]) -> bool {
 
    match text {
 
        KW_TYPE_IN_PORT | KW_TYPE_OUT_PORT | KW_TYPE_MESSAGE | KW_TYPE_BOOL |
 
        KW_TYPE_UINT8 | KW_TYPE_UINT16 | KW_TYPE_UINT32 | KW_TYPE_UINT64 |
 
        KW_TYPE_SINT8 | KW_TYPE_SINT16 | KW_TYPE_SINT32 | KW_TYPE_SINT64 |
 
        KW_TYPE_CHAR | KW_TYPE_STRING |
 
        KW_TYPE_INFERRED => true,
 
        _ => false,
 
    }
 
}
 

	
 
fn is_reserved_keyword(text: &[u8]) -> bool {
 
    return
 
        is_reserved_definition_keyword(text) ||
 
        is_reserved_statement_keyword(text) ||
 
        is_reserved_expression_keyword(text) ||
 
        is_reserved_type_keyword(text);
 
}
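
// For illustration: `is_reserved_keyword(b"s32")` and
// `is_reserved_keyword(b"auto")` return true (type keywords), whereas a user
// identifier such as `is_reserved_keyword(b"my_module")` returns false.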
 

	
 
pub(crate) fn seek_module(modules: &[Module], root_id: RootId) -> Option<&Module> {
 
    for module in modules {
 
        if module.root_id == root_id {
 
            return Some(module)
 
        }
 
    }
 

	
 
    return None
 
}
 

	
 
/// Constructs a human-readable message indicating why there is a conflict of
 
/// symbols.
 
// Note: passing the `module_idx` is not strictly necessary, but it helps catch

// programmer mistakes during development: the conflict arises while we are

// parsing that particular module.
 
pub(crate) fn construct_symbol_conflict_error(
 
    modules: &[Module], module_idx: usize, ctx: &PassCtx, new_symbol: &Symbol, old_symbol: &Symbol
 
) -> ParseError {
 
    let module = &modules[module_idx];
 
    let get_symbol_span_and_msg = |symbol: &Symbol| -> (String, Option<InputSpan>) {
 
        match &symbol.variant {
 
            SymbolVariant::Module(module) => {
 
                let import = &ctx.heap[module.introduced_at];
 
                return (
 
                    format!("the module aliased as '{}' imported here", symbol.name.as_str()),
 
                    Some(import.as_module().span)
 
                );
 
            },
 
            SymbolVariant::Definition(definition) => {
 
                if definition.defined_in_module.is_invalid() {
 
                    // Must be a builtin thing
 
                    return (format!("the builtin '{}'", symbol.name.as_str()), None)
 
                } else {
 
                    if let Some(import_id) = definition.imported_at {
 
                        let import = &ctx.heap[import_id];
 
                        return (
 
                            format!("the type '{}' imported here", symbol.name.as_str()),
 
                            Some(import.as_symbols().span)
 
                        );
 
                    } else {
 
                        // This is a defined symbol. So this must mean that the
 
                        // error was caused by it being defined.
 
                        debug_assert_eq!(definition.defined_in_module, module.root_id);
 

	
 
                        return (
 
                            format!("the type '{}' defined here", symbol.name.as_str()),
 
                            Some(definition.identifier_span)
 
                        )
 
                    }
 
                }
 
            }
 
        }
 
    };
 

	
 
    let (new_symbol_msg, new_symbol_span) = get_symbol_span_and_msg(new_symbol);
 
    let (old_symbol_msg, old_symbol_span) = get_symbol_span_and_msg(old_symbol);
 
    let new_symbol_span = new_symbol_span.unwrap(); // because new symbols cannot be builtin
 

	
 
    match old_symbol_span {
 
        Some(old_symbol_span) => ParseError::new_error_at_span(
 
            &module.source, new_symbol_span, format!("symbol is defined twice: {}", new_symbol_msg)
 
        ).with_info_at_span(
 
            &module.source, old_symbol_span, format!("it conflicts with {}", old_symbol_msg)
 
        ),
 
        None => ParseError::new_error_at_span(
 
            &module.source, new_symbol_span,
 
            format!("symbol is defined twice: {} conflicts with {}", new_symbol_msg, old_symbol_msg)
 
        )
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/tokens.rs
Show inline comments
 
use crate::protocol::input_source2::{
 
    InputPosition2 as InputPosition,
 
use crate::protocol::input_source::{
 
    InputPosition as InputPosition,
 
    InputSpan
 
};
 

	
 
/// Represents a particular kind of token. Some kinds have a variable length;

/// such a token is always followed by a
 
/// `TokenKind::SpanEnd` token.
 
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
 
pub(crate) enum TokenKind {
 
    // Variable-character tokens, followed by a SpanEnd token
 
    Ident,          // regular identifier
 
    Pragma,         // identifier with prefixed `#`, range includes `#`
 
    Integer,        // integer literal
 
    String,         // string literal, range includes `"`
 
    Character,      // character literal, range includes `'`
 
    LineComment,    // line comment, range includes leading `//`, but not newline
 
    BlockComment,   // block comment, range includes leading `/*` and trailing `*/`
 
    // Punctuation (single character)
 
    Exclamation,    // !
 
    Question,       // ?
 
    Pound,          // #
 
    OpenAngle,      // <
 
    OpenCurly,      // {
 
    OpenParen,      // (
 
    OpenSquare,     // [
 
    CloseAngle,     // >
 
    CloseCurly,     // }
 
    CloseParen,     // )
 
    CloseSquare,    // ]
 
    Colon,          // :
 
    Comma,          // ,
 
    Dot,            // .
 
    SemiColon,      // ;
 
    Quote,          // '
 
    DoubleQuote,    // "
 
    // Operator-like (single character)
 
    At,             // @
 
    Plus,           // +
 
    Minus,          // -
 
    Star,           // *
 
    Slash,          // /
 
    Percent,        // %
 
    Caret,          // ^
 
    And,            // &
 
    Or,             // |
 
    Tilde,          // ~
 
    Equal,          // =
 
    // Punctuation (two characters)
 
    ColonColon,     // ::
 
    DotDot,         // ..
 
    ArrowRight,     // ->
 
    // Operator-like (two characters)
 
    PlusPlus,       // ++
 
    PlusEquals,     // +=
 
    MinusMinus,     // --
 
    MinusEquals,    // -=
 
    StarEquals,     // *=
 
    SlashEquals,    // /=
 
    PercentEquals,  // %=
 
    CaretEquals,    // ^=
 
    AndAnd,         // &&
 
    AndEquals,      // &=
 
    OrOr,           // ||
 
    OrEquals,       // |=
 
    EqualEqual,     // ==
 
    NotEqual,       // !=
 
    ShiftLeft,      // <<
 
    LessEquals,     // <=
 
    ShiftRight,     // >>
 
    GreaterEquals,  // >=
 
    // Operator-like (three characters)
 
    ShiftLeftEquals,// <<=
 
    ShiftRightEquals, // >>=
 
    // Special marker token to indicate end of variable-character tokens
 
    SpanEnd,
 
}
 

	
 
impl TokenKind {
 
    /// Returns true if the next expected token is the special `TokenKind::SpanEnd` token. This is
 
    /// the case for tokens of variable length (e.g. an identifier).
 
    fn has_span_end(&self) -> bool {
 
        return *self <= TokenKind::BlockComment
 
    }
 

	
 
    /// Returns the number of characters associated with the token. May only be called on tokens
 
    /// that do not have a variable length.
 
    fn num_characters(&self) -> u32 {
 
        debug_assert!(!self.has_span_end() && *self != TokenKind::SpanEnd);
 
        if *self <= TokenKind::Equal {
 
            1
 
        } else if *self <= TokenKind::GreaterEquals { // `GreaterEquals` is the last two-character token
 
            2
 
        } else {
 
            3
 
        }
 
    }
 

	
 
    /// Returns the characters that are represented by the token. May only be called on tokens that
 
    /// do not have a variable length.
 
    pub fn token_chars(&self) -> &'static str {
 
        debug_assert!(!self.has_span_end() && *self != TokenKind::SpanEnd);
 
        use TokenKind as TK;
 
        match self {
 
            TK::Exclamation => "!",
 
            TK::Question => "?",
 
            TK::Pound => "#",
 
            TK::OpenAngle => "<",
 
            TK::OpenCurly => "{",
 
            TK::OpenParen => "(",
 
            TK::OpenSquare => "[",
 
            TK::CloseAngle => ">",
 
            TK::CloseCurly => "}",
 
            TK::CloseParen => ")",
 
            TK::CloseSquare => "]",
 
            TK::Colon => ":",
 
            TK::Comma => ",",
 
            TK::Dot => ".",
 
            TK::SemiColon => ";",
 
            TK::Quote => "'",
 
            TK::DoubleQuote => "\"",
 
            TK::At => "@",
 
            TK::Plus => "+",
 
            TK::Minus => "-",
 
            TK::Star => "*",
 
            TK::Slash => "/",
 
            TK::Percent => "%",
 
            TK::Caret => "^",
 
            TK::And => "&",
 
            TK::Or => "|",
 
            TK::Tilde => "~",
 
            TK::Equal => "=",
 
            TK::ColonColon => "::",
 
            TK::DotDot => "..",
 
            TK::ArrowRight => "->",
 
            TK::PlusPlus => "++",
 
            TK::PlusEquals => "+=",
 
            TK::MinusMinus => "--",
 
            TK::MinusEquals => "-=",
 
            TK::StarEquals => "*=",
 
            TK::SlashEquals => "/=",
 
            TK::PercentEquals => "%=",
 
            TK::CaretEquals => "^=",
 
            TK::AndAnd => "&&",
 
            TK::AndEquals => "&=",
 
            TK::OrOr => "||",
 
            TK::OrEquals => "|=",
 
            TK::EqualEqual => "==",
 
            TK::NotEqual => "!=",
 
            TK::ShiftLeft => "<<",
 
            TK::LessEquals => "<=",
 
            TK::ShiftRight => ">>",
 
            TK::GreaterEquals => ">=",
 
            TK::ShiftLeftEquals => "<<=",
 
            TK::ShiftRightEquals => ">>=",
 
            // Let's keep these explicit for now, in case we want to add more symbols
 
            TK::Ident | TK::Pragma | TK::Integer | TK::String | TK::Character |
 
            TK::LineComment | TK::BlockComment | TK::SpanEnd => unreachable!(),
 
        }
 
    }
 
}
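
// For illustration: `TokenKind::ArrowRight.token_chars()` is "->" and its
// `num_characters()` is 2, whereas `TokenKind::Ident` has neither: it is a
// variable-length token whose end is marked by a `TokenKind::SpanEnd` entry.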
 

	
 
/// Represents a single token at a particular position.
 
pub(crate) struct Token {
 
    pub kind: TokenKind,
 
    pub pos: InputPosition,
 
}
 

	
 
impl Token {
 
    pub(crate) fn new(kind: TokenKind, pos: InputPosition) -> Self {
 
        Self{ kind, pos }
 
    }
 
}
 

	
 
/// The kind of token ranges that are specially parsed by the tokenizer.
 
#[derive(Debug, PartialEq, Eq)]
 
pub(crate) enum TokenRangeKind {
 
    Module,
 
    Pragma,
 
    Import,
 
    Definition,
 
    Code,
 
}
 

	
 
/// A range of tokens with a specific meaning. Such a range is part of a tree
 
/// where each parent range envelops all of its children.
 
#[derive(Debug)]
 
pub(crate) struct TokenRange {
 
    // Index of the parent in `TokenBuffer.ranges`. A range of kind `Module` has

    // no parent; in that case the parent index points to itself.
 
    pub parent_idx: usize,
 
    pub range_kind: TokenRangeKind,
 
    pub curly_depth: u32,
 
    // Offsets into `TokenBuffer.tokens`: the tokens belonging to this range.
 
    pub start: u32,             // first token (inclusive index)
 
    pub end: u32,               // last token (exclusive index)
 
    // Child ranges
 
    pub num_child_ranges: u32,  // Number of subranges
 
    pub first_child_idx: u32,   // First subrange (or points to self if no subranges)
 
    pub last_child_idx: u32,    // Last subrange (or points to self if no subranges)
 
    pub next_sibling_idx: Option<u32>,
 
}
 

	
 
pub(crate) struct TokenBuffer {
 
    pub tokens: Vec<Token>,
 
    pub ranges: Vec<TokenRange>,
 
}
 

	
 
impl TokenBuffer {
 
    pub(crate) fn new() -> Self {
 
        Self{ tokens: Vec::new(), ranges: Vec::new() }
 
    }
 

	
 
    pub(crate) fn iter_range<'a>(&'a self, range: &TokenRange) -> TokenIter<'a> {
 
        TokenIter::new(self, range.start as usize, range.end as usize)
 
    }
 

	
 
    pub(crate) fn start_pos(&self, range: &TokenRange) -> InputPosition {
 
        self.tokens[range.start].pos
 
        self.tokens[range.start as usize].pos
 
    }
 

	
 
    pub(crate) fn end_pos(&self, range: &TokenRange) -> InputPosition {
 
        let last_token = &self.tokens[range.end - 1];
 
        let last_token = &self.tokens[range.end as usize - 1];
 
        if last_token.kind == TokenKind::SpanEnd {
 
            return last_token.pos
 
        } else {
 
            debug_assert!(!last_token.kind.has_span_end());
 
            return last_token.pos.with_offset(last_token.kind.num_characters());
 
        }
 
    }
 
}
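
// Illustrative sketch (not part of the changeset): inspect the first
// meaningful token of a range. The helper name is hypothetical.
fn first_token_kind(buffer: &TokenBuffer, range: &TokenRange) -> Option<TokenKind> {
    let mut iter = buffer.iter_range(range);
    iter.next()
}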
 

	
 
/// Iterator over tokens within a specific `TokenRange`.
 
pub(crate) struct TokenIter<'a> {
 
    tokens: &'a Vec<Token>,
 
    cur: usize,
 
    end: usize,
 
}
 

	
 
impl<'a> TokenIter<'a> {
 
    fn new(buffer: &'a TokenBuffer, start: usize, end: usize) -> Self {
 
        Self{ tokens: &buffer.tokens, cur: start, end }
 
    }
 

	
 
    /// Returns the next token (may include comments), or `None` if at the end
 
    /// of the range.
 
    pub(crate) fn next_including_comments(&self) -> Option<TokenKind> {
 
        if self.cur >= self.end {
 
            return None;
 
        }
 

	
 
        let token = &self.tokens[self.cur];
 
        Some(token.kind)
 
    }
 

	
 
    /// Returns the next token (but skips over comments), or `None` if at the
 
    /// end of the range
 
    pub(crate) fn next(&mut self) -> Option<TokenKind> {
 
        while let Some(token_kind) = self.next_including_comments() {
 
            if token_kind != TokenKind::LineComment && token_kind != TokenKind::BlockComment {
 
                return Some(token_kind);
 
            }
 
            self.consume();
 
        }
 

	
 
        return None
 
    }
 

	
 
    /// Peeks ahead by one token (i.e. the one that comes after `next()`), and
 
    /// skips over comments
 
    pub(crate) fn peek(&self) -> Option<TokenKind> {
 
        for next_idx in self.cur + 1..self.end {
 
            let next_kind = self.tokens[next_idx].kind;
 
            if next_kind != TokenKind::LineComment && next_kind != TokenKind::BlockComment && next_kind != TokenKind::SpanEnd {
 
                return Some(next_kind);
 
            }
 
        }
 

	
 
        return None;
 
    }
 

	
 
    /// Returns the start position belonging to the token returned by `next`. If
 
    /// there is not a next token, then we return the end position of the
 
    /// previous token.
 
    pub(crate) fn last_valid_pos(&self) -> InputPosition {
 
        if self.cur < self.end {
 
            // Return token position
 
            return self.tokens[self.cur].pos
 
        }
 

	
 
        // Return previous token end
 
        let token = &self.tokens[self.cur - 1];
 
        return if token.kind == TokenKind::SpanEnd {
 
            token.pos
 
        } else {
 
            token.pos.with_offset(token.kind.num_characters());
 
            token.pos.with_offset(token.kind.num_characters())
 
        };
 
    }
 

	
 
    /// Returns the start and end positions belonging to the token returned by `next`. This
 
    /// assumes that we're not at the end of the range we're iterating over.
 
    /// TODO: @cleanup Phase out?
 
    pub(crate) fn next_positions(&self) -> (InputPosition, InputPosition) {
 
        debug_assert!(self.cur < self.end);
 
        let token = &self.tokens[self.cur];
 
        if token.kind.has_span_end() {
 
            let span_end = &self.tokens[self.cur + 1];
 
            debug_assert_eq!(span_end.kind, TokenKind::SpanEnd);
 
            (token.pos, span_end.pos)
 
        } else {
 
            let offset = token.kind.num_characters();
 
            (token.pos, token.pos.with_offset(offset))
 
        }
 
    }
 

	
 
    /// See `next_positions`
 
    pub(crate) fn next_span(&self) -> InputSpan {
 
        let (begin, end) = self.next_positions();
 
        return InputSpan::from_positions(begin, end)
 
    }
 

	
 
    /// Advances the iterator to the next (meaningful) token.
 
    pub(crate) fn consume(&mut self) {
 
        if let Some(kind) = self.next() {
 
            if kind.has_span_end() {
 
                self.cur += 2;
 
            } else {
 
                self.cur += 1;
 
            }
 
        }
 
    }
 

	
 
    /// Saves the current iteration position, may be passed to `load` to return
 
    /// the iterator to a previous position.
 
    pub(crate) fn save(&self) -> (usize, usize) {
 
        (self.cur, self.end)
 
    }
 

	
 
    pub(crate) fn load(&mut self, saved: (usize, usize)) {
 
        self.cur = saved.0;
 
        self.end = saved.1;
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/type_table.rs
Show inline comments
 
use std::fmt::{Formatter, Result as FmtResult};
 
use std::collections::{HashMap, VecDeque};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::parser::symbol_table2::{SymbolTable, Symbol, SymbolScope};
 
use crate::protocol::input_source2::{InputSource2 as InputSource, ParseError};
 
use crate::protocol::parser::symbol_table::SymbolScope;
 
use crate::protocol::input_source::ParseError;
 
use crate::protocol::parser::*;
 

	
 
//------------------------------------------------------------------------------
 
// Defined Types
 
//------------------------------------------------------------------------------
 

	
 
#[derive(Copy, Clone, PartialEq, Eq)]
 
pub enum TypeClass {
 
    Enum,
 
    Union,
 
    Struct,
 
    Function,
 
    Component
 
}
 

	
 
impl TypeClass {
 
    pub(crate) fn display_name(&self) -> &'static str {
 
        match self {
 
            TypeClass::Enum => "enum",
 
            TypeClass::Union => "union",
 
            TypeClass::Struct => "struct",
 
            TypeClass::Function => "function",
 
            TypeClass::Component => "component",
 
        }
 
    }
 

	
 
    pub(crate) fn is_data_type(&self) -> bool {
 
        *self == TypeClass::Enum || *self == TypeClass::Union || *self == TypeClass::Struct
 
    }
 

	
 
    pub(crate) fn is_proc_type(&self) -> bool {
 
        *self == TypeClass::Function || *self == TypeClass::Component
 
    }
 
}
 

	
 
impl std::fmt::Display for TypeClass {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
 
        write!(f, "{}", self.display_name())
 
    }
 
}
 

	
 
/// Struct wrapping around a potentially polymorphic type. If the type does not
 
/// have any polymorphic arguments then it will not have any monomorphs and
 
/// `is_polymorph` will be set to `false`. A type with polymorphic arguments
 
/// only has `is_polymorph` set to `true` if the polymorphic arguments actually
 
/// appear in the types associated types (function return argument, struct
 
/// field, enum variant, etc.). Otherwise the polymorphic argument is just a
 
/// marker and does not influence the bytesize of the type.
 
pub struct DefinedType {
 
    pub(crate) ast_root: RootId,
 
    pub(crate) ast_definition: DefinitionId,
 
    pub(crate) definition: DefinedTypeVariant,
 
    pub(crate) poly_vars: Vec<PolymorphicVariable>,
 
    pub(crate) is_polymorph: bool,
 
    pub(crate) is_pointerlike: bool,
 
    // TODO: @optimize
 
    pub(crate) monomorphs: Vec<Vec<ConcreteType>>,
 
}
 

	
 
impl DefinedType {
 
    fn add_monomorph(&mut self, types: Vec<ConcreteType>) {
 
        debug_assert!(!self.has_monomorph(&types), "monomorph already exists");
 
        self.monomorphs.push(types);
 
    }
 

	
 
    pub(crate) fn has_any_monomorph(&self) -> bool {
 
        !self.monomorphs.is_empty()
 
    }
 

	
 
    pub(crate) fn has_monomorph(&self, types: &Vec<ConcreteType>) -> bool {
 
        debug_assert_eq!(self.poly_vars.len(), types.len(), "mismatch in number of polymorphic types");
 
        for monomorph in &self.monomorphs {
 
            if monomorph == types { return true; }
 
        }
 

	
 
        return false;
 
    }
 
}
 

	
 
pub enum DefinedTypeVariant {
 
    Enum(EnumType),
 
    Union(UnionType),
 
    Struct(StructType),
 
    Function(FunctionType),
 
    Component(ComponentType)
 
}
 

	
 
impl DefinedTypeVariant {
 
    pub(crate) fn type_class(&self) -> TypeClass {
 
        match self {
 
            DefinedTypeVariant::Enum(_) => TypeClass::Enum,
 
            DefinedTypeVariant::Union(_) => TypeClass::Union,
 
            DefinedTypeVariant::Struct(_) => TypeClass::Struct,
 
            DefinedTypeVariant::Function(_) => TypeClass::Function,
 
            DefinedTypeVariant::Component(_) => TypeClass::Component
 
        }
 
    }
 

	
 
    pub(crate) fn as_struct(&self) -> &StructType {
 
        match self {
 
            DefinedTypeVariant::Struct(v) => v,
 
            _ => unreachable!("Cannot convert {} to struct variant", self.type_class())
 
        }
 
    }
 

	
 
    pub(crate) fn as_enum(&self) -> &EnumType {
 
        match self {
 
            DefinedTypeVariant::Enum(v) => v,
 
            _ => unreachable!("Cannot convert {} to enum variant", self.type_class())
 
        }
 
    }
 

	
 
    pub(crate) fn as_union(&self) -> &UnionType {
 
        match self {
 
            DefinedTypeVariant::Union(v) => v,
 
            _ => unreachable!("Cannot convert {} to union variant", self.type_class())
 
        }
 
    }
 
}
 

	
 
struct PolymorphicVariable {
 
    identifier: Identifier,
 
    is_in_use: bool, // a polymorphic argument may be defined, but not used by the type definition
 
}
 

	
 
/// `EnumType` is the classical C/C++ enum type. It has various variants with
 
/// an assigned integer value. The integer values may be user-defined,
 
/// compiler-defined, or a mix of the two. If a user assigns the same enum
 
/// value multiple times, we assume the user is an expert and we consider both
 
/// variants to be equal to one another.
 
pub struct EnumType {
 
    pub(crate) variants: Vec<EnumVariant>,
 
    pub(crate) representation: PrimitiveType,
 
}
 

	
 
// TODO: Also support maximum u64 value
 
pub struct EnumVariant {
 
    pub(crate) identifier: Identifier,
 
    pub(crate) value: i64,
 
}
 

	
 
/// `UnionType` is the algebraic datatype (or sum type, or discriminated union).
 
/// A value is an element of the union, identified by its tag, and may contain
 
/// zero or more embedded values.
 
pub struct UnionType {
 
    pub(crate) variants: Vec<UnionVariant>,
 
    pub(crate) tag_representation: PrimitiveType
 
}
 

	
 
pub struct UnionVariant {
 
    pub(crate) identifier: Identifier,
 
    pub(crate) embedded: Vec<ParserType>, // zero-length does not have embedded values
 
    pub(crate) tag_value: i64,
 
}
 

	
 
pub struct StructType {
 
    pub(crate) fields: Vec<StructField>,
 
}
 

	
 
pub struct StructField {
 
    pub(crate) identifier: Identifier,
 
    pub(crate) parser_type: ParserType,
 
}
 

	
 
pub struct FunctionType {
 
    pub return_types: Vec<ParserType>,
 
    pub arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct ComponentType {
 
    pub variant: ComponentVariant,
 
    pub arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct FunctionArgument {
 
    identifier: Identifier,
 
    parser_type: ParserType,
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Type table
 
//------------------------------------------------------------------------------
 

	
 
// TODO: @cleanup Do I really need this, doesn't make the code that much cleaner
 
struct TypeIterator {
 
    breadcrumbs: Vec<(RootId, DefinitionId)>
 
}
 

	
 
impl TypeIterator {
 
    fn new() -> Self {
 
        Self{ breadcrumbs: Vec::with_capacity(32) }
 
    }
 

	
 
    fn reset(&mut self, root_id: RootId, definition_id: DefinitionId) {
 
        self.breadcrumbs.clear();
 
        self.breadcrumbs.push((root_id, definition_id))
 
    }
 

	
 
    fn push(&mut self, root_id: RootId, definition_id: DefinitionId) {
 
        self.breadcrumbs.push((root_id, definition_id));
 
    }
 

	
 
    fn contains(&self, root_id: RootId, definition_id: DefinitionId) -> bool {
 
        for (stored_root_id, stored_definition_id) in self.breadcrumbs.iter() {
 
            if *stored_root_id == root_id && *stored_definition_id == definition_id { return true; }
 
        }
 

	
 
        return false
 
    }
 

	
 
    fn top(&self) -> Option<(RootId, DefinitionId)> {
 
        self.breadcrumbs.last().map(|(r, d)| (*r, *d))
 
    }
 

	
 
    fn pop(&mut self) {
 
        debug_assert!(!self.breadcrumbs.is_empty());
 
        self.breadcrumbs.pop();
 
    }
 
}
 

	
 
/// Result from attempting to resolve a `ParserType` using the symbol table and
 
/// the type table.
 
enum ResolveResult {
 
    Builtin,
 
    PolymoprhicArgument,
 
    /// ParserType points to a user-defined type that is already resolved in the
 
    /// type table.
 
    Resolved(RootId, DefinitionId),
 
    /// ParserType points to a user-defined type that is not yet resolved into
 
    /// the type table.
 
    Unresolved(RootId, DefinitionId)
 
}
 

	
 
pub(crate) struct TypeTable {
 
    /// Lookup from AST DefinitionId to a defined type. Considering possible
 
    /// polymorphs is done inside the `DefinedType` struct.
 
    lookup: HashMap<DefinitionId, DefinedType>,
 
    /// Iterator over `(module, definition)` tuples used as workspace to make sure
 
    /// that the base definitions of all of a type's subtypes are resolved.
 
    iter: TypeIterator,
 
    /// Iterator over `parser type`s during the process where `parser types` are
 
    /// resolved into a `(module, definition)` tuple.
 
    parser_type_iter: VecDeque<ParserTypeId>,
 
}
 

	
 
pub(crate) struct TypeCtx<'a> {
 
    symbols: &'a SymbolTable,
 
    heap: &'a mut Heap,
 
    modules: &'a [Module]
 
}
 

	
 
impl<'a> TypeCtx<'a> {
 
    pub(crate) fn new(symbols: &'a SymbolTable, heap: &'a mut Heap, modules: &'a [Module]) -> Self {
 
        Self{ symbols, heap, modules }
 
    }
 
}
 

	
 
impl TypeTable {
 
    /// Construct a new type table without any resolved types.
 
    pub(crate) fn new() -> Self {
 
        Self{ 
 
            lookup: HashMap::new(), 
 
            iter: TypeIterator::new(), 
 
            parser_type_iter: VecDeque::with_capacity(64), 
 
        }
 
    }
 

	
 
    pub(crate) fn build_base_types(&mut self, ctx: &mut TypeCtx) -> Result<(), ParseError> {
 
    pub(crate) fn build_base_types(&mut self, modules: &mut [Module], ctx: &mut PassCtx) -> Result<(), ParseError> {
 
        // Make sure we're allowed to cast root_id to index into ctx.modules
 
        debug_assert!(modules.iter().all(|m| m.phase >= ModuleCompilationPhase::DefinitionsParsed));
 
        debug_assert!(self.lookup.is_empty());
 
        debug_assert!(self.iter.top().is_none());
 
        debug_assert!(self.parser_type_iter.is_empty());
 

	
 
        if cfg!(debug_assertions) {
 
            for (index, module) in ctx.modules.iter().enumerate() {
 
            for (index, module) in modules.iter().enumerate() {
 
                debug_assert_eq!(index, module.root_id.index as usize);
 
            }
 
        }
 

	
 
        // Use context to guess hashmap size
 
        let reserve_size = ctx.heap.definitions.len();
 
        self.lookup.reserve(reserve_size);
 

	
 
        for root_idx in 0..ctx.modules.len() {
 
            let last_definition_idx = ctx.heap[ctx.modules[root_idx].root_id].definitions.len();
 
        for root_idx in 0..modules.len() {
 
            let last_definition_idx = ctx.heap[modules[root_idx].root_id].definitions.len();
 
            for definition_idx in 0..last_definition_idx {
 
                let definition_id = ctx.heap[ctx.modules[root_idx].root_id].definitions[definition_idx];
 
                self.resolve_base_definition(ctx, definition_id)?;
 
                let definition_id = ctx.heap[modules[root_idx].root_id].definitions[definition_idx];
 
                self.resolve_base_definition(modules, ctx, definition_id)?;
 
            }
 
        }
 

	
 
        debug_assert_eq!(self.lookup.len(), reserve_size, "mismatch in reserved size of type table");
 
        for module in modules {
 
            module.phase = ModuleCompilationPhase::TypesAddedToTable;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    /// Retrieves base definition from type table. We must be able to retrieve
 
    /// it as we resolve all base types upon type table construction (for now).
 
    /// However, in the future we might do on-demand type resolving, so return
 
    /// an option anyway
 
    pub(crate) fn get_base_definition(&self, definition_id: &DefinitionId) -> Option<&DefinedType> {
 
        self.lookup.get(&definition_id)
 
    }
 

	
 
    /// Instantiates a monomorph for a given base definition.
 
    pub(crate) fn add_monomorph(&mut self, definition_id: &DefinitionId, types: Vec<ConcreteType>) {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to instantiate monomorph of definition unknown to type table"
 
        );
 

	
 
        let definition = self.lookup.get_mut(definition_id).unwrap();
 
        definition.add_monomorph(types);
 
    }
 

	
 
    /// Checks if a given definition already has a specific monomorph
 
    pub(crate) fn has_monomorph(&mut self, definition_id: &DefinitionId, types: &Vec<ConcreteType>) -> bool {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to check monomorph existence of definition unknown to type table"
 
        );
 

	
 
        let definition = self.lookup.get(definition_id).unwrap();
 
        definition.has_monomorph(types)
 
    }
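
    // Illustrative usage sketch (not part of the changeset): callers typically
    // guard `add_monomorph` with `has_monomorph`, e.g.
    //     if !type_table.has_monomorph(&definition_id, &types) {
    //         type_table.add_monomorph(&definition_id, types);
    //     }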
 

	
 
    /// This function will resolve just the basic definition of the type, it
 
    /// will not handle any of the monomorphized instances of the type.
 
    fn resolve_base_definition<'a>(&'a mut self, ctx: &mut TypeCtx, definition_id: DefinitionId) -> Result<(), ParseError> {
 
    fn resolve_base_definition<'a>(&'a mut self, modules: &[Module], ctx: &mut PassCtx, definition_id: DefinitionId) -> Result<(), ParseError> {
 
        // Check if we have already resolved the base definition
 
        if self.lookup.contains_key(&definition_id) { return Ok(()); }
 

	
 
        let root_id = Self::find_root_id(ctx, definition_id);
 
        let root_id = ctx.heap[definition_id].defined_in();
 
        self.iter.reset(root_id, definition_id);
 

	
 
        while let Some((root_id, definition_id)) = self.iter.top() {
 
            // We have a type to resolve
 
            let definition = &ctx.heap[definition_id];
 

	
 
            let can_pop_breadcrumb = match definition {
 
                // TODO: @cleanup Borrow rules hax
 
                Definition::Enum(_) => self.resolve_base_enum_definition(ctx, root_id, definition_id),
 
                Definition::Union(_) => self.resolve_base_union_definition(ctx, root_id, definition_id),
 
                Definition::Struct(_) => self.resolve_base_struct_definition(ctx, root_id, definition_id),
 
                Definition::Component(_) => self.resolve_base_component_definition(ctx, root_id, definition_id),
 
                Definition::Function(_) => self.resolve_base_function_definition(ctx, root_id, definition_id),
 
                Definition::Enum(_) => self.resolve_base_enum_definition(modules, ctx, root_id, definition_id),
 
                Definition::Union(_) => self.resolve_base_union_definition(modules, ctx, root_id, definition_id),
 
                Definition::Struct(_) => self.resolve_base_struct_definition(modules, ctx, root_id, definition_id),
 
                Definition::Component(_) => self.resolve_base_component_definition(modules, ctx, root_id, definition_id),
 
                Definition::Function(_) => self.resolve_base_function_definition(modules, ctx, root_id, definition_id),
 
            }?;
 

	
 
            // Otherwise: `ingest_resolve_result` has pushed a new breadcrumb
 
            // that we must follow before we can resolve the current type
 
            if can_pop_breadcrumb {
 
                self.iter.pop();
 
            }
 
        }
 

	
 
        // We must have resolved the type
 
        debug_assert!(self.lookup.contains_key(&definition_id), "base type not resolved");
 
        Ok(())
 
    }
 

	
 
    /// Resolve the basic enum definition to an entry in the type table. It will
 
    /// not instantiate any monomorphized instances of polymorphic enum
 
    /// definitions. If a subtype has to be resolved first then this function
 
    /// will return `false` after calling `ingest_resolve_result`.
 
    fn resolve_base_enum_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
    fn resolve_base_enum_definition(&mut self, modules: &[Module], ctx: &mut PassCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_enum());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base enum already resolved");
 
        
 
        let definition = ctx.heap[definition_id].as_enum();
 

	
 
        let mut enum_value = -1;
 
        let mut min_enum_value = 0;
 
        let mut max_enum_value = 0;
 
        let mut variants = Vec::with_capacity(definition.variants.len());
 
        for variant in &definition.variants {
 
            enum_value += 1;
 
            match &variant.value {
 
                EnumVariantValue::None => {
 
                    variants.push(EnumVariant{
 
                        identifier: variant.identifier.clone(),
 
                        value: enum_value,
 
                    });
 
                },
 
                EnumVariantValue::Integer(override_value) => {
 
                    enum_value = *override_value;
 
                    variants.push(EnumVariant{
 
                        identifier: variant.identifier.clone(),
 
                        value: enum_value,
 
                    });
 
                }
 
            }
 
            if enum_value < min_enum_value { min_enum_value = enum_value; }
 
            else if enum_value > max_enum_value { max_enum_value = enum_value; }
 
        }
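
        // For illustration: `enum E { A, B = 5, C }` is assigned the values
        // 0, 5 and 6, so min_enum_value = 0 and max_enum_value = 6.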
 

	
 
        // Ensure enum names and polymorphic args do not conflict
 
        self.check_identifier_collision(
 
            ctx, root_id, &variants, |variant| &variant.identifier, "enum variant"
 
            modules, root_id, &variants, |variant| &variant.identifier, "enum variant"
 
        )?;
 

	
 
        // Because we're parsing an enum, the programmer cannot put the
 
        // polymorphic variables inside the variants. But the polymorphic
 
        // variables might still be present as "marker types"
 
        self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 
        self.check_poly_args_collision(modules, ctx, root_id, &definition.poly_vars)?;
 
        let poly_vars = Self::create_polymorphic_variables(&definition.poly_vars);
 

	
 
        self.lookup.insert(definition_id, DefinedType {
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Enum(EnumType{
 
                variants,
 
                representation: Self::enum_tag_type(min_enum_value, max_enum_value)
 
            }),
 
            poly_vars,
 
            is_polymorph: false,
 
            is_pointerlike: false,
 
            monomorphs: Vec::new()
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic union definiton to an entry in the type table. It
 
    /// will not instantiate any monomorphized instances of polymorphic union
 
    /// definitions. If a subtype has to be resolved first then this function
 
    /// will return `false` after calling `ingest_resolve_result`.
 
    fn resolve_base_union_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
    fn resolve_base_union_definition(&mut self, modules: &[Module], ctx: &mut PassCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_union());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base union already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_union();
 

	
 
        // Make sure all embedded types are resolved
 
        for variant in &definition.variants {
 
            match &variant.value {
 
                UnionVariantValue::None => {},
 
                UnionVariantValue::Embedded(embedded) => {
 
                    for parser_type in embedded {
 
                        let resolve_result = self.resolve_base_parser_type(ctx, root_id, parser_type)?;
 
                        if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                        let resolve_result = self.resolve_base_parser_type(modules, ctx, root_id, parser_type)?;
 
                        if !self.ingest_resolve_result(modules, ctx, resolve_result)? {
 
                            return Ok(false)
 
                        }
 
                    }
 
                }
 
            }
 
        }
 

	
 
        // If here then all embedded types are resolved
 

	
 
        // Determine the union variants
 
        let mut tag_value = -1;
 
        let mut variants = Vec::with_capacity(definition.variants.len());
 
        for variant in &definition.variants {
 
            tag_value += 1;
 
            let embedded = match &variant.value {
 
                UnionVariantValue::None => { Vec::new() },
 
                UnionVariantValue::Embedded(embedded) => {
 
                    // Type should be resolvable, we checked this above
 
                    embedded.clone()
 
                },
 
            };
 

	
 
            variants.push(UnionVariant{
 
                identifier: variant.identifier.clone(),
 
                embedded,
 
                tag_value,
 
            })
 
        }
 

	
 
        // Ensure union names and polymorphic args do not conflict
 
        self.check_identifier_collision(
 
            ctx, root_id, &variants, |variant| &variant.identifier, "union variant"
 
            modules, root_id, &variants, |variant| &variant.identifier, "union variant"
 
        )?;
 
        self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 
        self.check_poly_args_collision(modules, ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct polymorphic variables and mark the ones that are in use
 
        let mut poly_vars = Self::create_polymorphic_variables(&definition.poly_vars);
 
        for variant in &variants {
 
            for parser_type in &variant.embedded {
 
                Self::mark_used_polymorphic_variables(&mut poly_vars, parser_type);
 
            }
 
        }
 
        let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 
        let is_polymorph = poly_vars.iter().any(|arg| arg.is_in_use);
 

	
 
        // Insert base definition in type table
 
        self.lookup.insert(definition_id, DefinedType {
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Union(UnionType{
 
                variants,
 
                tag_representation: Self::enum_tag_type(-1, tag_value),
 
            }),
 
            poly_vars: poly_args,
 
            poly_vars,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic_types
 
            monomorphs: Vec::new()
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic struct definition to an entry in the type table. It
 
    /// will not instantiate any monomorphized instances of polymorphic struct
 
    /// definitions.
 
    fn resolve_base_struct_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
    fn resolve_base_struct_definition(&mut self, modules: &[Module], ctx: &mut PassCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_struct());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base struct already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_struct();
 

	
 
        // Make sure all fields point to resolvable types
 
        for field_definition in &definition.fields {
 
            let resolve_result = self.resolve_base_parser_type(ctx, root_id, &field_definition.parser_type)?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
            let resolve_result = self.resolve_base_parser_type(modules, ctx, root_id, &field_definition.parser_type)?;
 
            if !self.ingest_resolve_result(modules, ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 

	
 
        // All field types are resolved, construct base type
 
        let mut fields = Vec::with_capacity(definition.fields.len());
 
        for field_definition in &definition.fields {
 
            fields.push(StructField{
 
                identifier: field_definition.field.clone(),
 
                parser_type: field_definition.parser_type.clone(),
 
            })
 
        }
 

	
 
        // And make sure no conflicts exist in field names and/or polymorphic args
 
        self.check_identifier_collision(
 
            ctx, root_id, &fields, |field| &field.identifier, "struct field"
 
            modules, root_id, &fields, |field| &field.identifier, "struct field"
 
        )?;
 
        self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 
        self.check_poly_args_collision(modules, ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct representation of polymorphic arguments
 
        let mut poly_vars = Self::create_polymorphic_variables(&definition.poly_vars);
 
        for field in &fields {
 
            Self::mark_used_polymorphic_variables(&mut poly_vars, &field.parser_type);
 
        }
 

	
 
        let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 
        let is_polymorph = poly_vars.iter().any(|arg| arg.is_in_use);
 

	
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Struct(StructType{
 
                fields,
 
            }),
 
            poly_vars: poly_args,
 
            poly_vars,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic function definition to an entry in the type table. It
 
    /// will not instantiate any monomorphized instances of polymorphic function
 
    /// definitions.
 
    fn resolve_base_function_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
    fn resolve_base_function_definition(&mut self, modules: &[Module], ctx: &mut PassCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_function());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base function already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_function();
 

	
 
        // Check the return type
 
        debug_assert_eq!(definition.return_types.len(), 1, "not one return type"); // TODO: @ReturnValues
 
        let resolve_result = self.resolve_base_parser_type(ctx, root_id, &definition.return_types[0])?;
 
        if !self.ingest_resolve_result(ctx, resolve_result)? {
 
        let resolve_result = self.resolve_base_parser_type(modules, ctx, root_id, &definition.return_types[0])?;
 
        if !self.ingest_resolve_result(modules, ctx, resolve_result)? {
 
            return Ok(false)
 
        }
 

	
 
        // Check the argument types
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            let resolve_result = self.resolve_base_parser_type(ctx, root_id, &param.parser_type)?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
            let resolve_result = self.resolve_base_parser_type(modules, ctx, root_id, &param.parser_type)?;
 
            if !self.ingest_resolve_result(modules, ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 

	
 
        // Construct arguments to function
 
        let mut arguments = Vec::with_capacity(definition.parameters.len());
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            arguments.push(FunctionArgument{
 
                identifier: param.identifier.clone(),
 
                parser_type: param.parser_type.clone(),
 
            })
 
        }
 

	
 
        // Check conflict of argument and polyarg identifiers
 
        self.check_identifier_collision(
 
            ctx, root_id, &arguments, |arg| &arg.identifier, "function argument"
 
            modules, root_id, &arguments, |arg| &arg.identifier, "function argument"
 
        )?;
 
        self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 
        self.check_poly_args_collision(modules, ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct polymorphic arguments
 
        let mut poly_vars = Self::create_polymorphic_variables(&definition.poly_vars);
 
        Self::mark_used_polymorphic_variables(&mut poly_vars, &definition.return_types[0]);
 
        for argument in &arguments {
 
            Self::mark_used_polymorphic_variables(&mut poly_vars, &argument.parser_type);
 
        }
 
        let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 
        let is_polymorph = poly_vars.iter().any(|arg| arg.is_in_use);
 

	
 
        // Construct entry in type table
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Function(FunctionType{
 
                return_types: definition.return_types.clone(),
 
                arguments,
 
            }),
 
            poly_vars: poly_args,
 
            poly_vars,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic component definition to an entry in the type table.
 
    /// It will not instantiate any monomorphized instances of polymorphic
 
    /// component definitions.
 
    fn resolve_base_component_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
    fn resolve_base_component_definition(&mut self, modules: &[Module], ctx: &mut PassCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_component());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base component already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_component();
 
        let component_variant = definition.variant;
 

	
 
        // Check argument types
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            let resolve_result = self.resolve_base_parser_type(ctx, root_id, &param.parser_type)?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
            let resolve_result = self.resolve_base_parser_type(modules, ctx, root_id, &param.parser_type)?;
 
            if !self.ingest_resolve_result(modules, ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 

	
 
        // Construct argument types
 
        let mut arguments = Vec::with_capacity(definition.parameters.len());
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            arguments.push(FunctionArgument{
 
                identifier: param.identifier.clone(),
 
                parser_type: param.parser_type.clone()
 
            })
 
        }
 

	
 
        // Check conflict of argument and polyarg identifiers
 
        self.check_identifier_collision(
 
            ctx, root_id, &arguments, |arg| &arg.identifier, "component argument"
 
            modules, root_id, &arguments, |arg| &arg.identifier, "component argument"
 
        )?;
 
        self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 
        self.check_poly_args_collision(modules, ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct polymorphic arguments
 
        let mut poly_vars = Self::create_polymorphic_variables(&definition.poly_vars);
 
        for argument in &arguments {
 
            Self::mark_used_polymorphic_variables(&mut poly_vars, &argument.parser_type)?;
 
            Self::mark_used_polymorphic_variables(&mut poly_vars, &argument.parser_type);
 
        }
 

	
 
        let is_polymorph = poly_vars.iter().any(|v| v.is_in_use);
 

	
 
        // Construct entry in type table
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Component(ComponentType{
 
                variant: component_variant,
 
                arguments,
 
            }),
 
            poly_vars,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Takes a ResolveResult and returns `true` if the caller can happily
 
    /// continue resolving its current type, or `false` if the caller must break
 
    /// resolving the current type and exit to the outer resolving loop. In the
 
    /// latter case the `result` value was `ResolveResult::Unresolved`, implying
 
    /// that the type must be resolved first.
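    ///
    /// A sketch of the intended calling pattern (hypothetical caller, mirroring
    /// the `resolve_base_*` methods above):
    ///
    /// ```ignore
    /// let result = self.resolve_base_parser_type(modules, ctx, root_id, parser_type)?;
    /// if !self.ingest_resolve_result(modules, ctx, result)? {
    ///     // The unresolved dependency was pushed onto `self.iter`; resolve it first.
    ///     return Ok(false);
    /// }
    /// ```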
 
    fn ingest_resolve_result(&mut self, ctx: &TypeCtx, result: ResolveResult) -> Result<bool, ParseError> {
 
    fn ingest_resolve_result(&mut self, modules: &[Module], ctx: &PassCtx, result: ResolveResult) -> Result<bool, ParseError> {
 
        match result {
 
            ResolveResult::Builtin | ResolveResult::PolymoprhicArgument => Ok(true),
 
            ResolveResult::Resolved(_, _) => Ok(true),
 
            ResolveResult::Unresolved(root_id, definition_id) => {
 
                if self.iter.contains(root_id, definition_id) {
 
                    // Cyclic dependency encountered
 
                    // TODO: Allow this
 
                    let module_source = &ctx.modules[root_id.index as usize].source;
 
                    let module_source = &modules[root_id.index as usize].source;
 
                    let mut error = ParseError::new_error_str_at_span(
 
                        module_source, ctx.heap[definition_id].identifier().span,
 
                        "Evaluating this type definition results in a cyclic type"
 
                    );
 

	
 
                    for (breadcrumb_idx, (root_id, definition_id)) in self.iter.breadcrumbs.iter().enumerate() {
 
                        let msg = if breadcrumb_idx == 0 {
 
                            "The cycle started with this definition"
 
                        } else {
 
                            "Which depends on this definition"
 
                        };
 

	
 
                        let module_source = &ctx.modules[root_id.index as usize].source;
 
                        let module_source = &modules[root_id.index as usize].source;
 
                        error = error.with_info_str_at_span(module_source, ctx.heap[*definition_id].identifier().span, msg);
 
                    }
 

	
 
                    Err(error)
 
                } else {
 
                    // Type is not yet resolved, so push IDs on iterator and
 
                    // continue the resolving loop
 
                    self.iter.push(root_id, definition_id);
 
                    Ok(false)
 
                }
 
            }
 
        }
 
    }
 

	
 
    /// Each type may consist of embedded types. If this type does not have a
 
    /// fixed implementation (e.g. an input port may have an embedded type
 
    /// indicating the type of messages, but it always exists in the runtime as
 
    /// a port identifier, so it has a fixed implementation) then this function
 
    /// will traverse the embedded types to ensure all of them are resolved.
 
    ///
 
    /// Hence if one checks a particular parser type for being resolved, one may
 
    /// get back a result value indicating an embedded type (with a different
 
    /// DefinitionId) is unresolved.
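    ///
    /// For example (illustrative): when checking a field whose type is an array
    /// of some user-defined struct, the array element resolves as `Builtin`,
    /// while the struct is reported as `Resolved` or `Unresolved` depending on
    /// whether it already has an entry in `self.lookup`.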
 
    fn resolve_base_parser_type(&mut self, ctx: &TypeCtx, root_id: RootId, parser_type: &ParserType) -> Result<ResolveResult, ParseError> {
 
    fn resolve_base_parser_type(&mut self, modules: &[Module], ctx: &PassCtx, root_id: RootId, parser_type: &ParserType) -> Result<ResolveResult, ParseError> {
 
        // Note that as we iterate over the elements of a parser type, we also
        // visit all of the types embedded within it.
 
        use ParserTypeVariant as PTV;
 

	
 
        // Result for the very first time we resolve a type (i.e. the outer type
 
        // that we're actually looking up)
 
        let mut resolve_result = None;
 
        let mut set_resolve_result = |v: ResolveResult| {
 
            if resolve_result.is_none() { resolve_result = Some(v); }
 
        };
 

	
 
        for element in parser_type.elements.iter() {
 
            match element.variant {
 
                PTV::Message | PTV::Bool |
 
                PTV::UInt8 | PTV::UInt16 | PTV::UInt32 | PTV::UInt64 |
 
                PTV::SInt8 | PTV::SInt16 | PTV::SInt32 | PTV::SInt64 |
 
                PTV::Character | PTV::String |
 
                PTV::Array | PTV::Input | PTV::Output => {
 
                    // Nothing to do: these are builtin types or types with a
 
                    // fixed implementation
 
                    set_resolve_result(ResolveResult::Builtin);
 
                },
 
                PTV::IntegerLiteral | PTV::Inferred => {
 
                    // As we're parsing the type definitions where these kinds
 
                    // of types are impossible/disallowed to express:
 
                    unreachable!("illegal ParserTypeVariant within type definition");
 
                },
 
                PTV::PolymorphicArgument(_, _) => {
 
                    set_resolve_result(ResolveResult::PolymoprhicArgument);
 
                },
 
                PTV::Definition(embedded_id, _) => {
 
                    let definition = &ctx.heap[embedded_id];
 
                    if !(definition.is_struct() || definition.is_enum() || definition.is_union()) {
 
                        let module_source = &ctx.modules[root_id.index as usize].source;
 
                        let module_source = &modules[root_id.index as usize].source;
 
                        return Err(ParseError::new_error_str_at_span(
 
                            module_source, element.full_span, "expected a datatype (struct, enum or union)"
 
                        ))
 
                    }
 

	
 
                    if self.lookup.contains_key(&embedded_id) {
 
                        set_resolve_result(ResolveResult::Resolved(definition.defined_in(), embedded_id))
 
                    } else {
 
                        return Ok(ResolveResult::Unresolved(definition.defined_in(), embedded_id))
 
                    }
 
                }
 
            }
 
        }
 

	
 
        // If here then all types in the embedded type's tree were resolved.
 
        debug_assert!(resolve_result.is_some(), "faulty logic in ParserType resolver");
 
        return Ok(resolve_result.unwrap())
 
    }
 

	
 
    /// Go through a list of identifiers and ensure that all identifiers have
 
    /// unique names
 
    fn check_identifier_collision<T: Sized, F: Fn(&T) -> &Identifier>(
 
        &self, ctx: &TypeCtx, root_id: RootId, items: &[T], getter: F, item_name: &'static str
 
        &self, modules: &[Module], root_id: RootId, items: &[T], getter: F, item_name: &'static str
 
    ) -> Result<(), ParseError> {
 
        for (item_idx, item) in items.iter().enumerate() {
 
            let item_ident = getter(item);
 
            for other_item in &items[0..item_idx] {
 
                let other_item_ident = getter(other_item);
 
                if item_ident == other_item_ident {
 
                    let module_source = &ctx.modules[root_id.index as usize].source;
 
                    let module_source = &modules[root_id.index as usize].source;
 
                    return Err(ParseError::new_error_at_span(
 
                        module_source, item_ident.span, format!("This {} is defined more than once", item_name)
 
                    ).with_info_at_span(
 
                        module_source, other_item_ident.span, format!("The other {} is defined here", item_name)
 
                    ));
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    /// Go through a list of polymorphic arguments and make sure that the
 
    /// arguments all have unique names, and the arguments do not conflict with
 
    /// any symbols defined at the module scope.
 
    fn check_poly_args_collision(
 
        &self, ctx: &TypeCtx, root_id: RootId, poly_args: &[Identifier]
 
        &self, modules: &[Module], ctx: &PassCtx, root_id: RootId, poly_args: &[Identifier]
 
    ) -> Result<(), ParseError> {
 
        // Make sure polymorphic arguments are unique and none of the
 
        // identifiers conflict with any imported scopes
 
        for (arg_idx, poly_arg) in poly_args.iter().enumerate() {
 
            for other_poly_arg in &poly_args[..arg_idx] {
 
                if poly_arg == other_poly_arg {
 
                    let module_source = &ctx.modules[root_id.index as usize].source;
 
                    let module_source = &modules[root_id.index as usize].source;
 
                    return Err(ParseError::new_error_str_at_span(
 
                        module_source, poly_arg.span,
 
                        "This polymorphic argument is defined more than once"
 
                    ).with_postfixed_info(
 
                    ).with_info_str_at_span(
 
                        module_source, other_poly_arg.span,
 
                        "It conflicts with this polymorphic argument"
 
                    ));
 
                }
 
            }
 

	
 
            // Check if identifier conflicts with a symbol defined or imported
 
            // in the current module
 
            if let Some(symbol) = ctx.symbols.get_symbol_by_name(SymbolScope::Module(root_id), poly_arg.value.as_bytes()) {
 
                // We have a conflict
 
                let module_source = &ctx.modules[root_id.index as usize].source;
 
                let module_source = &modules[root_id.index as usize].source;
 
                let introduction_span = symbol.variant.span_of_introduction(ctx.heap);
 
                return Err(ParseError::new_error_str_at_span(
 
                    module_source, poly_arg.span,
 
                    "This polymorphic argument conflicts with another symbol"
 
                ).with_info_str_at_span(
 
                    module_source, introduction_span,
 
                    "It conflicts due to this symbol"
 
                ));
 
            }
 
        }
 

	
 
        // All arguments are fine
 
        Ok(())
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Small utilities
 
    //--------------------------------------------------------------------------
 

	
 
    fn create_polymorphic_variables(variables: &[Identifier]) -> Vec<PolymorphicVariable> {
 
        let mut result = Vec::with_capacity(variables.len());
 
        for variable in variables.iter() {
 
            result.push(PolymorphicVariable{ identifier: variable.clone(), is_in_use: false });
 
        }
 

	
 
        result
 
    }
 

	
 
    fn mark_used_polymorphic_variables(poly_vars: &mut Vec<PolymorphicVariable>, parser_type: &ParserType) {
 
        for element in & parser_type.elements {
 
            if let ParserTypeVariant::PolymorphicArgument(_, idx) = element {
 
            if let ParserTypeVariant::PolymorphicArgument(_, idx) = &element.variant {
 
                poly_vars[*idx].is_in_use = true;
 
            }
 
        }
 
    }
 

	
 
    fn enum_tag_type(min_tag_value: i64, max_tag_value: i64) -> PrimitiveType {
 
        // TODO: @consistency tag values should be handled correctly
 
        debug_assert!(min_tag_value <= max_tag_value);
 
        let abs_max_value = min_tag_value.abs().max(max_tag_value.abs());
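        // E.g. (illustrative): tags in [-100, 100] give abs_max_value == 100,
        // so a Byte is selected; tags in [-40_000, 10] give 40_000 and select
        // a Short; anything beyond u32::MAX falls through to Long.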
 
        if abs_max_value <= u8::max_value() as i64 {
 
            PrimitiveType::Byte
 
        } else if abs_max_value <= u16::max_value() as i64 {
 
            PrimitiveType::Short
 
        } else if abs_max_value <= u32::max_value() as i64 {
 
            PrimitiveType::Int
 
        } else {
 
            PrimitiveType::Long
 
        }
 
    }
 

	
 
    fn find_root_id(ctx: &TypeCtx, definition_id: DefinitionId) -> RootId {
 
        // TODO: Keep in lookup or something
 
        for module in ctx.modules {
 
            let root_id = module.root_id;
 
            let root = &ctx.heap[root_id];
 
            for module_definition_id in root.definitions.iter() {
 
                if *module_definition_id == definition_id {
 
                    return root_id
 
                }
 
            }
 
        }
 

	
 
        debug_assert!(false, "DefinitionId without corresponding RootId");
 
        unreachable!();
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/utils.rs
Show inline comments
 
deleted file
src/protocol/parser/visitor.rs
Show inline comments
 
use crate::protocol::ast::*;
 
use crate::protocol::input_source2::ParseError;
 
use crate::protocol::input_source::ParseError;
 
use crate::protocol::parser::{type_table::*, Module};
 
use crate::protocol::symbol_table2::{SymbolTable};
 
use crate::protocol::symbol_table::{SymbolTable};
 

	
 
type Unit = ();
 
pub(crate) type VisitorResult = Result<Unit, ParseError>;
 

	
 
/// Globally configured vector capacity for statement buffers in visitor 
 
/// implementations
 
pub(crate) const STMT_BUFFER_INIT_CAPACITY: usize = 256;
 
/// Globally configured vector capacity for expression buffers in visitor
 
/// implementations
 
pub(crate) const EXPR_BUFFER_INIT_CAPACITY: usize = 256;
 
/// Globally configured vector capacity for parser type buffers in visitor
 
/// implementations
 
pub(crate) const TYPE_BUFFER_INIT_CAPACITY: usize = 128;
 

	
 
/// General context structure that is used while traversing the AST.
 
pub(crate) struct Ctx<'p> {
 
    pub heap: &'p mut Heap,
 
    pub module: &'p Module,
 
    pub symbols: &'p mut SymbolTable,
 
    pub types: &'p mut TypeTable,
 
}
 

	
 
/// Visitor is a generic trait that will fully walk the AST. The default
 
/// implementation of the visitors is to not recurse. The exception is the
 
/// top-level `visit_definition`, `visit_stmt` and `visit_expr` methods, which
 
/// call the appropriate visitor function.
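///
/// A minimal sketch of a pass overriding a single method (the struct and its
/// counter are hypothetical):
///
/// ```ignore
/// struct StructCounter { count: usize }
///
/// impl Visitor2 for StructCounter {
///     fn visit_struct_definition(&mut self, _ctx: &mut Ctx, _id: StructDefinitionId) -> VisitorResult {
///         self.count += 1;
///         Ok(())
///     }
/// }
/// ```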
 
pub(crate) trait Visitor2 {
 
    // Entry point
 
    fn visit_module(&mut self, ctx: &mut Ctx) -> VisitorResult {
 
        let mut def_index = 0;
 
        loop {
 
            let definition_id = {
 
                let root = &ctx.heap[ctx.module.root_id];
 
                if def_index >= root.definitions.len() {
 
                    return Ok(())
 
                }
 

	
 
                root.definitions[def_index]
 
            };
 

	
 
            self.visit_definition(ctx, definition_id)?;
 
            def_index += 1;
 
        }
 
    }
 

	
 
    // Definitions
 
    // --- enum matching
 
    fn visit_definition(&mut self, ctx: &mut Ctx, id: DefinitionId) -> VisitorResult {
 
        match &ctx.heap[id] {
 
            Definition::Enum(def) => {
 
                let def = def.this;
 
                self.visit_enum_definition(ctx, def)
 
            },
 
            Definition::Union(def) => {
 
                let def = def.this;
 
                self.visit_union_definition(ctx, def)
 
            }
 
            Definition::Struct(def) => {
 
                let def = def.this;
 
                self.visit_struct_definition(ctx, def)
 
            },
 
            Definition::Component(def) => {
 
                let def = def.this;
 
                self.visit_component_definition(ctx, def)
 
            },
 
            Definition::Function(def) => {
 
                let def = def.this;
 
                self.visit_function_definition(ctx, def)
 
            }
 
        }
 
    }
 

	
 
    // --- enum variant handling
 
    fn visit_enum_definition(&mut self, _ctx: &mut Ctx, _id: EnumId) -> VisitorResult { Ok(()) }
 
    fn visit_union_definition(&mut self, _ctx: &mut Ctx, _id: UnionId) -> VisitorResult{ Ok(()) }
 
    fn visit_struct_definition(&mut self, _ctx: &mut Ctx, _id: StructId) -> VisitorResult { Ok(()) }
 
    fn visit_enum_definition(&mut self, _ctx: &mut Ctx, _id: EnumDefinitionId) -> VisitorResult { Ok(()) }
 
    fn visit_union_definition(&mut self, _ctx: &mut Ctx, _id: UnionDefinitionId) -> VisitorResult{ Ok(()) }
 
    fn visit_struct_definition(&mut self, _ctx: &mut Ctx, _id: StructDefinitionId) -> VisitorResult { Ok(()) }
 
    fn visit_component_definition(&mut self, _ctx: &mut Ctx, _id: ComponentDefinitionId) -> VisitorResult { Ok(()) }
 
    fn visit_function_definition(&mut self, _ctx: &mut Ctx, _id: FunctionDefinitionId) -> VisitorResult { Ok(()) }
 

	
 
    // Statements
 
    // --- enum matching
 
    fn visit_stmt(&mut self, ctx: &mut Ctx, id: StatementId) -> VisitorResult {
 
        match &ctx.heap[id] {
 
            Statement::Block(stmt) => {
 
                let this = stmt.this;
 
                self.visit_block_stmt(ctx, this)
 
            },
 
            Statement::Local(stmt) => {
 
                let this = stmt.this();
 
                self.visit_local_stmt(ctx, this)
 
            },
 
            Statement::Labeled(stmt) => {
 
                let this = stmt.this;
 
                self.visit_labeled_stmt(ctx, this)
 
            },
 
            Statement::If(stmt) => {
 
                let this = stmt.this;
 
                self.visit_if_stmt(ctx, this)
 
            },
 
            Statement::EndIf(_stmt) => Ok(()),
 
            Statement::While(stmt) => {
 
                let this = stmt.this;
 
                self.visit_while_stmt(ctx, this)
 
            },
 
            Statement::EndWhile(_stmt) => Ok(()),
 
            Statement::Break(stmt) => {
 
                let this = stmt.this;
 
                self.visit_break_stmt(ctx, this)
 
            },
 
            Statement::Continue(stmt) => {
 
                let this = stmt.this;
 
                self.visit_continue_stmt(ctx, this)
 
            },
 
            Statement::Synchronous(stmt) => {
 
                let this = stmt.this;
 
                self.visit_synchronous_stmt(ctx, this)
 
            },
 
            Statement::EndSynchronous(_stmt) => Ok(()),
 
            Statement::Return(stmt) => {
 
                let this = stmt.this;
 
                self.visit_return_stmt(ctx, this)
 
            },
 
            Statement::Goto(stmt) => {
 
                let this = stmt.this;
 
                self.visit_goto_stmt(ctx, this)
 
            },
 
            Statement::New(stmt) => {
 
                let this = stmt.this;
 
                self.visit_new_stmt(ctx, this)
 
            },
 
            Statement::Expression(stmt) => {
 
                let this = stmt.this;
 
                self.visit_expr_stmt(ctx, this)
 
            }
 
        }
 
    }
 

	
 
    fn visit_local_stmt(&mut self, ctx: &mut Ctx, id: LocalStatementId) -> VisitorResult {
 
        match &ctx.heap[id] {
 
            LocalStatement::Channel(stmt) => {
 
                let this = stmt.this;
 
                self.visit_local_channel_stmt(ctx, this)
 
            },
 
            LocalStatement::Memory(stmt) => {
 
                let this = stmt.this;
 
                self.visit_local_memory_stmt(ctx, this)
 
            },
 
        }
 
    }
 

	
 
    // --- enum variant handling
 
    fn visit_block_stmt(&mut self, _ctx: &mut Ctx, _id: BlockStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_local_memory_stmt(&mut self, _ctx: &mut Ctx, _id: MemoryStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_local_channel_stmt(&mut self, _ctx: &mut Ctx, _id: ChannelStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_labeled_stmt(&mut self, _ctx: &mut Ctx, _id: LabeledStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_if_stmt(&mut self, _ctx: &mut Ctx, _id: IfStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_while_stmt(&mut self, _ctx: &mut Ctx, _id: WhileStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_break_stmt(&mut self, _ctx: &mut Ctx, _id: BreakStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_continue_stmt(&mut self, _ctx: &mut Ctx, _id: ContinueStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_synchronous_stmt(&mut self, _ctx: &mut Ctx, _id: SynchronousStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_return_stmt(&mut self, _ctx: &mut Ctx, _id: ReturnStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_goto_stmt(&mut self, _ctx: &mut Ctx, _id: GotoStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_new_stmt(&mut self, _ctx: &mut Ctx, _id: NewStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_expr_stmt(&mut self, _ctx: &mut Ctx, _id: ExpressionStatementId) -> VisitorResult { Ok(()) }
 

	
 
    // Expressions
 
    // --- enum matching
 
    fn visit_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> VisitorResult {
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let this = expr.this;
 
                self.visit_assignment_expr(ctx, this)
 
            },
 
            Expression::Binding(expr) => {
 
                let this = expr.this;
 
                self.visit_binding_expr(ctx, this)
 
            }
 
            Expression::Conditional(expr) => {
 
                let this = expr.this;
 
                self.visit_conditional_expr(ctx, this)
 
            }
 
            Expression::Binary(expr) => {
 
                let this = expr.this;
 
                self.visit_binary_expr(ctx, this)
 
            }
 
            Expression::Unary(expr) => {
 
                let this = expr.this;
 
                self.visit_unary_expr(ctx, this)
 
            }
 
            Expression::Indexing(expr) => {
 
                let this = expr.this;
 
                self.visit_indexing_expr(ctx, this)
 
            }
 
            Expression::Slicing(expr) => {
 
                let this = expr.this;
 
                self.visit_slicing_expr(ctx, this)
 
            }
 
            Expression::Select(expr) => {
 
                let this = expr.this;
 
                self.visit_select_expr(ctx, this)
 
            }
 
            Expression::Literal(expr) => {
 
                let this = expr.this;
 
                self.visit_literal_expr(ctx, this)
 
            }
 
            Expression::Call(expr) => {
 
                let this = expr.this;
 
                self.visit_call_expr(ctx, this)
 
            }
 
            Expression::Variable(expr) => {
 
                let this = expr.this;
 
                self.visit_variable_expr(ctx, this)
 
            }
 
        }
 
    }
 

	
 
    fn visit_assignment_expr(&mut self, _ctx: &mut Ctx, _id: AssignmentExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_binding_expr(&mut self, _ctx: &mut Ctx, _id: BindingExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_conditional_expr(&mut self, _ctx: &mut Ctx, _id: ConditionalExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_binary_expr(&mut self, _ctx: &mut Ctx, _id: BinaryExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_unary_expr(&mut self, _ctx: &mut Ctx, _id: UnaryExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_indexing_expr(&mut self, _ctx: &mut Ctx, _id: IndexingExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_slicing_expr(&mut self, _ctx: &mut Ctx, _id: SlicingExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_select_expr(&mut self, _ctx: &mut Ctx, _id: SelectExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_literal_expr(&mut self, _ctx: &mut Ctx, _id: LiteralExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_call_expr(&mut self, _ctx: &mut Ctx, _id: CallExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_variable_expr(&mut self, _ctx: &mut Ctx, _id: VariableExpressionId) -> VisitorResult { Ok(()) }
 
}
 
\ No newline at end of file
src/protocol/tests/utils.rs
Show inline comments
 
use crate::protocol::{
 
    ast::*,
 
    inputsource::*,
 
    input_source::*,
 
    parser::{
 
        *,
 
        type_table::TypeTable,
 
        symbol_table::SymbolTable,
 
        token_parsing::*,
 
    },
 
};
 

	
 
// Carries information about the test into utility structures for builder-like
 
// assertions
 
#[derive(Clone, Copy)]
 
struct TestCtx<'a> {
 
    test_name: &'a str,
 
    heap: &'a Heap,
 
    modules: &'a Vec<LexedModule>,
 
    types: &'a TypeTable,
 
    symbols: &'a SymbolTable,
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for parsing and compiling
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct Tester {
 
    test_name: String,
 
    sources: Vec<String>
 
}
 

	
 
impl Tester {
 
    /// Constructs a new tester, allows adding multiple sources before compiling
 
    pub(crate) fn new<S: ToString>(test_name: S) -> Self {
 
        Self{
 
            test_name: test_name.to_string(),
 
            sources: Vec::new()
 
        }
 
    }
 

	
 
    /// Utility for quick tests that use a single source file and expect the
 
    /// compilation to succeed.
 
    pub(crate) fn new_single_source_expect_ok<T: ToString, S: ToString>(test_name: T, source: S) -> AstOkTester {
 
        Self::new(test_name)
 
            .with_source(source)
 
            .compile()
 
            .expect_ok()
 
    }
 

	
 
    /// Utility for quick tests that use a single source file and expect the
 
    /// compilation to fail.
 
    pub(crate) fn new_single_source_expect_err<T: ToString, S: ToString>(test_name: T, source: S) -> AstErrTester {
 
        Self::new(test_name)
 
            .with_source(source)
 
            .compile()
 
            .expect_err()
 
    }
 

	
 
    pub(crate) fn with_source<S: ToString>(mut self, source: S) -> Self {
 
        self.sources.push(source.to_string());
 
        self
 
    }
 

	
 
    pub(crate) fn compile(self) -> AstTesterResult {
 
        let mut parser = Parser::new();
 
        for (source_idx, source) in self.sources.into_iter().enumerate() {
 
            let mut cursor = std::io::Cursor::new(source);
 
            let input_source = InputSource::new("", &mut cursor)
 
            let source = source.into_bytes();
 
            let input_source = InputSource::new(String::from(""), source)
 
                .expect(&format!("parsing source {}", source_idx + 1));
 

	
 
            if let Err(err) = parser.feed(input_source) {
 
                return AstTesterResult::Err(AstErrTester::new(self.test_name, err))
 
            }
 
        }
 

	
 
        if let Err(err) = parser.parse() {
 
            return AstTesterResult::Err(AstErrTester::new(self.test_name, err))
 
        }
 

	
 
        AstTesterResult::Ok(AstOkTester::new(self.test_name, parser))
 
    }
 
}
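// Hypothetical usage of the builder above, where `source_text` stands in for a
// module's source code (illustrative, not an actual test):
//
//     Tester::new("struct test")
//         .with_source(source_text)
//         .compile()
//         .expect_ok()
//         .for_struct("Foo", |s| { s.assert_num_fields(1); });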
 

	
 
pub(crate) enum AstTesterResult {
 
    Ok(AstOkTester),
 
    Err(AstErrTester)
 
}
 

	
 
impl AstTesterResult {
 
    pub(crate) fn expect_ok(self) -> AstOkTester {
 
        match self {
 
            AstTesterResult::Ok(v) => v,
 
            AstTesterResult::Err(err) => {
 
                let wrapped = ErrorTester{ test_name: &err.test_name, error: &err.error };
 
                println!("DEBUG: Full error:\n{}", &err.error);
 
                assert!(
 
                    false,
 
                    "[{}] Expected compilation to succeed, but it failed with {}",
 
                    err.test_name, wrapped.assert_postfix()
 
                );
 
                unreachable!();
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn expect_err(self) -> AstErrTester {
 
        match self {
 
            AstTesterResult::Ok(ok) => {
 
                assert!(false, "[{}] Expected compilation to fail, but it succeeded", ok.test_name);
 
                unreachable!();
 
            },
 
            AstTesterResult::Err(err) => err,
 
        }
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for successful compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct AstOkTester {
 
    test_name: String,
 
    modules: Vec<LexedModule>,
 
    heap: Heap,
 
    symbols: SymbolTable,
 
    types: TypeTable,
 
}
 

	
 
impl AstOkTester {
 
    fn new(test_name: String, parser: Parser) -> Self {
 
        Self {
 
            test_name,
 
            modules: parser.modules,
 
            heap: parser.heap,
 
            symbols: parser.symbol_table,
 
            types: parser.type_table,
 
        }
 
    }
 

	
 
    pub(crate) fn for_struct<F: Fn(StructTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Struct(definition) = definition {
 
                if String::from_utf8_lossy(&definition.identifier.value) != name {
 
                if definition.identifier.value.as_str() != name {
 
                    continue;
 
                }
 

	
 
                // Found struct with the same name
 
                let tester = StructTester::new(self.ctx(), definition);
 
                f(tester);
 
                found = true;
 
                break
 
            }
 
        }
 

	
 
        assert!(
 
            found, "[{}] Failed to find definition for struct '{}'",
 
            self.test_name, name
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn for_enum<F: Fn(EnumTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Enum(definition) = definition {
 
                if String::from_utf8_lossy(&definition.identifier.value) != name {
 
                if definition.identifier.value.as_str() != name {
 
                    continue;
 
                }
 

	
 
                // Found enum with the same name
 
                let tester = EnumTester::new(self.ctx(), definition);
 
                f(tester);
 
                found = true;
 
                break;
 
            }
 
        }
 

	
 
        assert!(
 
            found, "[{}] Failed to find definition for enum '{}'",
 
            self.test_name, name
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn for_union<F: Fn(UnionTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Union(definition) = definition {
 
                if String::from_utf8_lossy(&definition.identifier.value) != name {
 
                if definition.identifier.value.as_str() != name {
 
                    continue;
 
                }
 

	
 
                // Found union with the same name
 
                let tester = UnionTester::new(self.ctx(), definition);
 
                f(tester);
 
                found = true;
 
                break;
 
            }
 
        }
 

	
 
        assert!(
 
            found, "[{}] Failed to find definition for union '{}'",
 
            self.test_name, name
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn for_function<F: Fn(FunctionTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Function(definition) = definition {
 
                if String::from_utf8_lossy(&definition.identifier.value) != name {
 
                if definition.identifier.value.as_str() != name {
 
                    continue;
 
                }
 

	
 
                // Found function
 
                let tester = FunctionTester::new(self.ctx(), definition);
 
                f(tester);
 
                found = true;
 
                break;
 
            }
 
        }
 

	
 
        if found { return self }
 

	
 
        assert!(
 
            false, "[{}] failed to find definition for function '{}'",
 
            self.test_name, name
 
        );
 
        unreachable!();
 
    }
 

	
 
    fn ctx(&self) -> TestCtx {
 
        TestCtx{
 
            test_name: &self.test_name,
 
            modules: &self.modules,
 
            heap: &self.heap,
 
            types: &self.types,
 
            symbols: &self.symbols,
 
        }
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Utilities for successful compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct StructTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a StructDefinition,
 
}
 

	
 
impl<'a> StructTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a StructDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_num_fields(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.def.fields.len(),
 
            "[{}] Expected {} struct fields, but found {} for {}",
 
            self.ctx.test_name, num, self.def.fields.len(), self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_num_monomorphs(self, num: usize) -> Self {
 
        let (is_equal, num_encountered) = has_equal_num_monomorphs(self.ctx, num, self.def.this.upcast());
 
        assert!(
 
            is_equal, "[{}] Expected {} monomorphs, but got {} for {}",
 
            self.ctx.test_name, num, num_encountered, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    /// Asserts that a monomorph exist, separate polymorphic variable types by
 
    /// a semicolon.
 
    pub(crate) fn assert_has_monomorph(self, serialized_monomorph: &str) -> Self {
 
        let (has_monomorph, serialized) = has_monomorph(self.ctx, self.def.this.upcast(), serialized_monomorph);
 
        assert!(
 
            has_monomorph, "[{}] Expected to find monomorph {}, but got {} for {}",
 
            self.ctx.test_name, serialized_monomorph, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn for_field<F: Fn(StructFieldTester)>(self, name: &str, f: F) -> Self {
 
        // Find field with specified name
 
        for field in &self.def.fields {
 
            if String::from_utf8_lossy(&field.field.value) == name {
 
            if field.field.value.as_str() == name {
 
                let tester = StructFieldTester::new(self.ctx, field);
 
                f(tester);
 
                return self;
 
            }
 
        }
 

	
 
        assert!(
 
            false, "[{}] Could not find struct field '{}' for {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 
        unreachable!();
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("Struct{ name: ");
 
        v.push_str(&String::from_utf8_lossy(&self.def.identifier.value));
 
        v.push_str(self.def.identifier.value.as_str());
 
        v.push_str(", fields: [");
 
        for (field_idx, field) in self.def.fields.iter().enumerate() {
 
            if field_idx != 0 { v.push_str(", "); }
 
            v.push_str(&String::from_utf8_lossy(&field.field.value));
 
            v.push_str(field.field.value.as_str());
 
        }
 
        v.push_str("] }");
 
        v
 
    }
 
}
 

	
 
pub(crate) struct StructFieldTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a StructFieldDefinition,
 
}
 

	
 
impl<'a> StructFieldTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a StructFieldDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_parser_type(self, expected: &str) -> Self {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, self.def.parser_type);
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, &self.def.parser_type);
 
        assert_eq!(
 
            expected, &serialized_type,
 
            "[{}] Expected type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized_type, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, self.def.parser_type);
 
        format!(
 
            "StructField{{ name: {}, parser_type: {} }}",
 
            String::from_utf8_lossy(&self.def.field.value), serialized_type
 
        )
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, &self.def.parser_type);
 
        format!("StructField{{ name: {}, parser_type: {} }}", self.def.field.value.as_str(), serialized_type)
 
    }
 
}
 

	
 
pub(crate) struct EnumTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a EnumDefinition,
 
}
 

	
 
impl<'a> EnumTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a EnumDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_num_variants(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.def.variants.len(),
 
            "[{}] Expected {} enum variants, but found {} for {}",
 
            self.ctx.test_name, num, self.def.variants.len(), self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_num_monomorphs(self, num: usize) -> Self {
 
        let (is_equal, num_encountered) = has_equal_num_monomorphs(self.ctx, num, self.def.this.upcast());
 
        assert!(
 
            is_equal, "[{}] Expected {} monomorphs, but got {} for {}",
 
            self.ctx.test_name, num, num_encountered, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_has_monomorph(self, serialized_monomorph: &str) -> Self {
 
        let (has_monomorph, serialized) = has_monomorph(self.ctx, self.def.this.upcast(), serialized_monomorph);
 
        assert!(
 
            has_monomorph, "[{}] Expected to find monomorph {}, but got {} for {}",
 
            self.ctx.test_name, serialized_monomorph, serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("Enum{ name: ");
 
        v.push_str(&String::from_utf8_lossy(&self.def.identifier.value));
 
        v.push_str(self.def.identifier.value.as_str());
 
        v.push_str(", variants: [");
 
        for (variant_idx, variant) in self.def.variants.iter().enumerate() {
 
            if variant_idx != 0 { v.push_str(", "); }
 
            v.push_str(&String::from_utf8_lossy(&variant.identifier.value));
 
            v.push_str(variant.identifier.value.as_str());
 
        }
 
        v.push_str("] }");
 
        v
 
    }
 
}
 

	
 
pub(crate) struct UnionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a UnionDefinition,
 
}
 

	
 
impl<'a> UnionTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a UnionDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_num_variants(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.def.variants.len(),
 
            "[{}] Expected {} union variants, but found {} for {}",
 
            self.ctx.test_name, num, self.def.variants.len(), self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_num_monomorphs(self, num: usize) -> Self {
 
        let (is_equal, num_encountered) = has_equal_num_monomorphs(self.ctx, num, self.def.this.upcast());
 
        assert!(
 
            is_equal, "[{}] Expected {} monomorphs, but got {} for {}",
 
            self.ctx.test_name, num, num_encountered, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_has_monomorph(self, serialized_monomorph: &str) -> Self {
 
        let (has_monomorph, serialized) = has_monomorph(self.ctx, self.def.this.upcast(), serialized_monomorph);
 
        assert!(
 
            has_monomorph, "[{}] Expected to find monomorph {}, but got {} for {}",
 
            self.ctx.test_name, serialized_monomorph, serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("Union{ name: ");
 
        v.push_str(&String::from_utf8_lossy(&self.def.identifier.value));
 
        v.push_str(self.def.identifier.value.as_str());
 
        v.push_str(", variants: [");
 
        for (variant_idx, variant) in self.def.variants.iter().enumerate() {
 
            if variant_idx != 0 { v.push_str(", "); }
 
            v.push_str(&String::from_utf8_lossy(&variant.identifier.value));
 
            v.push_str(variant.identifier.value.as_str());
 
        }
 
        v.push_str("] }");
 
        v
 
    }
 
}
 

	
 
pub(crate) struct FunctionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a FunctionDefinition,
 
}
 

	
 
impl<'a> FunctionTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a FunctionDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn for_variable<F: Fn(VariableTester)>(self, name: &str, f: F) -> Self {
 
        // Find the memory statement in order to find the local
 
        let mem_stmt_id = seek_stmt(
 
            self.ctx.heap, self.def.body,
 
            self.ctx.heap, self.def.body.upcast(),
 
            &|stmt| {
 
                if let Statement::Local(local) = stmt {
 
                    if let LocalStatement::Memory(memory) = local {
 
                        let local = &self.ctx.heap[memory.variable];
 
                        if local.identifier.value == name.as_bytes() {
 
                        if local.identifier.value.as_str() == name {
 
                            return true;
 
                        }
 
                    }
 
                }
 

	
 
                false
 
            }
 
        );
 

	
 
        assert!(
 
            mem_stmt_id.is_some(), "[{}] Failed to find variable '{}' in {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 

	
 
        let mem_stmt_id = mem_stmt_id.unwrap();
 
        let local_id = self.ctx.heap[mem_stmt_id].as_memory().variable;
 
        let local = &self.ctx.heap[local_id];
 

	
 
        // Find the assignment expression that follows it
 
        let assignment_id = seek_expr_in_stmt(
 
            self.ctx.heap, self.def.body,
 
            self.ctx.heap, self.def.body.upcast(),
 
            &|expr| {
 
                if let Expression::Assignment(assign_expr) = expr {
 
                    if let Expression::Variable(variable_expr) = &self.ctx.heap[assign_expr.left] {
 
                        if variable_expr.position.offset == local.identifier.position.offset {
 
                            return true;
 
                        }
 
                    }
 
                }
 

	
 
                false
 
            }
 
        );
 

	
 
        assert!(
 
            assignment_id.is_some(), "[{}] Failed to find assignment to variable '{}' in {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 

	
 
        let assignment = &self.ctx.heap[assignment_id.unwrap()];
 

	
 
        // Construct tester and pass to tester function
 
        let tester = VariableTester::new(
 
            self.ctx, self.def.this.upcast(), local, 
 
            assignment.as_assignment()
 
        );
 
        f(tester);
 

	
 
        self
 
    }
 

	
 
    /// Finds a specific expression within a function. There are two matchers:
 
    /// one outer matcher (to find a rough indication of the expression) and an
 
    /// inner matcher to find the exact expression. 
 
    ///
 
    /// The reason being that, for example, a function's body might be littered
 
    /// with addition symbols, so we first match on "some_var + some_other_var",
 
    /// and then match exactly on "+".
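    ///
    /// An illustrative call (the matched source and expected type are
    /// hypothetical):
    ///
    /// ```ignore
    /// tester.for_expression_by_source("some_var + some_other_var", "+", |expr| {
    ///     expr.assert_concrete_type("<expected type>");
    /// });
    /// ```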
 
    pub(crate) fn for_expression_by_source<F: Fn(ExpressionTester)>(self, outer_match: &str, inner_match: &str, f: F) -> Self {
 
        // Seek the expression in the source code
 
        assert!(outer_match.contains(inner_match), "improper testing code");
 

	
 
        let module = seek_def_in_modules(
 
            &self.ctx.heap, &self.ctx.modules, self.def.this.upcast()
 
        ).unwrap();
 

	
 
        // Find the first occurrence of the expression after the definition of
 
        // the function, we'll check that it is included in the body later.
 
        let mut outer_match_idx = self.def.position.offset;
 
        while outer_match_idx < module.source.input.len() {
 
            if module.source.input[outer_match_idx..].starts_with(outer_match.as_bytes()) {
 
                break;
 
            }
 
            outer_match_idx += 1
 
        }
 

	
 
        assert!(
 
            outer_match_idx < module.source.input.len(),
 
            "[{}] Failed to find '{}' within the source that contains {}",
 
            self.ctx.test_name, outer_match, self.assert_postfix()
 
        );
 
        let inner_match_idx = outer_match_idx + outer_match.find(inner_match).unwrap();
 

	
 
        // Use the inner match index to find the expression
 
        let expr_id = seek_expr_in_stmt(
 
            &self.ctx.heap, self.def.body,
 
            &self.ctx.heap, self.def.body.upcast(),
 
            &|expr| expr.position().offset == inner_match_idx
 
        );
 
        assert!(
 
            expr_id.is_some(),
 
            "[{}] Failed to find '{}' within the source that contains {} \
 
            (note: expression was found, but not within the specified function)",
 
            self.ctx.test_name, outer_match, self.assert_postfix()
 
        );
 
        let expr_id = expr_id.unwrap();
 

	
 
        // We have the expression, call the testing function
 
        let tester = ExpressionTester::new(
 
            self.ctx, self.def.this.upcast(), &self.ctx.heap[expr_id]
 
        );
 
        f(tester);
 

	
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Function{{ name: {} }}",
 
            &String::from_utf8_lossy(&self.def.identifier.value)
 
        )
 
        format!("Function{{ name: {} }}", self.def.identifier.value.as_str())
 
    }
 
}
 

	
 
pub(crate) struct VariableTester<'a> {
 
    ctx: TestCtx<'a>,
 
    definition_id: DefinitionId,
 
    local: &'a Local,
 
    assignment: &'a AssignmentExpression,
 
}
 

	
 
impl<'a> VariableTester<'a> {
 
    fn new(
 
        ctx: TestCtx<'a>, definition_id: DefinitionId, local: &'a Local, assignment: &'a AssignmentExpression
 
    ) -> Self {
 
        Self{ ctx, definition_id, local, assignment }
 
    }
 

	
 
    pub(crate) fn assert_parser_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_parser_type(&mut serialized, self.ctx.heap, self.local.parser_type);
 
        serialize_parser_type(&mut serialized, self.ctx.heap, &self.local.parser_type);
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected parser type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_concrete_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_concrete_type(
 
            &mut serialized, self.ctx.heap, self.definition_id, 
 
            &self.assignment.concrete_type
 
        );
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected concrete type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        println!("DEBUG: {:?}", self.assignment.concrete_type);
 
        format!(
 
            "Variable{{ name: {} }}",
 
            &String::from_utf8_lossy(&self.local.identifier.value)
 
        )
 
        format!("Variable{{ name: {} }}", self.local.identifier.value.as_str())
 
    }
 
}
 

	
 
pub(crate) struct ExpressionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    definition_id: DefinitionId, // of the enclosing function/component
 
    expr: &'a Expression
 
}
 

	
 
impl<'a> ExpressionTester<'a> {
 
    fn new(
 
        ctx: TestCtx<'a>, definition_id: DefinitionId, expr: &'a Expression
 
    ) -> Self {
 
        Self{ ctx, definition_id, expr }
 
    }
 

	
 
    pub(crate) fn assert_concrete_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_concrete_type(
 
            &mut serialized, self.ctx.heap, self.definition_id,
 
            self.expr.get_type()
 
        );
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected concrete type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Expression{{ debug: {:?} }}",
 
            self.expr
 
        )
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct AstErrTester {
 
    test_name: String,
 
    error: ParseError,
 
}
 

	
 
impl AstErrTester {
 
    fn new(test_name: String, error: ParseError) -> Self {
 
        Self{ test_name, error }
 
    }
 

	
 
    pub(crate) fn error<F: Fn(ErrorTester)>(&self, f: F) {
 
        // Maybe multiple errors will be supported in the future
 
        let tester = ErrorTester{ test_name: &self.test_name, error: &self.error };
 
        f(tester)
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Utilities for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct ErrorTester<'a> {
 
    test_name: &'a str,
 
    error: &'a ParseError,
 
}
 

	
 
impl<'a> ErrorTester<'a> {
 
    pub(crate) fn assert_num(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.error.statements.len(),
 
            "[{}] expected error to consist of '{}' parts, but encountered '{}' for {}",
 
            self.test_name, num, self.error.statements.len(), self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_ctx_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].context.contains(msg),
 
            "[{}] expected error statement {}'s context to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_msg_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].message.contains(msg),
 
            "[{}] expected error statement {}'s message to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    // TODO: @tokenizer This should really be removed, as compilation should be
 
    //  deterministic, but we're currently using rather inefficient hashsets for
 
    //  the type inference, so remove once compiler architecture has changed.
 
    pub(crate) fn assert_any_msg_has(self, msg: &str) -> Self {
 
        let mut is_present = false;
 
        for statement in &self.error.statements {
 
            if statement.message.contains(msg) {
 
                is_present = true;
 
                break;
 
            }
 
        }
 

	
 
        assert!(
 
            is_present, "[{}] Expected an error statement to contain '{}' for {}",
 
            self.test_name, msg, self.assert_postfix()
 
        );
 
        
 
        self
 
    }
 

	
 
    /// Seeks the index of the pattern in the context message, then checks if
 
    /// the input position corresponds to that index.
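    /// For example (hypothetical values): with context `"let x = 5 + y;"` and
    /// pattern `"y"`, the pattern starts at byte offset 12, so the error is
    /// expected to point at column 13 (columns appear to be 1-based here).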
 
    pub(crate) fn assert_occurs_at(self, idx: usize, pattern: &str) -> Self {
 
        let pos = self.error.statements[idx].context.find(pattern);
 
        assert!(
 
            pos.is_some(),
 
            "[{}] incorrect occurs_at: '{}' could not be found in the context for {}",
 
            self.test_name, pattern, self.assert_postfix()
 
        );
 
        let pos = pos.unwrap();
 
        let col = self.error.statements[idx].position.column;
 
        assert_eq!(
 
            pos + 1, col,
 
            "[{}] Expected error to occur at column {}, but found it at {} for {}",
 
            self.test_name, pos + 1, col, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("error: [");
 
        for (idx, stmt) in self.error.statements.iter().enumerate() {
 
            if idx != 0 {
 
                v.push_str(", ");
 
            }
 

	
 
            v.push_str(&format!("{{ context: {}, message: {} }}", &stmt.context, stmt.message));
 
        }
 
        v.push(']');
 
        v
 
    }
 
}
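// A minimal usage sketch (hypothetical test values; the harness that produces
// the `AstErrTester` is assumed): each assertion consumes and returns the
// `ErrorTester`, so checks chain by value inside the `error` closure:
//
//     tester.error(|e| {
//         e.assert_num(2)
//             .assert_occurs_at(0, "b.x")
//             .assert_msg_has(0, "unknown field")
//             .assert_ctx_has(1, "defined here");
//     });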
 

	
 
//------------------------------------------------------------------------------
 
// Generic utilities
 
//------------------------------------------------------------------------------
 

	
 
fn has_equal_num_monomorphs<'a>(ctx: TestCtx<'a>, num: usize, definition_id: DefinitionId) -> (bool, usize) {
 
    let type_def = ctx.types.get_base_definition(&definition_id).unwrap();
 
    let num_on_type = type_def.monomorphs.len();
 
    
 
    (num_on_type == num, num_on_type)
 
}
 

	
 
fn has_monomorph<'a>(ctx: TestCtx<'a>, definition_id: DefinitionId, serialized_monomorph: &str) -> (bool, String) {
 
    let type_def = ctx.types.get_base_definition(&definition_id).unwrap();
 

	
 
    let mut full_buffer = String::new();
 
    let mut has_match = false;
 
    full_buffer.push('[');
 
    for (monomorph_idx, monomorph) in type_def.monomorphs.iter().enumerate() {
 
        let mut buffer = String::new();
 
        for (element_idx, monomorph_element) in monomorph.iter().enumerate() {
 
            if element_idx != 0 { buffer.push(';'); }
 
            serialize_concrete_type(&mut buffer, ctx.heap, definition_id, monomorph_element);
 
        }
 

	
 
        if buffer == serialized_monomorph {
 
            // Found an exact match
 
            has_match = true;
 
        }
 

	
 
        if monomorph_idx != 0 {
 
            full_buffer.push_str(", ");
 
        }
 
        full_buffer.push('"');
 
        full_buffer.push_str(&buffer);
 
        full_buffer.push('"');
 
    }
 
    full_buffer.push(']');
 

	
 
    (has_match, full_buffer)
 
}
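// Note on the serialized monomorph format (a sketch; the exact keyword
// spellings come from the KW_TYPE_* constants): the polymorphic arguments of
// one monomorph are each serialized with `serialize_concrete_type` and joined
// with ';', so a monomorph instantiated with (u32, bool[]) would be matched
// against the string "u32;bool[]".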
 

	
 
fn serialize_parser_type(buffer: &mut String, heap: &Heap, parser_type: &ParserType) {
    use ParserTypeVariant as PTV;

    fn write_bytes(buffer: &mut String, bytes: &[u8]) {
        let utf8 = String::from_utf8_lossy(bytes);
        buffer.push_str(&utf8);
    }

    fn serialize_variant(buffer: &mut String, heap: &Heap, parser_type: &ParserType, mut idx: usize) -> usize {
        match &parser_type.elements[idx].variant {
            PTV::Message => write_bytes(buffer, KW_TYPE_MESSAGE),
            PTV::Bool => write_bytes(buffer, KW_TYPE_BOOL),
            PTV::UInt8 => write_bytes(buffer, KW_TYPE_UINT8),
            PTV::UInt16 => write_bytes(buffer, KW_TYPE_UINT16),
            PTV::UInt32 => write_bytes(buffer, KW_TYPE_UINT32),
            PTV::UInt64 => write_bytes(buffer, KW_TYPE_UINT64),
            PTV::SInt8 => write_bytes(buffer, KW_TYPE_SINT8),
            PTV::SInt16 => write_bytes(buffer, KW_TYPE_SINT16),
            PTV::SInt32 => write_bytes(buffer, KW_TYPE_SINT32),
            PTV::SInt64 => write_bytes(buffer, KW_TYPE_SINT64),
            PTV::Character => write_bytes(buffer, KW_TYPE_CHAR),
            PTV::String => write_bytes(buffer, KW_TYPE_STRING),
            PTV::IntegerLiteral => buffer.push_str("int_literal"),
            PTV::Inferred => write_bytes(buffer, KW_TYPE_INFERRED),
            PTV::Array => {
                idx = serialize_variant(buffer, heap, parser_type, idx + 1);
                buffer.push_str("[]");
            },
            PTV::Input => {
                write_bytes(buffer, KW_TYPE_IN_PORT);
                buffer.push('<');
                idx = serialize_variant(buffer, heap, parser_type, idx + 1);
                buffer.push('>');
            },
            PTV::Output => {
                write_bytes(buffer, KW_TYPE_OUT_PORT);
                buffer.push('<');
                idx = serialize_variant(buffer, heap, parser_type, idx + 1);
                buffer.push('>');
            },
            PTV::PolymorphicArgument(definition_id, poly_idx) => {
                let definition = &heap[*definition_id];
                let poly_arg = &definition.poly_vars()[*poly_idx];
                buffer.push_str(poly_arg.value.as_str());
            },
            PTV::Definition(definition_id, num_embedded) => {
                let definition = &heap[*definition_id];
                buffer.push_str(definition.identifier().value.as_str());

                let num_embedded = *num_embedded;
                if num_embedded != 0 {
                    buffer.push('<');
                    for embedded_idx in 0..num_embedded {
                        if embedded_idx != 0 {
                            buffer.push(',');
                        }
                        idx = serialize_variant(buffer, heap, parser_type, idx + 1);
                    }
                    buffer.push('>');
                }
            }
        }

        idx
    }

    serialize_variant(buffer, heap, parser_type, 0);
}
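// Serialization sketch (assuming the KW_TYPE_* constants spell the surface
// syntax, e.g. "u32" and "in"): the element array is traversed in prefix
// order, so a parser type whose elements are
//
//     [Input, Definition(Pair, 2), UInt32, Array, Bool]
//
// would be written out as "in<Pair<u32,bool[]>>".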
 

	
 
fn serialize_concrete_type(buffer: &mut String, heap: &Heap, def: DefinitionId, concrete: &ConcreteType) {
    // Retrieve polymorphic variables of the enclosing definition
    let poly_vars = heap[def].poly_vars();

    fn write_bytes(buffer: &mut String, bytes: &[u8]) {
        let utf8 = String::from_utf8_lossy(bytes);
        buffer.push_str(&utf8);
    }

    fn serialize_recursive(
        buffer: &mut String, heap: &Heap, poly_vars: &Vec<Identifier>, concrete: &ConcreteType, mut idx: usize
    ) -> usize {
        use ConcreteTypePart as CTP;

        let part = &concrete.parts[idx];
        match part {
            CTP::Marker(poly_idx) => {
                buffer.push_str(poly_vars[*poly_idx].value.as_str());
            },
            CTP::Void => buffer.push_str("void"),
            CTP::Message => write_bytes(buffer, KW_TYPE_MESSAGE),
            CTP::Bool => write_bytes(buffer, KW_TYPE_BOOL),
            CTP::UInt8 => write_bytes(buffer, KW_TYPE_UINT8),
            CTP::UInt16 => write_bytes(buffer, KW_TYPE_UINT16),
            CTP::UInt32 => write_bytes(buffer, KW_TYPE_UINT32),
            CTP::UInt64 => write_bytes(buffer, KW_TYPE_UINT64),
            CTP::SInt8 => write_bytes(buffer, KW_TYPE_SINT8),
            CTP::SInt16 => write_bytes(buffer, KW_TYPE_SINT16),
            CTP::SInt32 => write_bytes(buffer, KW_TYPE_SINT32),
            CTP::SInt64 => write_bytes(buffer, KW_TYPE_SINT64),
            CTP::Character => write_bytes(buffer, KW_TYPE_CHAR),
            CTP::String => write_bytes(buffer, KW_TYPE_STRING),
            CTP::Array => {
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
                buffer.push_str("[]");
            },
            CTP::Slice => {
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
                buffer.push_str("[..]");
            },
            CTP::Input => {
                write_bytes(buffer, KW_TYPE_IN_PORT);
                buffer.push('<');
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
                buffer.push('>');
            },
            CTP::Output => {
                write_bytes(buffer, KW_TYPE_OUT_PORT);
                buffer.push('<');
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
                buffer.push('>');
            },
            CTP::Instance(definition_id, num_sub) => {
                let definition_name = heap[*definition_id].identifier();
                buffer.push_str(definition_name.value.as_str());
                if *num_sub != 0 {
                    buffer.push('<');
                    for sub_idx in 0..*num_sub {
                        if sub_idx != 0 { buffer.push(','); }
                        idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
                    }
                    buffer.push('>');
                }
            }
        }

        idx
    }

    serialize_recursive(buffer, heap, poly_vars, concrete, 0);
}
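// The concrete variant follows the same prefix-order scheme, with two
// additions: CTP::Slice appends "[..]" and CTP::Marker prints the name of the
// enclosing definition's polymorphic variable. A hypothetical example
// (keyword spellings again taken from the KW_TYPE_* constants):
//
//     parts:     [Instance(Pair, 2), Marker(0), Slice, UInt8]
//     poly_vars: [T]
//     result:    "Pair<T,u8[..]>"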
 

	
 
fn seek_def_in_modules<'a>(heap: &Heap, modules: &'a [LexedModule], def_id: DefinitionId) -> Option<&'a LexedModule> {
 
    for module in modules {
 
        let root = &heap.protocol_descriptions[module.root_id];
 
        for definition in &root.definitions {
 
            if *definition == def_id {
 
                return Some(module)
 
            }
 
        }
 
    }
 

	
 
    None
 
}
 

	
 
fn seek_stmt<F: Fn(&Statement) -> bool>(heap: &Heap, start: StatementId, f: &F) -> Option<StatementId> {
 
    let stmt = &heap[start];
 
    if f(stmt) { return Some(start); }
 

	
 
    // This statement wasn't it, try to recurse
 
    let matched = match stmt {
 
        Statement::Block(block) => {
 
            for sub_id in &block.statements {
 
                if let Some(id) = seek_stmt(heap, *sub_id, f) {
 
                    return Some(id);
 
                }
 
            }
 

	
 
            None
 
        },
 
        Statement::Labeled(stmt) => seek_stmt(heap, stmt.body, f),
 
        Statement::If(stmt) => {
 
            if let Some(id) = seek_stmt(heap, stmt.true_body.upcast(), f) {
                return Some(id);
            } else if let Some(false_body) = stmt.false_body {
                if let Some(id) = seek_stmt(heap, false_body.upcast(), f) {
                    return Some(id);
                }
            }
            None
        },
        Statement::While(stmt) => seek_stmt(heap, stmt.body.upcast(), f),
        Statement::Synchronous(stmt) => seek_stmt(heap, stmt.body.upcast(), f),
 
        _ => None
 
    };
 

	
 
    matched
 
}
 

	
 
fn seek_expr_in_expr<F: Fn(&Expression) -> bool>(heap: &Heap, start: ExpressionId, f: &F) -> Option<ExpressionId> {
 
    let expr = &heap[start];
 
    if f(expr) { return Some(start); }
 

	
 
    match expr {
 
        Expression::Assignment(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.left, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.right, f))
 
        },
 
        Expression::Binding(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.left.upcast(), f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.right, f))
 
        }
 
        Expression::Conditional(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.test, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.true_expression, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.false_expression, f))
 
        },
 
        Expression::Binary(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.left, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.right, f))
 
        },
 
        Expression::Unary(expr) => {
 
            seek_expr_in_expr(heap, expr.expression, f)
 
        },
 
        Expression::Indexing(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.subject, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.index, f))
 
        },
 
        Expression::Slicing(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.subject, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.from_index, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.to_index, f))
 
        },
 
        Expression::Select(expr) => {
 
            seek_expr_in_expr(heap, expr.subject, f)
 
        },
 
        Expression::Array(expr) => {
 
            for element in &expr.elements {
 
                if let Some(id) = seek_expr_in_expr(heap, *element, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Expression::Literal(expr) => {
 
            if let Literal::Struct(lit) = &expr.value {
 
                for field in &lit.fields {
 
                    if let Some(id) = seek_expr_in_expr(heap, field.value, f) {
 
                        return Some(id)
 
                    }
 
                }
 
            } else if let Literal::Array(elements) = &expr.value {
 
                for element in elements {
 
                    if let Some(id) = seek_expr_in_expr(heap, *element, f) {
 
                        return Some(id)
 
                    }
 
                }
 
            }
 
            None
 
        },
 
        Expression::Call(expr) => {
 
            for arg in &expr.arguments {
 
                if let Some(id) = seek_expr_in_expr(heap, *arg, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Expression::Variable(_) => None
 
    }
 
}
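// Sketch of how the seek helpers are typically driven (identifiers here are
// hypothetical): a predicate closure picks out the node of interest and the
// helper returns the first match in pre-order, e.g.
//
//     let first_var = seek_expr_in_expr(
//         heap, assignment_expr_id,
//         &|expr| matches!(expr, Expression::Variable(_)),
//     );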
 

	
 
fn seek_expr_in_stmt<F: Fn(&Expression) -> bool>(heap: &Heap, start: StatementId, f: &F) -> Option<ExpressionId> {
 
    let stmt = &heap[start];
 

	
 
    match stmt {
 
        Statement::Block(stmt) => {
 
            for stmt_id in &stmt.statements {
 
                if let Some(id) = seek_expr_in_stmt(heap, *stmt_id, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Statement::Labeled(stmt) => {
 
            seek_expr_in_stmt(heap, stmt.body, f)
 
        },
 
        Statement::If(stmt) => {
            None
            .or_else(|| seek_expr_in_expr(heap, stmt.test, f))
            .or_else(|| seek_expr_in_stmt(heap, stmt.true_body.upcast(), f))
            .or_else(|| if let Some(false_body) = stmt.false_body {
                seek_expr_in_stmt(heap, false_body.upcast(), f)
            } else {
                None
            })
        },
        Statement::While(stmt) => {
            None
            .or_else(|| seek_expr_in_expr(heap, stmt.test, f))
            .or_else(|| seek_expr_in_stmt(heap, stmt.body.upcast(), f))
        },
        Statement::Synchronous(stmt) => {
            seek_expr_in_stmt(heap, stmt.body.upcast(), f)
        },
 
        Statement::Return(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression, f)
 
        },
 
        Statement::Assert(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression, f)
 
        },
 
        Statement::New(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression.upcast(), f)
 
        },
 
        Statement::Expression(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression, f)
 
        },
 
        _ => None
 
    }
 
}
 
\ No newline at end of file