Changeset - 87aa65714efe
MH - 2021-05-04 11:13:24
contact@maxhenger.nl
back to a compiling project
11 files changed:
src/collections/scoped_buffer.rs
 
use std::iter::FromIterator;
 

	
 
/// scoped_buffer.rs
 
///
 
/// Solves the common pattern where we perform some kind of recursive
/// procedure while using a temporary buffer: at the start of, or during, the
/// procedure we push items into the buffer, and at the end we take out what we
/// put in.
 
///
 
/// It is unsafe because we're using pointers to circumvent borrowing rules in
 
/// the name of code cleanliness. The correctness of use is checked in debug
 
/// mode.
 

	
 
/// The buffer itself. This struct should be the shared buffer. The type `T` is
 
/// intentionally `Copy` such that it can be copied out and the underlying
 
/// container can be truncated.
 
pub(crate) struct ScopedBuffer<T: Sized + Copy> {
 
pub(crate) struct ScopedBuffer<T: Sized> {
 
    pub inner: Vec<T>,
 
}
 

	
 
/// A section of the buffer. Keeps track of where we started the section. When
 
/// done with the section one must call `into_vec` or `forget` to remove the
 
/// section from the underlying buffer.
 
pub(crate) struct ScopedSection<T: Sized + Copy> {
 
pub(crate) struct ScopedSection<T: Sized> {
 
    inner: *mut Vec<T>,
 
    start_size: u32,
 
    #[cfg(debug_assertions)] cur_size: u32,
 
}
 

	
 
impl<T: Sized + Copy> ScopedBuffer<T> {
 
impl<T: Sized> ScopedBuffer<T> {
 
    pub(crate) fn new_reserved(capacity: usize) -> Self {
 
        Self{ inner: Vec::with_capacity(capacity) }
 
        Self { inner: Vec::with_capacity(capacity) }
 
    }
 

	
 
    pub(crate) fn start_section(&mut self) -> ScopedSection<T> {
 
        let start_size = self.inner.len() as u32;
 
        ScopedSection{
 
        ScopedSection {
 
            inner: &mut self.inner,
 
            start_size,
 
            cur_size: start_size
 
        }
 
    }
 
}
 

	
 
impl<T: Clone> ScopedBuffer<T> {
 
    pub(crate) fn start_section_initialized(&mut self, initialize_with: &[T]) -> ScopedSection<T> {
 
        let start_size = self.inner.len() as u32;
 
        let data_size = initialize_with.len() as u32;
 
        self.inner.extend_from_slice(initialize_with);
 
        ScopedSection{
 
            inner: &mut self.inner,
 
            start_size,
 
            cur_size: start_size + data_size,
 
        }
 
    }
 
}
 

	
 
#[cfg(debug_assertions)]
 
impl<T: Sized + Copy> Drop for ScopedBuffer<T> {
 
impl<T: Sized> Drop for ScopedBuffer<T> {
 
    fn drop(&mut self) {
 
        // Make sure that everyone cleaned up the buffer neatly
 
        debug_assert!(self.inner.is_empty(), "dropped non-empty scoped buffer");
 
    }
 
}
 

	
 
impl<T: Sized + Copy> ScopedSection<T> {
 
impl<T: Sized> ScopedSection<T> {
 
    #[inline]
 
    pub(crate) fn push(&mut self, value: T) {
 
        let vec = unsafe{&mut *self.inner};
 
        debug_assert_eq!(vec.len(), self.cur_size as usize, "trying to push onto section, but size is larger than expected");
 
        vec.push(value);
 
        if cfg!(debug_assertions) { self.cur_size += 1; }
 
    }
 

	
 
    pub(crate) fn len(&self) -> usize {
 
        let vec = unsafe{&mut *self.inner};
 
        debug_assert_eq!(vec.len(), self.cur_size as usize, "trying to get section length, but size is larger than expected");
 
        return vec.len() - self.start_size;
 
        return vec.len() - self.start_size as usize;
 
    }
 

	
 
    #[inline]
 
    pub(crate) fn forget(self) {
 
        let vec = unsafe{&mut *self.inner};
 
        debug_assert_eq!(vec.len(), self.cur_size as usize, "trying to forget section, but size is larger than expected");
 
        vec.truncate(self.start_size as usize);
 
    }
 

	
 
    #[inline]
 
    pub(crate) fn into_vec(self) -> Vec<T> {
 
        let vec = unsafe{&mut *self.inner};
 
        debug_assert_eq!(vec.len(), self.cur_size as usize, "trying to turn section into vec, but size is larger than expected");
 
        let section = Vec::from(&vec[self.start_size as usize..]);
 
        vec.truncate(self.start_size as usize);
 
        let section = Vec::from_iter(vec.drain(self.start_size as usize..));
 
        section
 
    }
 
}
 

	
 
impl<T: Sized + Copy> std::ops::Index<usize> for ScopedSection<T> {
 
impl<T: Sized> std::ops::Index<usize> for ScopedSection<T> {
 
    type Output = T;
 

	
 
    fn index(&self, idx: usize) -> &Self::Output {
 
        let vec = unsafe{&*self.inner};
 
        return vec[self.start_size as usize + idx]
 
        return &vec[self.start_size as usize + idx]
 
    }
 
}
 

	
 
#[cfg(debug_assertions)]
 
impl<T: Sized + Copy> Drop for ScopedSection<T> {
 
impl<T: Sized> Drop for ScopedSection<T> {
 
    fn drop(&mut self) {
 
        // Make sure that the data was actually taken out of the scoped section
 
        let vec = unsafe{&*self.inner};
 
        debug_assert_eq!(vec.len(), self.start_size as usize);
 
    }
 
}
 
\ No newline at end of file
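A minimal usage sketch of the pattern described by the module comment above, using only the API shown in this diff (`start_section`, `push`, `into_vec`). The traversal, the `collect_ids` helper, and the `crate::collections::scoped_buffer` import path are assumptions for illustration, not part of the changeset:

use crate::collections::scoped_buffer::ScopedBuffer;

// Hypothetical recursive pass that collects IDs into one shared buffer.
fn collect_ids(buffer: &mut ScopedBuffer<u32>, node_count: u32) -> Vec<u32> {
    // Open a section; it remembers the buffer's current length as its start.
    let mut section = buffer.start_section();
    for id in 0..node_count {
        section.push(id);
        // Recursive calls may open their own sections on the same buffer, as
        // long as they close them (into_vec/forget) before this one pushes again.
    }
    // Take out exactly what this call pushed; the buffer is truncated back to
    // its original length, which the Drop impls verify in debug builds.
    section.into_vec()
}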
src/collections/string_pool.rs
 
use std::ptr::null_mut;
 
use std::hash::{Hash, Hasher};
 
use std::marker::PhantomData;
 
use std::fmt::{Debug, Display, Result as FmtResult};
 
use crate::common::Formatter;
 

	
 
const SLAB_SIZE: usize = u16::MAX as usize;
 

	
 
#[derive(Clone)]
 
pub struct StringRef<'a> {
 
    data: *const u8,
 
    length: usize,
 
    _phantom: PhantomData<&'a [u8]>,
 
}
 

	
 
// As the StringRef is an immutable thing:
 
unsafe impl Sync for StringRef<'_> {}
 
unsafe impl Send for StringRef<'_> {}
 

	
 
impl<'a> StringRef<'a> {
 
    /// `new` constructs a new StringRef whose data is not owned by the
 
    /// `StringPool`, hence cannot have a `'static` lifetime.
 
    pub(crate) fn new(data: &'a [u8]) -> StringRef<'a> {
 
        // This is an internal (compiler) function, so we only debug_assert
        // that the string is valid ASCII. Most commonly the input comes from
        // the code's source file, which is checked for ASCII anyway.
 
        debug_assert!(data.is_ascii());
 
        let length = data.len();
 
        let data = data.as_ptr();
 
        StringRef{ data, length, _phantom: PhantomData }
 
    }
 

	
 
    pub fn as_str(&self) -> &'a str {
 
        unsafe {
 
            let slice = std::slice::from_raw_parts::<'a, u8>(self.data, self.length);
 
            std::str::from_utf8_unchecked(slice)
 
        }
 
    }
 

	
 
    pub fn as_bytes(&self) -> &'a [u8] {
 
        unsafe {
 
            std::slice::from_raw_parts::<'a, u8>(self.data, self.length)
 
        }
 
    }
 
}
 

	
 
impl<'a> Debug for StringRef<'a> {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
 
        f.write_str("StringRef{ value: ")?;
 
        f.write_str(self.as_str())?;
 
        f.write_str(" }")
 
    }
 
}
 

	
 
impl<'a> Display for StringRef<'a> {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
 
        f.write_str(self.as_str())
 
    }
 
}
 

	
 
impl PartialEq for StringRef<'_> {
 
    fn eq(&self, other: &StringRef) -> bool {
 
        self.as_str() == other.as_str()
 
    }
 
}
 

	
 
impl Eq for StringRef<'_> {}
 

	
 
impl Hash for StringRef<'_> {
 
    fn hash<H: Hasher>(&self, state: &mut H) {
 
        unsafe{
 
            state.write(self.as_bytes());
 
        }
 
        state.write(self.as_bytes());
 
    }
 
}
 

	
 
struct StringPoolSlab {
 
    prev: *mut StringPoolSlab,
 
    data: Vec<u8>,
 
    remaining: usize,
 
}
 

	
 
impl StringPoolSlab {
 
    fn new(prev: *mut StringPoolSlab) -> Self {
 
        Self{ prev, data: Vec::with_capacity(SLAB_SIZE), remaining: SLAB_SIZE }
 
    }
 
}
 

	
 
/// StringPool is an ever-growing pool of strings. Strings have a maximum size
/// equal to the slab size. The slabs are essentially a linked list, to maintain
/// pointer stability of the strings themselves.
/// All `StringRef` instances are invalidated when the string pool is dropped.
 
pub(crate) struct StringPool {
 
    last: *mut StringPoolSlab,
 
}
 

	
 
impl StringPool {
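The diff cuts off before the `StringPool` methods, so the sketch below only exercises the `StringRef` API shown above. The `demo` function and its input are made up for illustration, and it assumes crate-internal access since `StringRef::new` is `pub(crate)`:

use std::collections::HashSet;

fn demo() {
    let bytes = b"my_identifier";
    // `new` borrows, so these references are tied to the lifetime of `bytes`.
    let a = StringRef::new(bytes);
    let b = StringRef::new(bytes);

    // PartialEq/Eq compare the referenced string contents.
    assert_eq!(a, b);
    assert_eq!(a.as_str(), "my_identifier");

    // Hash is content-based too, so StringRef works as a map/set key.
    let mut seen = HashSet::new();
    seen.insert(a);
    assert!(seen.contains(&b));
}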
src/protocol/ast.rs
 
@@ -37,48 +37,49 @@ macro_rules! define_aliased_ast_id {
 
    };
 
    // Variant where we define type, Index(Mut) traits and an allocation function
 
    (
 
        $name:ident, $parent:ty,
 
        index($indexed_type:ty, $indexed_arena:ident),
 
        alloc($fn_name:ident)
 
    ) => {
 
        define_aliased_ast_id!($name, $parent, index($indexed_type, $indexed_arena));
 
        impl Heap {
 
            pub fn $fn_name(&mut self, f: impl FnOnce($name) -> $indexed_type) -> $name {
 
                self.$indexed_arena.alloc_with_id(|id| f(id))
 
            }
 
        }
 
    };
 
}
 

	
 
/// Helper macro that defines a wrapper type for a particular variant of an AST
 
/// element ID. Only used to define single-wrapping IDs.
 
macro_rules! define_new_ast_id {
 
    // Variant where we just defined the new type, without any indexing
 
    ($name:ident, $parent:ty) => {
 
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 
        pub struct $name (pub(crate) $parent);
 

	
 
        #[allow(dead_code)]
 
        impl $name {
 
            pub(crate) fn new_invalid() -> Self     { Self(<$parent>::new_invalid()) }
 
            pub(crate) fn is_invalid(&self) -> bool { self.0.is_invalid() }
 
            pub fn upcast(self) -> $parent          { self.0 }
 
        }
 
    };
 
    // Variant where we define the type, and the Index and IndexMut traits
 
    (
 
        $name:ident, $parent:ty, 
 
        index($indexed_type:ty, $wrapper_type:path, $indexed_arena:ident)
 
    ) => {
 
        define_new_ast_id!($name, $parent);
 
        impl Index<$name> for Heap {
 
            type Output = $indexed_type;
 
            fn index(&self, index: $name) -> &Self::Output {
 
                if let $wrapper_type(v) = &self.$indexed_arena[index.0] {
 
                    v
 
                } else {
 
                    unreachable!()
 
                }
 
            }
 
        }
 

	
 
        impl IndexMut<$name> for Heap {
 
@@ -335,97 +336,159 @@ pub struct ImportSymbols {
 
    pub span: InputSpan,
 
    pub module: Identifier,
 
    pub module_id: RootId,
 
    pub symbols: Vec<AliasedSymbol>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct Identifier {
 
    pub span: InputSpan,
 
    pub value: StringRef<'static>,
 
}
 

	
 
impl PartialEq for Identifier {
 
    fn eq(&self, other: &Self) -> bool {
 
        return self.value == other.value
 
    }
 
}
 

	
 
impl Display for Identifier {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.value.as_str())
 
    }
 
}
 

	
 
#[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)]
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum ParserTypeVariant {
 
    // Basic builtin
 
    Message,
 
    Bool,
 
    UInt8, UInt16, UInt32, UInt64,
 
    SInt8, SInt16, SInt32, SInt64,
 
    Character, String,
 
    // Literals (need to get concrete builtin type during typechecking)
 
    IntegerLiteral,
 
    // Marker for inference
 
    Inferred,
 
    // Builtins expecting one subsequent type
 
    Array,
 
    Input,
 
    Output,
 
    // User-defined types
 
    PolymorphicArgument(DefinitionId, usize), // usize = index into polymorphic variables
 
    Definition(DefinitionId, usize), // usize = number of following subtypes
 
}
 

	
 
impl ParserTypeVariant {
 
    pub(crate) fn num_embedded(&self) -> usize {
 
        use ParserTypeVariant::*;
 

	
 
        match self {
 
            x if *x <= ParserTypeVariant::Inferred => 0,
 
            x if *x <= ParserTypeVariant::Output => 1,
 
            ParserTypeVariant::PolymorphicArgument(_, _) => 0,
 
            ParserTypeVariant::Definition(_, num) => num,
 
            _ => { debug_assert!(false); 0 },
 
            Message | Bool |
 
            UInt8 | UInt16 | UInt32 | UInt64 |
 
            SInt8 | SInt16 | SInt32 | SInt64 |
 
            Character | String | IntegerLiteral |
 
            Inferred | PolymorphicArgument(_, _) =>
 
                0,
 
            Array | Input | Output =>
 
                1,
 
            Definition(_, num) => *num,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ParserTypeElement {
 
    // TODO: @cleanup, do we ever need the span of a user-defined type after
 
    //  constructing it?
 
    pub full_span: InputSpan, // full span of type, including any polymorphic arguments
 
    pub variant: ParserTypeVariant,
 
}
 

	
 
/// ParserType is a specification of a type during the parsing phase and initial
 
/// linker/validator phase of the compilation process. These types may be
 
/// (partially) inferred or represent literals (e.g. an integer whose bytesize is
 
/// not yet determined).
 
#[derive(Debug, Clone)]
 
pub struct ParserType {
 
    pub elements: Vec<ParserTypeElement>
 
}
 

	
 
impl ParserType {
 
    pub(crate) fn iter_embedded(&self, parent_idx: usize) -> ParserTypeIter {
 
        ParserTypeIter::new(&self.elements, parent_idx)
 
    }
 
}
 

	
 
/// Iterator over the embedded elements of a specific element.
 
pub struct ParserTypeIter<'a> {
 
    pub elements: &'a [ParserTypeElement],
 
    pub cur_embedded_idx: usize,
 
}
 

	
 
impl<'a> ParserTypeIter<'a> {
 
    fn new(elements: &'a [ParserTypeElement], parent_idx: usize) -> Self {
 
        debug_assert!(parent_idx < elements.len(), "parent index exceeds number of elements in ParserType");
 
        if elements[0].variant.num_embedded() == 0 {
 
            // Parent element does not have any embedded types, place
 
            // `cur_embedded_idx` at end so we will always return `None`
 
            Self{ elements, cur_embedded_idx: elements.len() }
 
        } else {
 
            // Parent element has an embedded type
 
            Self{ elements, cur_embedded_idx: parent_idx + 1 }
 
        }
 
    }
 
}
 

	
 
impl<'a> Iterator for ParserTypeIter<'a> {
 
    type Item = &'a [ParserTypeElement];
 

	
 
    fn next(&mut self) -> Option<Self::Item> {
 
        let elements_len = self.elements.len();
 
        if self.cur_embedded_idx >= elements_len {
 
            return None;
 
        }
 

	
 
        // Seek to the end of the subtree
 
        let mut depth = 1;
 
        let start_element = self.cur_embedded_idx;
 
        while self.cur_embedded_idx < elements_len {
 
            let cur_element = &self.elements[self.cur_embedded_idx];
 
            let depth_change = cur_element.variant.num_embedded() as i32 - 1;
 
            depth += depth_change;
 
            debug_assert!(depth >= 0, "illegally constructed ParserType: {:?}", self.elements);
 

	
 
            self.cur_embedded_idx += 1;
 
            if depth == 0 {
 
                break;
 
            }
 
        }
 

	
 
        debug_assert!(depth == 0, "illegally constructed ParserType: {:?}", self.elements);
 
        return Some(&self.elements[start_element..self.cur_embedded_idx]);
 
    }
 
}
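As a worked illustration of the flattened, prefix-order encoding that `num_embedded` and `ParserTypeIter` operate on, here is a standalone sketch. It uses its own simplified `Part` enum rather than the real `ParserTypeVariant`, since constructing `ParserTypeElement` values would require spans not shown in this hunk:

// Simplified stand-ins for ParserTypeVariant: each part knows how many
// embedded elements follow it in the flattened, prefix-order list.
#[derive(Debug)]
enum Part { Bool, Array, Input }

fn num_embedded(p: &Part) -> usize {
    match p { Part::Bool => 0, Part::Array | Part::Input => 1 }
}

fn main() {
    // `in<bool[]>` flattens to [Input, Array, Bool].
    let elements = [Part::Input, Part::Array, Part::Bool];

    // Walk the single embedded subtree of element 0, mirroring the
    // depth-counting loop in ParserTypeIter::next above.
    let (start, mut idx, mut depth) = (1usize, 1usize, 1i32);
    while idx < elements.len() {
        depth += num_embedded(&elements[idx]) as i32 - 1;
        idx += 1;
        if depth == 0 { break; }
    }
    assert_eq!((start, idx), (1, 3)); // the subtree is [Array, Bool]
    println!("embedded subtree: {:?}", &elements[start..idx]);
}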
 

	
 
/// Specifies whether the symbolic type points to an actual user-defined type,
 
/// or whether it points to a polymorphic argument within the definition (e.g.
 
/// a defined variable `T var` within a function `int func<T>()`).
 
#[derive(Debug, Clone)]
 
pub enum SymbolicParserTypeVariant {
 
    Definition(DefinitionId),
 
    // TODO: figure out if I need the DefinitionId here
 
    PolyArg(DefinitionId, usize), // index of polyarg in the definition
 
}
 

	
 
/// ConcreteType is the representation of a type after resolving symbolic types
 
/// and performing type inference
 
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
 
pub enum ConcreteTypePart {
 
    // Markers for the use of polymorphic types within a procedure's body that
 
    // refer to polymorphic variables on the procedure's definition. Different
 
    // from markers in the `InferenceType`, these will not contain nested types.
 
    Marker(usize),
 
    // Special types (cannot be explicitly constructed by the programmer)
 
    Void,
 
    // Builtin types without nested types
 
    Message,
 
    Bool,
 
    UInt8, UInt16, UInt32, UInt64,
 
@@ -755,61 +818,59 @@ impl Definition {
 
        match self {
 
            Definition::Function(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `Function`"),
 
        }
 
    }
 
    pub fn defined_in(&self) -> RootId {
 
        match self {
 
            Definition::Struct(def) => def.defined_in,
 
            Definition::Enum(def) => def.defined_in,
 
            Definition::Union(def) => def.defined_in,
 
            Definition::Component(def) => def.defined_in,
 
            Definition::Function(def) => def.defined_in,
 
        }
 
    }
 
    pub fn identifier(&self) -> &Identifier {
 
        match self {
 
            Definition::Struct(def) => &def.identifier,
 
            Definition::Enum(def) => &def.identifier,
 
            Definition::Union(def) => &def.identifier,
 
            Definition::Component(def) => &def.identifier,
 
            Definition::Function(def) => &def.identifier,
 
        }
 
    }
 
    pub fn parameters(&self) -> &Vec<ParameterId> {
 
        // TODO: Fix this
 
        static EMPTY_VEC: Vec<ParameterId> = Vec::new();
 
        match self {
 
            Definition::Component(com) => &com.parameters,
 
            Definition::Function(fun) => &fun.parameters,
 
            _ => &EMPTY_VEC,
 
            _ => panic!("cannot retrieve parameters for {:?}", self),
 
        }
 
    }
 
    pub fn body(&self) -> BlockStatementId {
 
        match self {
 
            Definition::Component(com) => com.body,
 
            Definition::Function(fun) => fun.body,
 
            _ => panic!("cannot retrieve body (for EnumDefinition/UnionDefinition or StructDefinition)")
 
            _ => panic!("cannot retrieve body for {:?}", self),
 
        }
 
    }
 
    pub fn poly_vars(&self) -> &Vec<Identifier> {
 
        match self {
 
            Definition::Struct(def) => &def.poly_vars,
 
            Definition::Enum(def) => &def.poly_vars,
 
            Definition::Union(def) => &def.poly_vars,
 
            Definition::Component(def) => &def.poly_vars,
 
            Definition::Function(def) => &def.poly_vars,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct StructFieldDefinition {
 
    pub span: InputSpan,
 
    pub field: Identifier,
 
    pub parser_type: ParserType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct StructDefinition {
 
    pub this: StructDefinitionId,
 
    pub defined_in: RootId,
 
@@ -1188,49 +1249,49 @@ pub struct BlockStatement {
 
    pub span: InputSpan, // of the complete block
 
    pub statements: Vec<StatementId>,
 
    // Phase 2: linker
 
    pub parent_scope: Option<Scope>,
 
    pub relative_pos_in_parent: u32,
 
    pub locals: Vec<LocalId>,
 
    pub labels: Vec<LabeledStatementId>,
 
}
 

	
 
impl BlockStatement {
 
    pub fn parent_block(&self, h: &Heap) -> Option<BlockStatementId> {
 
        let parent = self.parent_scope.unwrap();
 
        match parent {
 
            Scope::Definition(_) => {
 
                // If the parent scope is a definition, then there is no
 
                // parent block.
 
                None
 
            }
 
            Scope::Synchronous((parent, _)) => {
 
                // It is always the case that, when this function is called,
                // the parent of a synchronous statement is a block statement:
                // nested synchronous statements are flagged as illegal, and
                // that check happens before the variable-resolving pass that
                // creates the parent_scope references in the first place.
 
                Some(h[parent].parent_scope(h).unwrap().to_block())
 
                Some(h[parent].parent_scope.unwrap().to_block())
 
            }
 
            Scope::Regular(parent) => {
 
                // A variable scope is either a definition, sync, or block.
 
                Some(parent)
 
            }
 
        }
 
    }
 
    pub fn first(&self) -> StatementId {
 
        // It is an invariant (guaranteed by the lexer) that block statements have at least one stmt
 
        *self.statements.first().unwrap()
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum LocalStatement {
 
    Memory(MemoryStatement),
 
    Channel(ChannelStatement),
 
}
 

	
 
impl LocalStatement {
 
    pub fn this(&self) -> LocalStatementId {
 
        match self {
 
            LocalStatement::Memory(stmt) => stmt.this.upcast(),
 
            LocalStatement::Channel(stmt) => stmt.this.upcast(),
 
@@ -1407,48 +1468,57 @@ pub struct NewStatement {
 
    pub next: Option<StatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ExpressionStatement {
 
    pub this: ExpressionStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub expression: ExpressionId,
 
    // Phase 2: linker
 
    pub next: Option<StatementId>,
 
}
 

	
 
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
 
pub enum ExpressionParent {
 
    None, // only set during initial parsing
 
    If(IfStatementId),
 
    While(WhileStatementId),
 
    Return(ReturnStatementId),
 
    New(NewStatementId),
 
    ExpressionStmt(ExpressionStatementId),
 
    Expression(ExpressionId, u32) // index within expression (e.g LHS or RHS of expression)
 
}
 

	
 
impl ExpressionParent {
 
    pub fn is_new(&self) -> bool {
 
        match self {
 
            ExpressionParent::New(_) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Expression {
 
    Assignment(AssignmentExpression),
 
    Binding(BindingExpression),
 
    Conditional(ConditionalExpression),
 
    Binary(BinaryExpression),
 
    Unary(UnaryExpression),
 
    Indexing(IndexingExpression),
 
    Slicing(SlicingExpression),
 
    Select(SelectExpression),
 
    Literal(LiteralExpression),
 
    Call(CallExpression),
 
    Variable(VariableExpression),
 
}
 

	
 
impl Expression {
 
    pub fn as_assignment(&self) -> &AssignmentExpression {
 
        match self {
 
            Expression::Assignment(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `AssignmentExpression`"),
 
        }
 
    }
 
    pub fn as_conditional(&self) -> &ConditionalExpression {
 
        match self {
 
@@ -1741,56 +1811,56 @@ pub struct SlicingExpression {
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SelectExpression {
 
    pub this: SelectExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the '.'
 
    pub subject: ExpressionId,
 
    pub field: Field,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct CallExpression {
 
    pub this: CallExpressionId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub parser_type: ParserType, // of the function call
 
    pub parser_type: ParserType, // of the function call, not the return type
 
    pub method: Method,
 
    pub arguments: Vec<ExpressionId>,
 
    pub definition: DefinitionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
    pub concrete_type: ConcreteType, // of the return type
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum Method {
 
    // Builtin
 
    Get,
 
    Put,
 
    Fires,
 
    Create,
 
    Length,
 
    Assert,
 
    UserFunction,
 
    UserComponent,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct MethodSymbolic {
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) definition: DefinitionId
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralExpression {
 
    pub this: LiteralExpressionId,
src/protocol/ast_printer.rs
 
#![allow(dead_code)]
 

	
 
use std::fmt::{Debug, Display, Write};
 
use std::io::Write as IOWrite;
 

	
 
use super::ast::*;
 
use super::token_parsing::*;
 

	
 
const INDENT: usize = 2;
 

	
 
const PREFIX_EMPTY: &'static str = "    ";
 
const PREFIX_ROOT_ID: &'static str = "Root";
 
const PREFIX_PRAGMA_ID: &'static str = "Prag";
 
const PREFIX_IMPORT_ID: &'static str = "Imp ";
 
const PREFIX_TYPE_ANNOT_ID: &'static str = "TyAn";
 
const PREFIX_VARIABLE_ID: &'static str = "Var ";
 
const PREFIX_PARAMETER_ID: &'static str = "Par ";
 
const PREFIX_LOCAL_ID: &'static str = "Loc ";
 
const PREFIX_DEFINITION_ID: &'static str = "Def ";
 
const PREFIX_STRUCT_ID: &'static str = "DefS";
 
const PREFIX_ENUM_ID: &'static str = "DefE";
 
const PREFIX_UNION_ID: &'static str = "DefU";
 
const PREFIX_COMPONENT_ID: &'static str = "DefC";
 
const PREFIX_FUNCTION_ID: &'static str = "DefF";
 
const PREFIX_STMT_ID: &'static str = "Stmt";
 
const PREFIX_BLOCK_STMT_ID: &'static str = "SBl ";
 
@@ -219,69 +221,66 @@ impl ASTWriter {
 
            Pragma::Version(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaVersion")
 
                    .with_disp_val(&pragma.version);
 
            },
 
            Pragma::Module(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaModule")
 
                    .with_identifier_val(&pragma.value);
 
            }
 
        }
 
    }
 

	
 
    fn write_import(&mut self, heap: &Heap, import_id: ImportId, indent: usize) {
 
        let import = &heap[import_id];
 
        let indent2 = indent + 1;
 

	
 
        match import {
 
            Import::Module(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportModule");
 

	
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&import.module);
 
                self.kv(indent2).with_s_key("Alias").with_identifier_val(&import.alias);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(import.module_id.as_ref().map(|v| &v.index));
 
                self.kv(indent2).with_s_key("Target").with_disp_val(&import.module_id.index);
 
            },
 
            Import::Symbols(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportSymbol");
 

	
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&import.module);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(import.module_id.as_ref().map(|v| &v.index));
 
                self.kv(indent2).with_s_key("Target").with_disp_val(&import.module_id.index);
 

	
 
                self.kv(indent2).with_s_key("Symbols");
 

	
 
                let indent3 = indent2 + 1;
 
                let indent4 = indent3 + 1;
 
                for symbol in &import.symbols {
 
                    self.kv(indent3).with_s_key("AliasedSymbol");
 
                    self.kv(indent4).with_s_key("Name").with_identifier_val(&symbol.name);
 
                    self.kv(indent4).with_s_key("Alias").with_opt_identifier_val(symbol.alias.as_ref());
 
                    self.kv(indent4).with_s_key("Definition")
 
                        .with_opt_disp_val(symbol.definition_id.as_ref().map(|v| &v.index));
 
                    self.kv(indent4).with_s_key("Definition").with_disp_val(&symbol.definition_id.index);
 
                }
 
            }
 
        }
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level definition writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_definition(&mut self, heap: &Heap, def_id: DefinitionId, indent: usize) {
 
        self.cur_definition = Some(def_id);
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 
        let indent4 = indent3 + 1;
 

	
 
        match &heap[def_id] {
 
            Definition::Struct(def) => {
 
                self.kv(indent).with_id(PREFIX_STRUCT_ID, def.this.0.index)
 
                    .with_s_key("DefinitionStruct");
 

	
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&def.identifier);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_identifier_val(&poly_var_id);
 
                }
 
@@ -505,50 +504,52 @@ impl ASTWriter {
 
                    .with_s_key("Continue");
 
                self.kv(indent2).with_s_key("Label")
 
                    .with_opt_identifier_val(stmt.label.as_ref());
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::Synchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_SYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Synchronous");
 
                self.kv(indent2).with_s_key("EndSync")
 
                    .with_opt_disp_val(stmt.end_sync.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, stmt.body.upcast(), indent3);
 
            },
 
            Statement::EndSynchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDSYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndSynchronous");
 
                self.kv(indent2).with_s_key("StartSync").with_disp_val(&stmt.start_sync.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Return(stmt) => {
 
                self.kv(indent).with_id(PREFIX_RETURN_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Return");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression, indent3);
 
                self.kv(indent2).with_s_key("Expressions");
 
                for expr_id in &stmt.expressions {
 
                    self.write_expr(heap, *expr_id, indent3);
 
                }
 
            },
 
            Statement::Goto(stmt) => {
 
                self.kv(indent).with_id(PREFIX_GOTO_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Goto");
 
                self.kv(indent2).with_s_key("Label").with_identifier_val(&stmt.label);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::New(stmt) => {
 
                self.kv(indent).with_id(PREFIX_NEW_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("New");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression.upcast(), indent3);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Expression(stmt) => {
 
                self.kv(indent).with_id(PREFIX_EXPR_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("ExpressionStatement");
 
                self.write_expr(heap, stmt.expression, indent2);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            }
 
        }
 
@@ -660,87 +661,85 @@ impl ASTWriter {
 
                    Field::Length => {
 
                        self.kv(indent2).with_s_key("Field").with_s_val("length");
 
                    },
 
                    Field::Symbolic(field) => {
 
                        self.kv(indent2).with_s_key("Field").with_identifier_val(&field.identifier);
 
                        self.kv(indent3).with_s_key("Definition").with_opt_disp_val(field.definition.as_ref().map(|v| &v.index));
 
                        self.kv(indent3).with_s_key("Index").with_disp_val(&field.field_idx);
 
                    }
 
                }
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Literal(expr) => {
 
                self.kv(indent).with_id(PREFIX_LITERAL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("LiteralExpr");
 

	
 
                let val = self.kv(indent2).with_s_key("Value");
 
                match &expr.value {
 
                    Literal::Null => { val.with_s_val("null"); },
 
                    Literal::True => { val.with_s_val("true"); },
 
                    Literal::False => { val.with_s_val("false"); },
 
                    Literal::Character(data) => { val.with_disp_val(data); },
 
                    Literal::String(data) => { val.with_disp_val(data.as_str()); },
 
                    Literal::String(data) => {
 
                        // Stupid hack
 
                        let string = String::from(data.as_str());
 
                        val.with_disp_val(&string);
 
                    },
 
                    Literal::Integer(data) => { val.with_debug_val(data); },
 
                    Literal::Struct(data) => {
 
                        val.with_s_val("Struct");
 
                        let indent4 = indent3 + 1;
 

	
 
                        self.kv(indent3).with_s_key("ParserType")
 
                            .with_custom_val(|t| write_parser_type(t, heap, &data.parser_type));
 
                        self.kv(indent3).with_s_key("Definition").with_custom_val(|s| {
 
                            write_option(s, data.definition.as_ref().map(|v| &v.index));
 
                        });
 
                        self.kv(indent3).with_s_key("Definition").with_disp_val(&data.definition.index);
 

	
 
                        for field in &data.fields {
 
                            self.kv(indent3).with_s_key("Field");
 
                            self.kv(indent4).with_s_key("Name").with_identifier_val(&field.identifier);
 
                            self.kv(indent4).with_s_key("Index").with_disp_val(&field.field_idx);
 
                            self.kv(indent4).with_s_key("ParserType");
 
                            self.write_expr(heap, field.value, indent4 + 1);
 
                        }
 
                    },
 
                    Literal::Enum(data) => {
 
                        val.with_s_val("Enum");
 

	
 
                        self.kv(indent3).with_s_key("ParserType")
 
                            .with_custom_val(|t| write_parser_type(t, heap, &data.parser_type));
 
                        self.kv(indent3).with_s_key("Definition").with_custom_val(|s| {
 
                            write_option(s, data.definition.as_ref().map(|v| &v.index))
 
                        });
 
                        self.kv(indent3).with_s_key("Definition").with_disp_val(&data.definition.index);
 
                        self.kv(indent3).with_s_key("VariantIdx").with_disp_val(&data.variant_idx);
 
                    },
 
                    Literal::Union(data) => {
 
                        val.with_s_val("Union");
 
                        let indent4 = indent3 + 1;
 

	
 
                        self.kv(indent3).with_s_key("ParserType")
 
                            .with_custom_val(|t| write_parser_type(t, heap, &data.parser_type));
 
                        self.kv(indent3).with_s_key("Definition").with_custom_val(|s| {
 
                            write_option(s, data.definition.as_ref().map(|v| &v.index));
 
                        });
 
                        self.kv(indent3).with_s_key("Definition").with_disp_val(&data.definition.index);
 
                        self.kv(indent3).with_s_key("VariantIdx").with_disp_val(&data.variant_idx);
 

	
 
                        for value in &data.values {
 
                            self.kv(indent3).with_s_key("Value");
 
                            self.write_expr(heap, *value, indent4);
 
                        }
 
                    }
 
                    Literal::Array(data) => {
 
                        val.with_s_val("Array");
 
                        let indent4 = indent3 + 1;
 

	
 
                        self.kv(indent3).with_s_key("Elements");
 
                        for expr_id in data {
 
                            self.write_expr(heap, *expr_id, indent4);
 
                        }
 
                    }
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Call(expr) => {
 
@@ -845,127 +844,132 @@ fn write_parser_type(target: &mut String, heap: &Heap, t: &ParserType) {
 
            PTV::SInt64 => { push_bytes(target, KW_TYPE_SINT64); },
 
            PTV::Character => { push_bytes(target, KW_TYPE_CHAR); },
 
            PTV::String => { push_bytes(target, KW_TYPE_STRING); },
 
            PTV::IntegerLiteral => { target.push_str("int_literal"); },
 
            PTV::Inferred => { push_bytes(target, KW_TYPE_INFERRED); },
 
            PTV::Array => {
 
                element_idx = write_element(target, heap, t, element_idx + 1);
 
                target.push_str("[]");
 
            },
 
            PTV::Input => {
 
                push_bytes(target, KW_TYPE_IN_PORT);
 
                target.push('<');
 
                element_idx = write_element(target, heap, t, element_idx + 1);
 
                target.push('>');
 
            },
 
            PTV::Output => {
 
                push_bytes(target, KW_TYPE_OUT_PORT);
 
                target.push('<');
 
                element_idx = write_element(target, heap, t, element_idx + 1);
 
                target.push('>');
 
            },
 
            PTV::PolymorphicArgument(definition_id, arg_idx) => {
 
                let definition = &heap[*definition_id];
 
                let poly_var = &definition.poly_vars()[*arg_idx].value;
 
                target.write_str(poly_var.as_str());
 
                target.push_str(poly_var.as_str());
 
            },
 
            PTV::Definition(definition_id, num_embedded) => {
 
                let definition = &heap[*definition_id];
 
                let definition_ident = definition.identifier().value.as_str();
 
                target.write_str(definition_ident);
 
                target.push_str(definition_ident);
 

	
 
                let num_embedded = *num_embedded;
 
                if num_embedded != 0 {
 
                    target.push('<');
 
                    for embedded_idx in 0..num_embedded {
 
                        if embedded_idx != 0 {
 
                            target.push(',');
 
                        }
 
                        element_idx = write_element(target, heap, t, element_idx + 1);
 
                    }
 
                    target.push('>');
 
                }
 
            }
 
        }
 

	
 
        element_idx
 
    }
 

	
 
    write_element(target, heap, t, 0);
 
}
 

	
 
// TODO: @Cleanup, this is littered at three places in the codebase
 
fn write_concrete_type(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType) {
 
    use ConcreteTypePart as CTP;
 

	
 
    fn write_concrete_part(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType, mut idx: usize) -> usize {
 
        if idx >= t.parts.len() {
 
            return idx;
 
        }
 

	
 
        match &t.parts[idx] {
 
            CTP::Marker(marker) => {
 
                // Marker points to polymorphic variable index
 
                let definition = &heap[def_id];
 
                let poly_var_ident = &definition.poly_vars()[*marker];
 
                target.push_str(poly_var_ident.value.as_str());
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
            },
 
            CTP::Void => target.push_str("void"),
 
            CTP::Message => target.push_str("msg"),
 
            CTP::Bool => target.push_str("bool"),
 
            CTP::Byte => target.push_str("byte"),
 
            CTP::Short => target.push_str("short"),
 
            CTP::Int => target.push_str("int"),
 
            CTP::Long => target.push_str("long"),
 
            CTP::String => target.push_str("string"),
 
            CTP::UInt8 => target.push_str(KW_TYPE_UINT8_STR),
 
            CTP::UInt16 => target.push_str(KW_TYPE_UINT16_STR),
 
            CTP::UInt32 => target.push_str(KW_TYPE_UINT32_STR),
 
            CTP::UInt64 => target.push_str(KW_TYPE_UINT64_STR),
 
            CTP::SInt8 => target.push_str(KW_TYPE_SINT8_STR),
 
            CTP::SInt16 => target.push_str(KW_TYPE_SINT16_STR),
 
            CTP::SInt32 => target.push_str(KW_TYPE_SINT32_STR),
 
            CTP::SInt64 => target.push_str(KW_TYPE_SINT64_STR),
 
            CTP::Character => target.push_str(KW_TYPE_CHAR_STR),
 
            CTP::String => target.push_str(KW_TYPE_STRING_STR),
 
            CTP::Array => {
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[]");
 
            },
 
            CTP::Slice => {
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[..]");
 
            }
 
            CTP::Input => {
 
                target.push_str("in<");
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>');
 
            },
 
            CTP::Output => {
 
                target.push_str("out<");
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>')
 
            },
 
            CTP::Instance(definition_id, num_embedded) => {
 
                let identifier = heap[*definition_id].identifier();
 
                target.push_str(identifier.value.as_str());
 
                target.push('<');
 
                for idx_embedded in 0..*num_embedded {
 
                    if idx_embedded != 0 {
 
                        target.push_str(", ");
 
                    }
 
                    idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                }
 
                target.push('>');
 
            }
 
        }
 

	
 
        idx + 1
 
    }
 

	
 
    write_concrete_part(target, heap, def_id, t, 0);
 
}
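The writer above uses the same prefix-order idea: each part recursively consumes its children's parts and reports how far it got. A standalone sketch with a made-up three-variant enum (not the real `ConcreteTypePart`) shows the shape of that recursion:

// Simplified stand-ins for ConcreteTypePart.
enum Ctp { Bool, Array, Output }

// Writes the part at `idx` (and, recursively, its children) and returns the
// index of the last part it consumed (the real code returns one past it).
fn write_part(out: &mut String, parts: &[Ctp], idx: usize) -> usize {
    match &parts[idx] {
        Ctp::Bool => { out.push_str("bool"); idx }
        Ctp::Array => {
            let last = write_part(out, parts, idx + 1);
            out.push_str("[]");
            last
        }
        Ctp::Output => {
            out.push_str("out<");
            let last = write_part(out, parts, idx + 1);
            out.push('>');
            last
        }
    }
}

fn main() {
    // `out<bool[]>` flattens to [Output, Array, Bool] in prefix order.
    let mut s = String::new();
    write_part(&mut s, &[Ctp::Output, Ctp::Array, Ctp::Bool], 0);
    assert_eq!(s, "out<bool[]>");
}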
 

	
 
fn write_expression_parent(target: &mut String, parent: &ExpressionParent) {
 
    use ExpressionParent as EP;
 

	
 
    *target = match parent {
 
        EP::None => String::from("None"),
 
        EP::If(id) => format!("IfStmt({})", id.0.index),
 
        EP::While(id) => format!("WhileStmt({})", id.0.index),
 
        EP::Return(id) => format!("ReturnStmt({})", id.0.index),
 
        EP::Assert(id) => format!("AssertStmt({})", id.0.index),
 
        EP::New(id) => format!("NewStmt({})", id.0.index),
 
        EP::ExpressionStmt(id) => format!("ExprStmt({})", id.0.index),
 
        EP::Expression(id, idx) => format!("Expr({}, {})", id.index, idx)
 
    };
 
}
 
\ No newline at end of file
src/protocol/eval.rs
 
@@ -3,52 +3,52 @@ use std::fmt;
 
use std::fmt::{Debug, Display, Formatter};
 
use std::{i16, i32, i64, i8};
 

	
 
use crate::common::*;
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::EvalContext;
 

	
 
// const MAX_RECURSION: usize = 1024;
 

	
 
const BYTE_MIN: i64 = i8::MIN as i64;
 
const BYTE_MAX: i64 = i8::MAX as i64;
 
const SHORT_MIN: i64 = i16::MIN as i64;
 
const SHORT_MAX: i64 = i16::MAX as i64;
 
const INT_MIN: i64 = i32::MIN as i64;
 
const INT_MAX: i64 = i32::MAX as i64;
 

	
 
const MESSAGE_MAX_LENGTH: i64 = SHORT_MAX;
 

	
 
const ONE: Value = Value::Byte(ByteValue(1));
 

	
 
// TODO: This all goes away one day anyway, so this is a dirty typechecking hack
 
trait ValueImpl {
 
    fn exact_type(&self) -> Type;
 
    fn is_type_compatible(&self, h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible(&self, h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        Self::is_type_compatible_hack(h, t)
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool;
 
    fn is_type_compatible_hack(h: &Heap, t: &[ParserTypeElement]) -> bool;
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Value {
 
    Unassigned,
 
    Input(InputValue),
 
    Output(OutputValue),
 
    Message(MessageValue),
 
    Boolean(BooleanValue),
 
    Byte(ByteValue),
 
    Short(ShortValue),
 
    Int(IntValue),
 
    Long(LongValue),
 
    InputArray(InputArrayValue),
 
    OutputArray(OutputArrayValue),
 
    MessageArray(MessageArrayValue),
 
    BooleanArray(BooleanArrayValue),
 
    ByteArray(ByteArrayValue),
 
    ShortArray(ShortArrayValue),
 
    IntArray(IntArrayValue),
 
    LongArray(LongArrayValue),
 
}
 
impl Value {
 
    pub fn receive_message(buffer: &Payload) -> Value {
 
@@ -72,49 +72,49 @@ impl Value {
 
        match constant {
 
            Literal::Null => Value::Message(MessageValue(None)),
 
            Literal::True => Value::Boolean(BooleanValue(true)),
 
            Literal::False => Value::Boolean(BooleanValue(false)),
 
            Literal::Integer(val) => {
 
                // Convert raw ASCII data to UTF-8 string
 
                let mut integer_value = val.unsigned_value as i64; // TODO: @Int
 
                if val.negated { integer_value = -integer_value; };
 

	
 
                if integer_value >= BYTE_MIN && integer_value <= BYTE_MAX {
 
                    Value::Byte(ByteValue(integer_value as i8))
 
                } else if integer_value >= SHORT_MIN && integer_value <= SHORT_MAX {
 
                    Value::Short(ShortValue(integer_value as i16))
 
                } else if integer_value >= INT_MIN && integer_value <= INT_MAX {
 
                    Value::Int(IntValue(integer_value as i32))
 
                } else {
 
                    Value::Long(LongValue(integer_value))
 
                }
 
            }
 
            Literal::Character(_data) => unimplemented!(),
 
            Literal::String(_data) => unimplemented!(),
 
            Literal::Struct(_data) => unimplemented!(),
 
            Literal::Enum(_data) => unimplemented!(),
 
            Literal::Union(_data) => unimplemented!(),
 
            Literal::Array(expressions) => unimplemented!(),
 
            Literal::Array(_expressions) => unimplemented!(),
 
        }
 
    }
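The `Literal::Integer` arm above picks the smallest signed type that can hold the constant. Below is a tiny standalone sketch of that sizing rule; the function name is made up, and the real code wraps the result in the `Value` variants rather than returning a name:

// Returns the name of the smallest signed type that fits `v`, mirroring the
// Byte/Short/Int/Long cascade in the Literal::Integer arm above.
fn smallest_type(v: i64) -> &'static str {
    if v >= i8::MIN as i64 && v <= i8::MAX as i64 { "byte" }
    else if v >= i16::MIN as i64 && v <= i16::MAX as i64 { "short" }
    else if v >= i32::MIN as i64 && v <= i32::MAX as i64 { "int" }
    else { "long" }
}

fn main() {
    assert_eq!(smallest_type(100), "byte");
    assert_eq!(smallest_type(300), "short");
    assert_eq!(smallest_type(70_000), "int");
    assert_eq!(smallest_type(5_000_000_000), "long");
    assert_eq!(smallest_type(-200), "short");
}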
 
    fn set(&mut self, index: &Value, value: &Value) -> Option<Value> {
 
        // The index must be of integer type, and non-negative
 
        let the_index: usize;
 
        match index {
 
            Value::Byte(_) | Value::Short(_) | Value::Int(_) | Value::Long(_) => {
 
                let index = i64::from(index);
 
                if index < 0 || index >= MESSAGE_MAX_LENGTH {
 
                    // It is inconsistent to update out of bounds
 
                    return None;
 
                }
 
                the_index = index.try_into().unwrap();
 
            }
 
            _ => unreachable!(),
 
        }
 
        // The subject must be either a message or an array
 
        // And the value and the subject must be compatible
 
        match (self, value) {
 
            (Value::Message(MessageValue(None)), _) => {
 
                // It is inconsistent to update the null message
 
                None
 
            }
 
            (Value::Message(MessageValue(Some(payload))), Value::Byte(ByteValue(b))) => {
 
@@ -830,566 +830,566 @@ impl From<&Value> for i64 {
 
}
 

	
 
impl ValueImpl for Value {
 
    fn exact_type(&self) -> Type {
 
        match self {
 
            Value::Unassigned => Type::UNASSIGNED,
 
            Value::Input(val) => val.exact_type(),
 
            Value::Output(val) => val.exact_type(),
 
            Value::Message(val) => val.exact_type(),
 
            Value::Boolean(val) => val.exact_type(),
 
            Value::Byte(val) => val.exact_type(),
 
            Value::Short(val) => val.exact_type(),
 
            Value::Int(val) => val.exact_type(),
 
            Value::Long(val) => val.exact_type(),
 
            Value::InputArray(val) => val.exact_type(),
 
            Value::OutputArray(val) => val.exact_type(),
 
            Value::MessageArray(val) => val.exact_type(),
 
            Value::BooleanArray(val) => val.exact_type(),
 
            Value::ByteArray(val) => val.exact_type(),
 
            Value::ShortArray(val) => val.exact_type(),
 
            Value::IntArray(val) => val.exact_type(),
 
            Value::LongArray(val) => val.exact_type(),
 
        }
 
    }
 
    fn is_type_compatible(&self, h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible(&self, h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        match self {
 
            Value::Unassigned => true,
 
            Value::Input(_) => InputValue::is_type_compatible_hack(h, t),
 
            Value::Output(_) => OutputValue::is_type_compatible_hack(h, t),
 
            Value::Message(_) => MessageValue::is_type_compatible_hack(h, t),
 
            Value::Boolean(_) => BooleanValue::is_type_compatible_hack(h, t),
 
            Value::Byte(_) => ByteValue::is_type_compatible_hack(h, t),
 
            Value::Short(_) => ShortValue::is_type_compatible_hack(h, t),
 
            Value::Int(_) => IntValue::is_type_compatible_hack(h, t),
 
            Value::Long(_) => LongValue::is_type_compatible_hack(h, t),
 
            Value::InputArray(_) => InputArrayValue::is_type_compatible_hack(h, t),
 
            Value::OutputArray(_) => OutputArrayValue::is_type_compatible_hack(h, t),
 
            Value::MessageArray(_) => MessageArrayValue::is_type_compatible_hack(h, t),
 
            Value::BooleanArray(_) => BooleanArrayValue::is_type_compatible_hack(h, t),
 
            Value::ByteArray(_) => ByteArrayValue::is_type_compatible_hack(h, t),
 
            Value::ShortArray(_) => ShortArrayValue::is_type_compatible_hack(h, t),
 
            Value::IntArray(_) => InputArrayValue::is_type_compatible_hack(h, t),
 
            Value::LongArray(_) => LongArrayValue::is_type_compatible_hack(h, t),
 
        }
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, _t: &ParserType) -> bool { false }
 
    fn is_type_compatible_hack(_h: &Heap, _t: &[ParserTypeElement]) -> bool { false }
 
}
 

	
 
impl Display for Value {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        let disp: &dyn Display;
 
        match self {
 
            Value::Unassigned => disp = &Type::UNASSIGNED,
 
            Value::Input(val) => disp = val,
 
            Value::Output(val) => disp = val,
 
            Value::Message(val) => disp = val,
 
            Value::Boolean(val) => disp = val,
 
            Value::Byte(val) => disp = val,
 
            Value::Short(val) => disp = val,
 
            Value::Int(val) => disp = val,
 
            Value::Long(val) => disp = val,
 
            Value::InputArray(val) => disp = val,
 
            Value::OutputArray(val) => disp = val,
 
            Value::MessageArray(val) => disp = val,
 
            Value::BooleanArray(val) => disp = val,
 
            Value::ByteArray(val) => disp = val,
 
            Value::ShortArray(val) => disp = val,
 
            Value::IntArray(val) => disp = val,
 
            Value::LongArray(val) => disp = val,
 
        }
 
        disp.fmt(f)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct InputValue(pub PortId);
 

	
 
impl Display for InputValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "#in")
 
    }
 
}
 

	
 
impl ValueImpl for InputValue {
 
    fn exact_type(&self) -> Type {
 
        Type::INPUT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(_h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        use ParserTypeVariant::*;
 
        match &t.variant {
 
        match &t[0].variant {
 
            Input | Inferred | Definition(_, _) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct OutputValue(pub PortId);
 

	
 
impl Display for OutputValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "#out")
 
    }
 
}
 

	
 
impl ValueImpl for OutputValue {
 
    fn exact_type(&self) -> Type {
 
        Type::OUTPUT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(_h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        use ParserTypeVariant::*;
 
        match &t.elements[0].variant {
 
        match &t[0].variant {
 
            Output | Inferred | Definition(_, _) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct MessageValue(pub Option<Payload>);
 

	
 
impl Display for MessageValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        match &self.0 {
 
            None => write!(f, "null"),
 
            Some(payload) => {
 
                // format print up to 10 bytes
 
                let mut slice = payload.as_slice();
 
                if slice.len() > 10 {
 
                    slice = &slice[..10];
 
                }
 
                f.debug_list().entries(slice.iter().copied()).finish()
 
            }
 
        }
 
    }
 
}
 

	
 
impl ValueImpl for MessageValue {
 
    fn exact_type(&self) -> Type {
 
        Type::MESSAGE
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(_h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        use ParserTypeVariant::*;
 
        match &t.elements[0].variant {
 
        match &t[0].variant {
 
            Message | Inferred | Definition(_, _) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BooleanValue(bool);
 

	
 
impl Display for BooleanValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for BooleanValue {
 
    fn exact_type(&self) -> Type {
 
        Type::BOOLEAN
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(_h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.elements[0].variant {
 
        match &t[0].variant {
 
            Definition(_, _) | Inferred | Bool |
 
            UInt8 | UInt16 | UInt32 | UInt64 |
 
            SInt8 | SInt16 | SInt32 | SInt64 => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ByteValue(i8);
 

	
 
impl Display for ByteValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for ByteValue {
 
    fn exact_type(&self) -> Type {
 
        Type::BYTE
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(_h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.elements[0].variant {
 
        match &t[0].variant {
 
            Definition(_, _) | Inferred |
 
            UInt8 | UInt16 | UInt32 | UInt64 |
 
            SInt8 | SInt16 | SInt32 | SInt64 => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ShortValue(i16);
 

	
 
impl Display for ShortValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for ShortValue {
 
    fn exact_type(&self) -> Type {
 
        Type::SHORT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(_h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.elements[0].variant {
 
        match &t[0].variant {
 
            Definition(_, _) | Inferred |
 
            UInt16 | UInt32 | UInt64 |
 
            SInt16 | SInt32 | SInt64=> true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct IntValue(i32);
 

	
 
impl Display for IntValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for IntValue {
 
    fn exact_type(&self) -> Type {
 
        Type::INT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(_h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.elements[0].variant {
 
        match &t[0].variant {
 
            Definition(_, _) | Inferred |
 
            UInt32 | UInt64 |
 
            SInt32 | SInt64 => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LongValue(i64);
 

	
 
impl Display for LongValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for LongValue {
 
    fn exact_type(&self) -> Type {
 
        Type::LONG
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(_h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        use ParserTypeVariant::*;
 
        match &t.elements[0].variant {
 
        match &t[0].variant {
 
            UInt64 | SInt64 | Inferred | Definition(_, _) => true,
 
            _ => false,
 
        }
 
    }
 
}
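Taken together, the is_type_compatible_hack implementations above encode a widening rule: ignoring the Inferred and Definition escape hatches, a value is accepted wherever the declared integer type is at least as wide as the value's own type. A hedged standalone sketch of that rule; the IntType enum is a simplification, not the crate's ParserTypeVariant:

    // Simplified stand-in for the parser's signed integer type tags.
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum IntType { S8, S16, S32, S64 }

    impl IntType {
        fn bits(self) -> u8 {
            match self {
                IntType::S8 => 8,
                IntType::S16 => 16,
                IntType::S32 => 32,
                IntType::S64 => 64,
            }
        }
    }

    // A value of type `have` may be stored in a slot declared as `want` when
    // the declared type is at least as wide (implicit widening only).
    fn is_compatible(have: IntType, want: IntType) -> bool {
        want.bits() >= have.bits()
    }

    fn main() {
        assert!(is_compatible(IntType::S16, IntType::S64));  // a short fits in a long
        assert!(!is_compatible(IntType::S64, IntType::S16)); // a long does not fit in a short
    }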
 

	
 
fn get_array_inner(t: &ParserType) -> Option<ParserTypeVariant> {
 
    if t.elements[0].variant == ParserTypeVariant::Array {
 
        return Some(t.elements[1].variant.clone())
 
fn get_array_inner(t: &[ParserTypeElement]) -> Option<&[ParserTypeElement]> {
 
    if t[0].variant == ParserTypeVariant::Array {
 
        return Some(&t[1..])
 
    } else {
 
        return None;
 
    }
 
}
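get_array_inner above relies on the flattened element encoding: element 0 is the outermost type constructor and the remaining elements spell out the embedded type. A small self-contained sketch of that representation; the Tag enum is illustrative, not the real ParserTypeElement:

    // Simplified flattened type encoding: an array type is stored as
    // [Array, <elements of the embedded type>...].
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Tag { Array, UInt32 }

    // Returns the element-type slice if the outermost constructor is Array.
    fn array_inner(elements: &[Tag]) -> Option<&[Tag]> {
        if elements.first() == Some(&Tag::Array) {
            Some(&elements[1..])
        } else {
            None
        }
    }

    fn main() {
        let array_of_u32 = [Tag::Array, Tag::UInt32];
        assert_eq!(array_inner(&array_of_u32), Some(&[Tag::UInt32][..]));
        assert_eq!(array_inner(&[Tag::UInt32]), None);
    }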
 

	
 
#[derive(Debug, Clone)]
 
pub struct InputArrayValue(Vec<InputValue>);
 

	
 
impl Display for InputArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
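The same first-element flag appears in every array Display impl in this file. For reference, a compact standalone version of the pattern, using a plain Vec<i32> for illustration:

    use std::fmt::{self, Display, Formatter};

    struct Braced(Vec<i32>);

    impl Display for Braced {
        fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
            write!(f, "{{")?;
            for (i, v) in self.0.iter().enumerate() {
                // The separator goes before every element except the first,
                // so no trailing comma is produced.
                if i > 0 {
                    write!(f, ",")?;
                }
                write!(f, "{}", v)?;
            }
            write!(f, "}}")
        }
    }

    fn main() {
        assert_eq!(Braced(vec![1, 2, 3]).to_string(), "{1,2,3}");
        assert_eq!(Braced(Vec::new()).to_string(), "{}");
    }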
 

	
 
impl ValueImpl for InputArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::INPUT_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        get_array_inner(t)
 
            .map(|v| InputValue::is_type_compatible_hack(h, &h[v]))
 
            .map(|v| InputValue::is_type_compatible_hack(h, v))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct OutputArrayValue(Vec<OutputValue>);
 

	
 
impl Display for OutputArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for OutputArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::OUTPUT_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        get_array_inner(t)
 
            .map(|v| OutputValue::is_type_compatible_hack(h, &h[v]))
 
            .map(|v| OutputValue::is_type_compatible_hack(h, v))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct MessageArrayValue(Vec<MessageValue>);
 

	
 
impl Display for MessageArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for MessageArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::MESSAGE_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        get_array_inner(t)
 
            .map(|v| MessageValue::is_type_compatible_hack(h, &h[v]))
 
            .map(|v| MessageValue::is_type_compatible_hack(h, v))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BooleanArrayValue(Vec<BooleanValue>);
 

	
 
impl Display for BooleanArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for BooleanArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::BOOLEAN_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        get_array_inner(t)
 
            .map(|v| BooleanValue::is_type_compatible_hack(h, &h[v]))
 
            .map(|v| BooleanValue::is_type_compatible_hack(h, v))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ByteArrayValue(Vec<ByteValue>);
 

	
 
impl Display for ByteArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for ByteArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::BYTE_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        get_array_inner(t)
 
            .map(|v| ByteValue::is_type_compatible_hack(h, &h[v]))
 
            .map(|v| ByteValue::is_type_compatible_hack(h, v))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ShortArrayValue(Vec<ShortValue>);
 

	
 
impl Display for ShortArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for ShortArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::SHORT_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        get_array_inner(t)
 
            .map(|v| ShortValue::is_type_compatible_hack(h, &h[v]))
 
            .map(|v| ShortValue::is_type_compatible_hack(h, v))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct IntArrayValue(Vec<IntValue>);
 

	
 
impl Display for IntArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for IntArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::INT_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        get_array_inner(t)
 
            .map(|v| IntValue::is_type_compatible_hack(h, &h[v]))
 
            .map(|v| IntValue::is_type_compatible_hack(h, v))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LongArrayValue(Vec<LongValue>);
 

	
 
impl Display for LongArrayValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{{")?;
 
        let mut first = true;
 
        for v in self.0.iter() {
 
            if !first {
 
                write!(f, ",")?;
 
            }
 
            write!(f, "{}", v)?;
 
            first = false;
 
        }
 
        write!(f, "}}")
 
    }
 
}
 

	
 
impl ValueImpl for LongArrayValue {
 
    fn exact_type(&self) -> Type {
 
        Type::LONG_ARRAY
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool {
 
    fn is_type_compatible_hack(h: &Heap, t: &[ParserTypeElement]) -> bool {
 
        get_array_inner(t)
 
            .map(|v| LongValue::is_type_compatible_hack(h, &h[v]))
 
            .map(|v| LongValue::is_type_compatible_hack(h, v))
 
            .unwrap_or(false)
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
struct Store {
 
    map: HashMap<VariableId, Value>,
 
}
 
impl Store {
 
    fn new() -> Self {
 
        Store { map: HashMap::new() }
 
    }
 
    fn initialize(&mut self, h: &Heap, var: VariableId, value: Value) {
 
        // Ensure value is compatible with type of variable
 
        let parser_type = match &h[var] {
 
            Variable::Local(v) => &v.parser_type,
 
            Variable::Parameter(v) => &v.parser_type,
 
        };
 
        assert!(value.is_type_compatible(h, parser_type));
 
        assert!(value.is_type_compatible(h, &parser_type.elements));
 
        // Overwrite mapping
 
        self.map.insert(var, value.clone());
 
    }
 
    fn update(
 
        &mut self,
 
        h: &Heap,
 
        ctx: &mut EvalContext,
 
        lexpr: ExpressionId,
 
        value: Value,
 
    ) -> EvalResult {
 
        match &h[lexpr] {
 
            Expression::Variable(var) => {
 
                let var = var.declaration.unwrap();
 
                // Ensure value is compatible with type of variable
 
                let parser_type = match &h[var] {
 
                    Variable::Local(v) => &v.parser_type,
 
                    Variable::Parameter(v) => &v.parser_type
 
                };
 
                assert!(value.is_type_compatible(h, parser_type));
 
                assert!(value.is_type_compatible(h, &parser_type.elements));
 
                // Overwrite mapping
 
                self.map.insert(var, value.clone());
 
                Ok(value)
 
            }
 
            Expression::Indexing(indexing) => {
 
                // Evaluate index expression, which must be some integral type
 
                let index = self.eval(h, ctx, indexing.index)?;
 
                // Mutable reference to the subject
 
                let subject;
 
                match &h[indexing.subject] {
 
                    Expression::Variable(var) => {
 
                        let var = var.declaration.unwrap();
 
                        subject = self.map.get_mut(&var).unwrap();
 
                    }
 
                    _ => unreachable!(),
 
                }
 
                match subject.set(&index, &value) {
 
                    Some(value) => Ok(value),
 
                    None => Err(EvalContinuation::Inconsistent),
 
                }
 
            }
 
            _ => unimplemented!("{:?}", h[lexpr]),
 
        }
 
    }
 
@@ -1437,49 +1437,49 @@ impl Store {
 
            }
 
            _ => unimplemented!("{:?}", h[rexpr]),
 
        }
 
    }
 
    fn eval(&mut self, h: &Heap, ctx: &mut EvalContext, expr: ExpressionId) -> EvalResult {
 
        match &h[expr] {
 
            Expression::Assignment(expr) => {
 
                let value = self.eval(h, ctx, expr.right)?;
 
                match expr.operation {
 
                    AssignmentOperator::Set => {
 
                        self.update(h, ctx, expr.left, value.clone())?;
 
                    }
 
                    AssignmentOperator::Added => {
 
                        let old = self.get(h, ctx, expr.left)?;
 
                        self.update(h, ctx, expr.left, old.plus(&value))?;
 
                    }
 
                    AssignmentOperator::Subtracted => {
 
                        let old = self.get(h, ctx, expr.left)?;
 
                        self.update(h, ctx, expr.left, old.minus(&value))?;
 
                    }
 
                    _ => unimplemented!("{:?}", expr),
 
                }
 
                Ok(value)
 
            }
 
            Expression::Binding(expr) => {
 
            Expression::Binding(_expr) => {
 
                unimplemented!("eval binding expression");
 
            }
 
            Expression::Conditional(expr) => {
 
                let test = self.eval(h, ctx, expr.test)?;
 
                if test.as_boolean().0 {
 
                    self.eval(h, ctx, expr.true_expression)
 
                } else {
 
                    self.eval(h, ctx, expr.false_expression)
 
                }
 
            }
 
            Expression::Binary(expr) => {
 
                let left = self.eval(h, ctx, expr.left)?;
 
                let right;
 
                match expr.operation {
 
                    BinaryOperator::LogicalAnd => {
 
                        if !left.as_boolean().0 {
 
                            return Ok(left);
 
                        }
 
                        right = self.eval(h, ctx, expr.right)?;
 
                        right.as_boolean(); // panics if not a boolean
 
                        return Ok(right);
 
                    }
 
                    BinaryOperator::LogicalOr => {
 
                        if left.as_boolean().0 {
 
@@ -1540,127 +1540,136 @@ impl Store {
 
                }
 
                Method::Put => {
 
                    assert_eq!(2, expr.arguments.len());
 
                    let port_value = self.eval(h, ctx, expr.arguments[0])?;
 
                    let msg_value = self.eval(h, ctx, expr.arguments[1])?;
 
                    if ctx.did_put(port_value.clone()) {
 
                        // Return bogus, replacing this at some point anyway
 
                        Ok(Value::Message(MessageValue(None)))
 
                    } else {
 
                        Err(EvalContinuation::Put(port_value, msg_value))
 
                    }
 
                }
 
                Method::Fires => {
 
                    assert_eq!(1, expr.arguments.len());
 
                    let value = self.eval(h, ctx, expr.arguments[0])?;
 
                    match ctx.fires(value.clone()) {
 
                        None => Err(EvalContinuation::BlockFires(value)),
 
                        Some(result) => Ok(result),
 
                    }
 
                }
 
                Method::Create => {
 
                    assert_eq!(1, expr.arguments.len());
 
                    let length = self.eval(h, ctx, expr.arguments[0])?;
 
                    Ok(Value::create_message(length))
 
                }
 
                Method::Symbolic(_symbol) => unimplemented!(),
 
                },
 
                Method::Length => {
 
                    todo!("implement")
 
                },
 
                Method::Assert => {
 
                    let value = self.eval(h, ctx, expr.arguments[0])?;
 
                    if value.as_boolean().0 {
 
                        // Return bogus
 
                        Ok(Value::Unassigned)
 
                    } else {
 
                        // Failed assertion
 
                        Err(EvalContinuation::Inconsistent)
 
                    }
 
                },
 
                Method::UserFunction => unimplemented!(),
 
                Method::UserComponent => unreachable!(),
 
            },
 
            Expression::Variable(expr) => self.get(h, ctx, expr.this.upcast()),
 
        }
 
    }
 
}
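The LogicalAnd and LogicalOr arms in eval above only evaluate the right operand when the left operand does not already decide the result. A minimal standalone sketch of that short-circuit behaviour over a toy expression tree (not the crate's Expression type):

    // Toy expression tree, just enough to demonstrate short-circuiting.
    enum Expr {
        Lit(bool),
        And(Box<Expr>, Box<Expr>),
        Or(Box<Expr>, Box<Expr>),
    }

    fn eval(e: &Expr, visited: &mut u32) -> bool {
        *visited += 1;
        match e {
            Expr::Lit(b) => *b,
            // The right-hand side only runs when the left side has not
            // already decided the outcome, like the interpreter arms above.
            Expr::And(l, r) => eval(l, visited) && eval(r, visited),
            Expr::Or(l, r) => eval(l, visited) || eval(r, visited),
        }
    }

    fn main() {
        let e = Expr::And(Box::new(Expr::Lit(false)), Box::new(Expr::Lit(true)));
        let mut visited = 0;
        assert_eq!(eval(&e, &mut visited), false);
        assert_eq!(visited, 2); // the right literal was never visited

        let e = Expr::Or(Box::new(Expr::Lit(true)), Box::new(Expr::Lit(false)));
        let mut visited = 0;
        assert!(eval(&e, &mut visited));
        assert_eq!(visited, 2); // right literal skipped again
    }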
 

	
 
type EvalResult = Result<Value, EvalContinuation>;
 
pub enum EvalContinuation {
 
    Stepping,
 
    Inconsistent,
 
    Terminal,
 
    SyncBlockStart,
 
    SyncBlockEnd,
 
    NewComponent(DefinitionId, Vec<Value>),
 
    BlockFires(Value),
 
    BlockGet(Value),
 
    Put(Value, Value),
 
}
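One plausible way a driver consumes a continuation enum like this is to loop over step() and translate each variant into an action, much like the commented-out compute_function further down. A hedged sketch with a stubbed stepper; none of the names below are the crate's real runtime API:

    // Simplified continuation set, mirroring the shape of the enum above.
    enum Continuation {
        Stepping,
        Blocked,
        Terminal(i64),
    }

    // Stub stepper: counts up and terminates after a few steps.
    struct Machine { counter: i64 }

    impl Machine {
        fn step(&mut self) -> Continuation {
            self.counter += 1;
            if self.counter >= 3 {
                Continuation::Terminal(self.counter)
            } else {
                Continuation::Stepping
            }
        }
    }

    fn run(m: &mut Machine) -> Option<i64> {
        loop {
            match m.step() {
                // Nothing interesting happened yet, keep going.
                Continuation::Stepping => continue,
                // The machine is waiting on the outside world; a real
                // scheduler would park it, here we simply give up.
                Continuation::Blocked => return None,
                Continuation::Terminal(value) => return Some(value),
            }
        }
    }

    fn main() {
        let mut m = Machine { counter: 0 };
        assert_eq!(run(&mut m), Some(3));
    }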
 

	
 
#[derive(Debug, Clone)]
 
pub(crate) struct Prompt {
 
    definition: DefinitionId,
 
    store: Store,
 
    position: Option<StatementId>,
 
}
 

	
 
impl Prompt {
 
    pub fn new(h: &Heap, def: DefinitionId, args: &Vec<Value>) -> Self {
 
        let mut prompt =
 
            Prompt { definition: def, store: Store::new(), position: Some((&h[def]).body().upcast()) };
 
        prompt.set_arguments(h, args);
 
        prompt
 
    }
 
    fn set_arguments(&mut self, h: &Heap, args: &Vec<Value>) {
 
        let def = &h[self.definition];
 
        let params = def.parameters();
 
        assert_eq!(params.len(), args.len());
 
        for (param, value) in params.iter().zip(args.iter()) {
 
            let hparam = &h[*param];
 
            assert!(value.is_type_compatible(h, &hparam.parser_type));
 
            assert!(value.is_type_compatible(h, &hparam.parser_type.elements));
 
            self.store.initialize(h, param.upcast(), value.clone());
 
        }
 
    }
 
    pub fn step(&mut self, h: &Heap, ctx: &mut EvalContext) -> EvalResult {
 
        if self.position.is_none() {
 
            return Err(EvalContinuation::Terminal);
 
        }
 

	
 
        let stmt = &h[self.position.unwrap()];
 
        match stmt {
 
            Statement::Block(stmt) => {
 
                // Continue to first statement
 
                self.position = Some(stmt.first());
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Local(stmt) => {
 
                match stmt {
 
                    LocalStatement::Memory(stmt) => {
 
                        // Update store
 
                        self.store.initialize(h, stmt.variable.upcast(), Value::Unassigned);
 
                    }
 
                    LocalStatement::Channel(stmt) => {
 
                        let [from, to] = ctx.new_channel();
 
                        // Store the values in the declared variables
 
                        self.store.initialize(h, stmt.from.upcast(), from);
 
                        self.store.initialize(h, stmt.to.upcast(), to);
 
                    }
 
                }
 
                // Continue to next statement
 
                self.position = stmt.next();
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Skip(stmt) => {
 
                // Continue to next statement
 
                self.position = stmt.next;
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Labeled(stmt) => {
 
                // Continue to next statement
 
                self.position = Some(stmt.body);
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::If(stmt) => {
 
                // Evaluate test
 
                let value = self.store.eval(h, ctx, stmt.test)?;
 
                // Continue with either branch
 
                if value.as_boolean().0 {
 
                    self.position = Some(stmt.true_body.upcast());
 
                } else if let Some(false_body) = stmt.false_body {
 
                    self.position = Some(false_body.upcast());
 
                } else {
 
                    // No false body
 
                    self.position = Some(stmt.end_if.unwrap().upcast());
 
                }
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::EndIf(stmt) => {
 
                // Continue to next statement
 
                self.position = stmt.next;
 
                Err(EvalContinuation::Stepping)
 
            }
 
@@ -1679,82 +1688,71 @@ impl Prompt {
 
                // Continue to next statement
 
                self.position = stmt.next;
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Synchronous(stmt) => {
 
                // Continue to next statement, and signal upward
 
                self.position = Some(stmt.body.upcast());
 
                Err(EvalContinuation::SyncBlockStart)
 
            }
 
            Statement::EndSynchronous(stmt) => {
 
                // Continue to next statement, and signal upward
 
                self.position = stmt.next;
 
                Err(EvalContinuation::SyncBlockEnd)
 
            }
 
            Statement::Break(stmt) => {
 
                // Continue to end of while
 
                self.position = stmt.target.map(EndWhileStatementId::upcast);
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Continue(stmt) => {
 
                // Continue to beginning of while
 
                self.position = stmt.target.map(WhileStatementId::upcast);
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Assert(stmt) => {
 
                // Evaluate expression
 
                let value = self.store.eval(h, ctx, stmt.expression)?;
 
                if value.as_boolean().0 {
 
                    // Continue to next statement
 
                    self.position = stmt.next;
 
                    Err(EvalContinuation::Stepping)
 
                } else {
 
                    // Assertion failed: inconsistent
 
                    Err(EvalContinuation::Inconsistent)
 
                }
 
            }
 
            Statement::Return(stmt) => {
 
                // Evaluate expression
 
                let value = self.store.eval(h, ctx, stmt.expression)?;
 
                debug_assert_eq!(stmt.expressions.len(), 1);
 
                let value = self.store.eval(h, ctx, stmt.expressions[0])?;
 
                // Done with evaluation
 
                Ok(value)
 
            }
 
            Statement::Goto(stmt) => {
 
                // Continue to target
 
                self.position = stmt.target.map(|x| x.upcast());
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::New(stmt) => {
 
                let expr = &h[stmt.expression];
 
                let mut args = Vec::new();
 
                for &arg in expr.arguments.iter() {
 
                    let value = self.store.eval(h, ctx, arg)?;
 
                    args.push(value);
 
                }
 
                self.position = stmt.next;
 
                match &expr.method {
 
                    Method::Symbolic(symbolic) => {
 
                         Err(EvalContinuation::NewComponent(symbolic.definition.unwrap(), args))
 
                    Method::UserComponent => {
 
                         Err(EvalContinuation::NewComponent(expr.definition, args))
 
                    },
 
                    _ => unreachable!("not a symbolic call expression")
 
                }
 
            }
 
            Statement::Expression(stmt) => {
 
                // Evaluate expression
 
                let _value = self.store.eval(h, ctx, stmt.expression)?;
 
                // Continue to next statement
 
                self.position = stmt.next;
 
                Err(EvalContinuation::Stepping)
 
            }
 
        }
 
    }
 
    // fn compute_function(_h: &Heap, _fun: FunctionId, _args: &Vec<Value>) -> Option<Value> {
 
    // let mut prompt = Self::new(h, fun.upcast(), args);
 
    // let mut context = EvalContext::None;
 
    // loop {
 
    //     let result = prompt.step(h, &mut context);
 
    //     match result {
 
    //         Ok(val) => return Some(val),
 
    //         Err(cont) => match cont {
 
    //             EvalContinuation::Stepping => continue,
 
    //             EvalContinuation::Inconsistent => return None,
 
    //             // Functions never terminate without returning
src/protocol/input_source.rs
Show inline comments
 
use std::fmt;
 
use std::cell::{Ref, RefCell};
 
use std::sync::{RwLock, RwLockReadGuard};
 
use std::fmt::Write;
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub struct InputPosition {
 
    pub line: u32,
 
    pub offset: u32,
 
}
 

	
 
impl InputPosition {
 
    pub(crate) fn with_offset(&self, offset: u32) -> Self {
 
        InputPosition { line: self.line, offset: self.offset + offset }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub struct InputSpan {
 
    pub begin: InputPosition,
 
    pub end: InputPosition,
 
}
 

	
 
impl InputSpan {
 
    #[inline]
 
    pub fn from_positions(begin: InputPosition, end: InputPosition) -> Self {
 
        Self { begin, end }
 
    }
 
}
 

	
 
/// Wrapper around source file with optional filename. Ensures that the file is
 
/// only scanned once.
 
pub struct InputSource {
 
    pub(crate) filename: String,
 
    pub(crate) input: Vec<u8>,
 
    // Iteration
 
    line: u32,
 
    offset: usize,
 
    // State tracking
 
    pub(crate) had_error: Option<ParseError>,
 
    // The offset_lookup is built on-demand upon attempting to report an error.
 
    // As the compiler is currently not multithreaded, we simply put it in a 
 
    // RefCell to allow interior mutability.
 
    offset_lookup: RefCell<Vec<u32>>,
 
    // Only one procedure will actually create the lookup; afterwards only read
 
    // locks will be held.
 
    offset_lookup: RwLock<Vec<u32>>,
 
}
 

	
 
impl InputSource {
 
    pub fn new(filename: String, input: Vec<u8>) -> Self {
 
        Self{
 
            filename,
 
            input,
 
            line: 1,
 
            offset: 0,
 
            had_error: None,
 
            offset_lookup: RefCell::new(Vec::new()),
 
            offset_lookup: RwLock::new(Vec::new()),
 
        }
 
    }
 

	
 
    #[cfg(test)]
 
    pub fn new_test(input: &str) -> Self {
 
        let bytes = Vec::from(input.as_bytes());
 
        return Self::new(String::from("test"), bytes)
 
    }
 

	
 
    #[inline]
 
    pub fn pos(&self) -> InputPosition {
 
        InputPosition { line: self.line, offset: self.offset as u32 }
 
    }
 

	
 
    pub fn next(&self) -> Option<u8> {
 
        if self.offset < self.input.len() {
 
            Some(self.input[self.offset])
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    pub fn lookahead(&self, offset: usize) -> Option<u8> {
 
        let offset_pos = self.offset + offset;
 
@@ -108,75 +108,86 @@ impl InputSource {
 
                    self.set_error("Encountered carriage return without a following newline");
 
                }
 
            },
 
            Some(b'\n') => {
 
                self.line += 1;
 
                self.offset += 1;
 
            },
 
            Some(_) => {
 
                self.offset += 1;
 
            }
 
            None => {}
 
        }
 

	
 
        // Maybe we actually want to check this in release mode. Then again:
 
        // a 4 gigabyte source file... Really?
 
        debug_assert!(self.offset < u32::max_value() as usize);
 
    }
 

	
 
    fn set_error(&mut self, msg: &str) {
 
        if self.had_error.is_none() {
 
            self.had_error = Some(ParseError::new_error_str_at_pos(self, self.pos(), msg));
 
        }
 
    }
 

	
 
    fn get_lookup(&self) -> Ref<Vec<u32>> {
 
    fn get_lookup(&self) -> RwLockReadGuard<Vec<u32>> {
 
        // Once constructed, the lookup always contains at least one element. We use this
 
        // to see if it is constructed already.
 
        let lookup = self.offset_lookup.borrow();
 
        {
 
            let lookup = self.offset_lookup.read().unwrap();
 
            if !lookup.is_empty() {
 
                return lookup;
 
            }
 
        }
 

	
 
        // Lookup was not constructed yet
 
        let mut lookup = self.offset_lookup.write().unwrap();
 
        if !lookup.is_empty() {
 
            // Somebody created it before we had the chance
 
            drop(lookup);
 
            let lookup = self.offset_lookup.read().unwrap();
 
            return lookup;
 
        }
 

	
 
        // Build the line number (!) to offset lookup, which is offset by 1. We
 
        // assume the entire source file is scanned (most common case) for
 
        // preallocation.
 
        let mut lookup = self.offset_lookup.borrow_mut();
 
        lookup.reserve(self.line as usize + 2);
 
        lookup.push(0); // line 0: never used
 
        lookup.push(0); // first line: first character
 

	
 
        for char_idx in 0..self.input.len() {
 
            if self.input[char_idx] == b'\n' {
 
                lookup.push(char_idx as u32 + 1);
 
            }
 
        }
 

	
 
        lookup.push(self.input.len() as u32); // for lookup_line_end
 
        debug_assert_eq!(self.line as usize + 2, lookup.len(), "remove me: i am a testing assert and sometimes invalid");
 

	
 
        // Return created lookup
 
        let lookup = self.offset_lookup.borrow();
 
        drop(lookup);
 
        let lookup = self.offset_lookup.read().unwrap();
 
        return lookup;
 
    }
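get_lookup above is a double-checked lazy initialisation behind an RwLock: an optimistic read, then a write lock that re-checks before building, then a fresh read guard is handed back. A standalone sketch of the same shape; the table contents here are made up:

    use std::sync::{RwLock, RwLockReadGuard};

    struct Lazy {
        table: RwLock<Vec<u32>>,
    }

    impl Lazy {
        fn get(&self) -> RwLockReadGuard<'_, Vec<u32>> {
            // Fast path: the table was already built, a read lock suffices.
            {
                let table = self.table.read().unwrap();
                if !table.is_empty() {
                    return table;
                }
            }

            // Slow path: take the write lock and check again, because another
            // caller may have built the table between the two acquisitions.
            {
                let mut table = self.table.write().unwrap();
                if table.is_empty() {
                    table.extend_from_slice(&[0, 10, 20, 30]);
                }
            }

            // Hand back a read guard on the now-filled table.
            self.table.read().unwrap()
        }
    }

    fn main() {
        let lazy = Lazy { table: RwLock::new(Vec::new()) };
        assert_eq!(lazy.get().len(), 4);
        assert_eq!(lazy.get()[1], 10);
    }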
 

	
 
    /// Retrieves offset at which line starts (right after newline)
 
    fn lookup_line_start_offset(&self, line_number: u32) -> u32 {
 
        let lookup = self.get_lookup();
 
        lookup[line_number as usize]
 
    }
 

	
 
    /// Retrieves offset at which line ends (at the newline character or the
 
    /// preceding carriage return for \r\n-encoded newlines)
 
    fn lookup_line_end_offset(&self, line_number: u32) -> u32 {
 
        let lookup = self.get_lookup();
 
        let offset = lookup[(line_number + 1) as usize] - 1;
 
        let offset_usize = offset as usize;
 

	
 
        // Compensate for newlines and a potential carriage feed
 
        if self.input[offset_usize] == b'\n' {
 
            if offset_usize > 0 && self.input[offset_usize - 1] == b'\r' {
 
                offset - 2
 
            } else {
 
                offset - 1
 
            }
 
        } else {
 
@@ -334,90 +345,86 @@ impl fmt::Display for ParseErrorStatement {
 
                transform_context(&self.context, &mut context);
 
                context.push('\n');
 
                f.write_str(&context)?;
 

	
 
                annotation.push_str(" | ");
 
                extend_annotation(1, self.start_column, &self.context, &mut annotation, ' ');
 
                extend_annotation(self.start_column, self.end_column, &self.context, &mut annotation, '~');
 
                annotation.push('\n');
 

	
 
                f.write_str(&annotation)?;
 
            },
 
            ContextKind::MultiLine => {
 
                // Annotate all offending lines
 
                // - first line
 
                let mut lines = self.context.lines();
 
                let first_line = lines.next().unwrap();
 
                transform_context(first_line, &mut context);
 
                writeln!(f, " |- {}", &context)?;
 

	
 
                // - remaining lines
 
                let mut last_line = first_line;
 
                while let Some(cur_line) = lines.next() {
 
                    context.clear();
 
                    transform_context(cur_line, &mut context);
 
                    writeln!(f, " |  {}", &context);
 
                    writeln!(f, " |  {}", &context)?;
 
                    last_line = cur_line;
 
                }
 

	
 
                // - underline beneath last line
 
                annotation.push_str(" \\__");
 
                extend_annotation(1, self.end_column, &last_line, &mut annotation, '_');
 
                annotation.push_str("/\n");
 
                f.write_str(&annotation)?;
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 
}
 

	
 
#[derive(Debug)]
 
pub struct ParseError {
 
    pub(crate) statements: Vec<ParseErrorStatement>
 
}
 

	
 
impl fmt::Display for ParseError {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        if self.statements.is_empty() {
 
            return Ok(())
 
        }
 

	
 
        self.statements[0].fmt(f)?;
 
        for statement in self.statements.iter().skip(1) {
 
            writeln!(f)?;
 
            statement.fmt(f)?;
 
        }
 

	
 
        Ok(())
 
    }
 
}
 

	
 
impl ParseError {
 
    pub fn empty() -> Self {
 
        Self{ statements: Vec::new() }
 
    }
 

	
 
    pub fn new_error_at_pos(source: &InputSource, position: InputPosition, message: String) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source_at_pos(
 
            StatementKind::Error, source, position, message
 
        )) }
 
    }
 

	
 
    pub fn new_error_str_at_pos(source: &InputSource, position: InputPosition, message: &str) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source_at_pos(
 
            StatementKind::Error, source, position, message.to_string()
 
        )) }
 
    }
 

	
 
    pub fn new_error_at_span(source: &InputSource, span: InputSpan, message: String) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source_at_span(
 
            StatementKind::Error, source, span, message
 
        )) }
 
    }
 

	
 
    pub fn new_error_str_at_span(source: &InputSource, span: InputSpan, message: &str) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source_at_span(
 
            StatementKind::Error, source, span, message.to_string()
 
        )) }
 
    }
 

	
src/protocol/parser/pass_definitions.rs
Show inline comments
 
use crate::protocol::ast::*;
 
use super::symbol_table::*;
 
use super::{Module, ModuleCompilationPhase, PassCtx};
 
use super::tokens::*;
 
use super::token_parsing::*;
 
use crate::protocol::input_source::{InputSource, InputPosition, InputSpan, ParseError};
 
use crate::collections::*;
 

	
 
/// Parses all the tokenized definitions into actual AST nodes.
 
pub(crate) struct PassDefinitions {
 
    // State
 
    cur_definition: DefinitionId,
 
    // Temporary buffers of various kinds
 
    buffer: String,
 
    struct_fields: Vec<StructFieldDefinition>,
 
    enum_variants: Vec<EnumVariantDefinition>,
 
    union_variants: Vec<UnionVariantDefinition>,
 
    struct_fields: ScopedBuffer<StructFieldDefinition>,
 
    enum_variants: ScopedBuffer<EnumVariantDefinition>,
 
    union_variants: ScopedBuffer<UnionVariantDefinition>,
 
    parameters: ScopedBuffer<ParameterId>,
 
    expressions: ScopedBuffer<ExpressionId>,
 
    statements: ScopedBuffer<StatementId>,
 
    parser_types: Vec<ParserType>,
 
}
 

	
 
impl PassDefinitions {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            cur_definition: DefinitionId::new_invalid(),
 
            buffer: String::with_capacity(128),
 
            struct_fields: Vec::with_capacity(128),
 
            enum_variants: Vec::with_capacity(128),
 
            union_variants: Vec::with_capacity(128),
 
            struct_fields: ScopedBuffer::new_reserved(128),
 
            enum_variants: ScopedBuffer::new_reserved(128),
 
            union_variants: ScopedBuffer::new_reserved(128),
 
            parameters: ScopedBuffer::new_reserved(128),
 
            expressions: ScopedBuffer::new_reserved(128),
 
            statements: ScopedBuffer::new_reserved(128),
 
            parser_types: Vec::with_capacity(128),
 
        }
 
    }
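The fields above switch from plain Vec scratch space to ScopedBuffer so that one allocation is reused across nested parse calls, with each call later taking out only its own items via into_vec. A simplified standalone illustration of that idea, using a plain Vec and an explicit start index rather than the crate's ScopedBuffer type:

    // Simplified illustration: one shared scratch Vec; each nested call records
    // where its own items start and splits them off when it is done.
    fn collect_nested(scratch: &mut Vec<u32>, depth: u32) -> Vec<u32> {
        let start = scratch.len();

        // This call's own items.
        scratch.push(depth);
        scratch.push(depth + 100);

        if depth > 0 {
            // A nested call pushes and removes its items without disturbing ours.
            let inner = collect_nested(scratch, depth - 1);
            assert_eq!(inner.len(), 2); // each call takes out only its own items
        }

        // Take out exactly what this call pushed; the shared buffer shrinks back
        // to the length it had when this call started.
        scratch.split_off(start)
    }

    fn main() {
        let mut scratch = Vec::with_capacity(16);
        let items = collect_nested(&mut scratch, 2);
        assert_eq!(items, vec![2, 102]);
        assert!(scratch.is_empty());
    }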
 

	
 
    pub(crate) fn parse(&mut self, modules: &mut [Module], module_idx: usize, ctx: &mut PassCtx) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let module_range = &module.tokens.ranges[0];
 
        debug_assert_eq!(module.phase, ModuleCompilationPhase::ImportsResolved);
 
        debug_assert_eq!(module_range.range_kind, TokenRangeKind::Module);
 

	
 
        // Although we only need to parse the definitions, we want to go through
 
        // code ranges as well such that we can throw errors if we get
 
        // unexpected tokens at the module level of the source.
 
        let mut range_idx = module_range.first_child_idx;
 
        loop {
 
            let range_idx_usize = range_idx as usize;
 
            let cur_range = &module.tokens.ranges[range_idx_usize];
 

	
 
            match cur_range.range_kind {
 
                TokenRangeKind::Module => unreachable!(), // should not be reachable
 
                TokenRangeKind::Pragma | TokenRangeKind::Import => continue, // already fully parsed
 
@@ -68,258 +68,259 @@ impl PassDefinitions {
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_range(
 
        &mut self, modules: &[Module], module_idx: usize, ctx: &mut PassCtx, range_idx: usize
 
    ) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let cur_range = &module.tokens.ranges[range_idx];
 
        debug_assert!(cur_range.range_kind == TokenRangeKind::Definition || cur_range.range_kind == TokenRangeKind::Code);
 

	
 
        // Detect which definition we're parsing
 
        let mut iter = module.tokens.iter_range(cur_range);
 
        loop {
 
            let next = iter.next();
 
            if next.is_none() {
 
                return Ok(())
 
            }
 

	
 
            // Token was not None, so peek_ident returns None if not an ident
 
            let ident = peek_ident(&module.source, &mut iter);
 
            match ident {
 
                Some(KW_STRUCT) => self.visit_struct_definition(module, &mut iter, ctx)?,
 
                Some(KW_ENUM) => self.visit_enum_definition(module, &mut iter, ctx)?,
 
                Some(KW_UNION) => self.visit_union_definition(module, &mut iter, ctx)?,
 
                Some(KW_FUNCTION) => self.visit_function_definition(module, &mut iter, ctx)?,
 
                Some(KW_PRIMITIVE) | Some(KW_COMPOSITE) => self.visit_component_definition(module, &mut iter, ctx)?,
 
                _ => return Err(ParseError::new_error_str_at_pos(
 
                    &module.source, iter.last_valid_pos(),
 
                    "unexpected symbol, expected some kind of type or procedure definition"
 
                )),
 
            }
 
        }
 
    }
 

	
 
    fn visit_struct_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_STRUCT)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        // Parse struct definition
 
        consume_polymorphic_vars_spilled(&module.source, iter)?;
 
        debug_assert!(self.struct_fields.is_empty());
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut fields_section = self.struct_fields.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter,
 
            |source, iter| {
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let poly_vars = ctx.heap[definition_id].poly_vars(); // TODO: @Cleanup, this is really ugly. But rust...
 

	
 
                let start_pos = iter.last_valid_pos();
 
                let parser_type = consume_parser_type(
 
                    source, iter, &ctx.symbols, &ctx.heap, poly_vars, module_scope,
 
                    definition_id, false, 0
 
                )?;
 
                let field = consume_ident_interned(source, iter, ctx)?;
 
                Ok(StructFieldDefinition{
 
                    span: InputSpan::from_positions(start_pos, field.span.end),
 
                    field, parser_type
 
                })
 
            },
 
            &mut self.struct_fields, "a struct field", "a list of struct fields", None
 
            &mut fields_section, "a struct field", "a list of struct fields", None
 
        )?;
 

	
 
        // Transfer to preallocated definition
 
        let struct_def = ctx.heap[definition_id].as_struct_mut();
 
        struct_def.fields.clone_from(&self.struct_fields);
 
        self.struct_fields.clear();
 
        struct_def.fields = fields_section.into_vec();
 

	
 
        Ok(())
 
    }
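visit_struct_definition above drives consume_comma_separated with a per-element closure. A hedged standalone sketch of what such a helper can look like over a toy token stream; the Tok type and the comma_separated signature below are illustrative, not the crate's real API:

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Tok { Open, Close, Comma, Ident(char) }

    // Consume `Open (item (Comma item)*)? Close` from a token slice, calling
    // `parse_item` for every element and collecting the results.
    fn comma_separated<T>(
        tokens: &[Tok],
        pos: &mut usize,
        mut parse_item: impl FnMut(Tok) -> Result<T, String>,
    ) -> Result<Vec<T>, String> {
        if tokens.get(*pos) != Some(&Tok::Open) {
            return Err("expected an opening delimiter".to_string());
        }
        *pos += 1;

        let mut items = Vec::new();
        loop {
            match tokens.get(*pos) {
                Some(&Tok::Close) => { *pos += 1; return Ok(items); }
                Some(&tok) => {
                    items.push(parse_item(tok)?);
                    *pos += 1;
                    // An optional comma may separate elements.
                    if tokens.get(*pos) == Some(&Tok::Comma) { *pos += 1; }
                }
                None => return Err("unexpected end of input".to_string()),
            }
        }
    }

    fn main() {
        let tokens = [Tok::Open, Tok::Ident('a'), Tok::Comma, Tok::Ident('b'), Tok::Close];
        let mut pos = 0;
        let idents = comma_separated(&tokens, &mut pos, |t| match t {
            Tok::Ident(c) => Ok(c),
            other => Err(format!("expected an identifier, got {:?}", other)),
        }).unwrap();
        assert_eq!(idents, vec!['a', 'b']);
    }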
 

	
 
    fn visit_enum_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_ENUM)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        // Parse enum definition
 
        consume_polymorphic_vars_spilled(&module.source, iter)?;
 
        debug_assert!(self.enum_variants.is_empty());
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut enum_section = self.enum_variants.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter,
 
            |source, iter| {
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let identifier = consume_ident_interned(source, iter, ctx)?;
 
                let value = if iter.next() == Some(TokenKind::Equal) {
 
                    iter.consume();
 
                    let (variant_number, _) = consume_integer_literal(source, iter, &mut self.buffer)?;
 
                    EnumVariantValue::Integer(variant_number as i64) // TODO: @int
 
                } else {
 
                    EnumVariantValue::None
 
                };
 
                Ok(EnumVariantDefinition{ identifier, value })
 
            },
 
            &mut self.enum_variants, "an enum variant", "a list of enum variants", None
 
            &mut enum_section, "an enum variant", "a list of enum variants", None
 
        )?;
 

	
 
        // Transfer to definition
 
        let enum_def = ctx.heap[definition_id].as_enum_mut();
 
        enum_def.variants.clone_from(&self.enum_variants);
 
        self.enum_variants.clear();
 
        enum_def.variants = enum_section.into_vec();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_union_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_UNION)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        // Parse union definition
 
        consume_polymorphic_vars_spilled(&module.source, iter)?;
 
        debug_assert!(self.union_variants.is_empty());
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut variants_section = self.union_variants.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter,
 
            |source, iter| {
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let identifier = consume_ident_interned(source, iter, ctx)?;
 
                let mut close_pos = identifier.span.end;
 

	
 
                let has_embedded = maybe_consume_comma_separated(
 
                    TokenKind::OpenParen, TokenKind::CloseParen, source, iter,
 
                    |source, iter| {
 
                    TokenKind::OpenParen, TokenKind::CloseParen, source, iter, ctx,
 
                    |source, iter, ctx| {
 
                        let poly_vars = ctx.heap[definition_id].poly_vars(); // TODO: @Cleanup, this is really ugly. But rust...
 
                        consume_parser_type(
 
                            source, iter, &ctx.symbols, &ctx.heap, poly_vars,
 
                            module_scope, definition_id, false, 0
 
                        )
 
                    },
 
                    &mut self.parser_types, "an embedded type", Some(&mut close_pos)
 
                )?;
 
                let value = if has_embedded {
 
                    UnionVariantValue::Embedded(self.parser_types.clone())
 
                } else {
 
                    UnionVariantValue::None
 
                };
 
                self.parser_types.clear();
 

	
 
                Ok(UnionVariantDefinition{
 
                    span: InputSpan::from_positions(identifier.span.begin, close_pos),
 
                    identifier,
 
                    value
 
                })
 
            },
 
            &mut self.union_variants, "a union variant", "a list of union variants", None
 
            &mut variants_section, "a union variant", "a list of union variants", None
 
        )?;
 

	
 
        // Transfer to AST
 
        let union_def = ctx.heap[definition_id].as_union_mut();
 
        union_def.variants.clone_from(&self.union_variants);
 
        self.union_variants.clear();
 
        union_def.variants = variants_section.into_vec();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_function_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_FUNCTION)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        // Parse function's argument list
 
        let mut parameter_section = self.parameters.start_section();
 
        consume_parameter_list(
 
            &module.source, iter, ctx, &mut parameter_section, poly_vars, module_scope, definition_id
 
            &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
 
        )?;
 
        let parameters = parameter_section.into_vec();
 

	
 
        // Consume return types
 
        consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
        let mut open_curly_pos = iter.last_valid_pos();
 
        consume_comma_separated_until(
 
            TokenKind::OpenCurly, &module.source, iter,
 
            |source, iter| {
 
            TokenKind::OpenCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let poly_vars = ctx.heap[definition_id].poly_vars(); // TODO: @Cleanup, this is really ugly. But rust...
 
                consume_parser_type(source, iter, &ctx.symbols, &ctx.heap, poly_vars, module_scope, definition_id, false, 0)
 
            },
 
            &mut self.parser_types, "a return type", Some(&mut open_curly_pos)
 
        )?;
 
        let return_types = self.parser_types.clone();
 

	
 
        // TODO: @ReturnValues
 
        match return_types.len() {
 
            0 => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "expected a return type")),
 
            1 => {},
 
            _ => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "multiple return types are not (yet) allowed")),
 
        }
 

	
 
        // Consume block
 
        let body = self.consume_block_statement_without_leading_curly(module, iter, ctx, open_curly_pos)?;
 

	
 
        // Assign everything in the preallocated AST node
 
        let function = ctx.heap[definition_id].as_function_mut();
 
        function.return_types = return_types;
 
        function.parameters = parameters;
 
        function.body = body;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_component_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        let (_variant_text, _) = consume_any_ident(&module.source, iter)?;
 
        debug_assert!(_variant_text == KW_PRIMITIVE || _variant_text == KW_COMPOSITE);
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated definition
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        // Parse component's argument list
 
        let mut parameter_section = self.parameters.start_section();
 
        consume_parameter_list(
 
            &module.source, iter, ctx, &mut parameter_section, poly_vars, module_scope, definition_id
 
            &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
 
        )?;
 
        let parameters = parameter_section.into_vec();
 

	
 
        // Consume block
 
        let body = self.consume_block_statement(module, iter, ctx)?;
 

	
 
        // Assign everything in the AST node
 
        let component = ctx.heap[definition_id].as_component_mut();
 
        component.parameters = parameters;
 
        component.body = body;
 

	
 
        Ok(())
 
    }
 

	
 
    /// Consumes a block statement. If the resulting statement is not a block
 
    /// (e.g. for a shorthand "if (expr) single_statement") then it will be
 
    /// wrapped in one
 
    fn consume_block_or_wrapped_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<BlockStatementId, ParseError> {
 
        if Some(TokenKind::OpenCurly) == iter.next() {
 
            // This is a block statement
 
            self.consume_block_statement(module, iter, ctx)
 
        } else {
 
@@ -417,54 +418,61 @@ impl PassDefinitions {
 
                // expression. This is a bit ugly
 
                if let Some((memory_stmt_id, assignment_stmt_id)) = self.maybe_consume_memory_statement(module, iter, ctx)? {
 
                    section.push(memory_stmt_id.upcast().upcast());
 
                    section.push(assignment_stmt_id.upcast());
 
                } else {
 
                    let id = self.consume_expression_statement(module, iter, ctx)?;
 
                    section.push(id.upcast());
 
                }
 
            }
 
        };
 

	
 
        return Ok(());
 
    }
 

	
 
    fn consume_block_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<BlockStatementId, ParseError> {
 
        let open_span = consume_token(&module.source, iter, TokenKind::OpenCurly)?;
 
        self.consume_block_statement_without_leading_curly(module, iter, ctx, open_span.begin)
 
    }
 

	
 
    fn consume_block_statement_without_leading_curly(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, open_curly_pos: InputPosition
 
    ) -> Result<BlockStatementId, ParseError> {
 
        let mut statements = Vec::new();
 
        let mut stmt_section = self.statements.start_section();
 
        let mut next = iter.next();
 
        while next.is_some() && next != Some(TokenKind::CloseCurly) {
 

	
 
        while next != Some(TokenKind::CloseCurly) {
 
            if next.is_none() {
 
                return Err(ParseError::new_error_str_at_pos(
 
                    &module.source, iter.last_valid_pos(), "expected a statement or '}'"
 
                ));
 
            }
 
            self.consume_statement(module, iter, ctx, &mut stmt_section)?;
 
            next = iter.next();
 
        }
 

	
 
        let statements = stmt_section.into_vec();
 
        let mut block_span = consume_token(&module.source, iter, TokenKind::CloseCurly)?;
 
        block_span.begin = open_curly_pos;
 

	
 
        Ok(ctx.heap.alloc_block_statement(|this| BlockStatement{
 
            this,
 
            is_implicit: false,
 
            span: block_span,
 
            statements,
 
            parent_scope: None,
 
            relative_pos_in_parent: 0,
 
            locals: Vec::new(),
 
            labels: Vec::new(),
 
        }))
 
    }
 

	
 
    fn consume_if_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<IfStatementId, ParseError> {
 
        let if_span = consume_exact_ident(&module.source, iter, KW_STMT_IF)?;
 
        consume_token(&module.source, iter, TokenKind::OpenParen)?;
 
        let test = self.consume_expression(module, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::CloseParen)?;
 
        let true_body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 

	
 
@@ -544,56 +552,58 @@ impl PassDefinitions {
 
    }
 

	
 
    fn consume_synchronous_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<SynchronousStatementId, ParseError> {
 
        let synchronous_span = consume_exact_ident(&module.source, iter, KW_STMT_SYNC)?;
 
        let body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 

	
 
        Ok(ctx.heap.alloc_synchronous_statement(|this| SynchronousStatement{
 
            this,
 
            span: synchronous_span,
 
            body,
 
            end_sync: None,
 
            parent_scope: None,
 
        }))
 
    }
 

	
 
    fn consume_return_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ReturnStatementId, ParseError> {
 
        let return_span = consume_exact_ident(&module.source, iter, KW_STMT_RETURN)?;
 
        let mut scoped_section = self.expressions.start_section();
 

	
 
        consume_comma_separated_until(
 
            TokenKind::SemiColon, &module.source, iter,
 
            |source, iter| self.consume_expression(module, iter, ctx),
 
            TokenKind::SemiColon, &module.source, iter, ctx,
 
            |_source, iter, ctx| self.consume_expression(module, iter, ctx),
 
            &mut scoped_section, "a return expression", None
 
        )?;
 
        let expressions = scoped_section.into_vec();
 

	
 
        if expressions.is_empty() {
 
            return Err(ParseError::new_error_str_at_span(&module.source, return_span, "expected at least one return value"));
 
        } else if expressions.len() > 1 {
 
            return Err(ParseError::new_error_str_at_span(&module.source, return_span, "multiple return values are not (yet) supported"))
 
        }
 

	
 
        Ok(ctx.heap.alloc_return_statement(|this| ReturnStatement{
 
            this,
 
            span: return_span,
 
            expressions
 
        }))
 
    }
 

	
 
    fn consume_goto_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<GotoStatementId, ParseError> {
 
        let goto_span = consume_exact_ident(&module.source, iter, KW_STMT_GOTO)?;
 
        let label = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
        Ok(ctx.heap.alloc_goto_statement(|this| GotoStatement{
 
            this,
 
            span: goto_span,
 
            label,
 
            target: None
 
        }))
 
    }
 

	
 
    fn consume_new_statement(
 
@@ -1169,50 +1179,50 @@ impl PassDefinitions {
 

	
 
            next = iter.next();
 
        }
 

	
 
        Ok(result)
 
    }
 

	
 
    fn consume_primary_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        let next = iter.next();
 

	
 
        let result = if next == Some(TokenKind::OpenParen) {
 
            // Expression between parentheses
 
            iter.consume();
 
            let result = self.consume_expression(module, iter, ctx)?;
 
            consume_token(&module.source, iter, TokenKind::CloseParen)?;
 

	
 
            result
 
        } else if next == Some(TokenKind::OpenCurly) {
 
            // Array literal
 
            let (start_pos, mut end_pos) = iter.next_positions();
 
            let mut scoped_section = self.expressions.start_section();
 
            consume_comma_separated(
 
                TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter,
 
                |source, iter| self.consume_expression(module, iter, ctx),
 
                TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
                |_source, iter, ctx| self.consume_expression(module, iter, ctx),
 
                &mut scoped_section, "an expression", "a list of expressions", Some(&mut end_pos)
 
            )?;
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this,
 
                span: InputSpan::from_positions(start_pos, end_pos),
 
                value: Literal::Array(scoped_section.into_vec()),
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast()
 
        } else if next == Some(TokenKind::Integer) {
 
            let (literal, span) = consume_integer_literal(&module.source, iter, &mut self.buffer)?;
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this, span,
 
                value: Literal::Integer(LiteralInteger{ unsigned_value: literal, negated: false }),
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast()
 
        } else if next == Some(TokenKind::String) {
 
            let span = consume_string_literal(&module.source, iter, &mut self.buffer)?;
 
            let interned = ctx.pool.intern(self.buffer.as_bytes());
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
@@ -1239,50 +1249,50 @@ impl PassDefinitions {
 
            let symbol = ctx.symbols.get_symbol_by_name(SymbolScope::Module(module.root_id), ident_text);
 

	
 
            if symbol.is_some() {
 
                // The first bit looked like a symbol, so we're going to follow
 
                // that all the way through, assuming we arrive at some kind of
 
                // function call or type instantiation
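                // (For example, illustratively: a struct literal such as
                // `Point{ x: 0, y: 1 }`, an enum variant `Color::Red`, or a
                // call such as `length(values)`; these names are hypothetical.)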
 
                use ParserTypeVariant as PTV;
 

	
 
                let symbol_scope = SymbolScope::Definition(self.cur_definition);
 
                let poly_vars = ctx.heap[self.cur_definition].poly_vars();
 
                let parser_type = consume_parser_type(
 
                    &module.source, iter, &ctx.symbols, &ctx.heap, poly_vars, symbol_scope,
 
                    self.cur_definition, true, 0
 
                )?;
 
                debug_assert!(!parser_type.elements.is_empty());
 
                match parser_type.elements[0].variant {
 
                    PTV::Definition(target_definition_id, _) => {
 
                        let definition = &ctx.heap[target_definition_id];
 
                        match definition {
 
                            Definition::Struct(_) => {
 
                                // Struct literal
 
                                let mut last_token = iter.last_valid_pos();
 
                                let mut struct_fields = Vec::new();
 
                                consume_comma_separated(
 
                                    TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter,
 
                                    |source, iter| {
 
                                    TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
                                    |source, iter, ctx| {
 
                                        let identifier = consume_ident_interned(source, iter, ctx)?;
 
                                        consume_token(source, iter, TokenKind::Colon)?;
 
                                        let value = self.consume_expression(module, iter, ctx)?;
 
                                        Ok(LiteralStructField{ identifier, value, field_idx: 0 })
 
                                    },
 
                                    &mut struct_fields, "a struct field", "a list of struct fields", Some(&mut last_token)
 
                                )?;
 

	
 
                                ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                                    this,
 
                                    span: InputSpan::from_positions(ident_span.begin, last_token),
 
                                    value: Literal::Struct(LiteralStruct{
 
                                        parser_type,
 
                                        fields: struct_fields,
 
                                        definition: target_definition_id,
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    concrete_type: ConcreteType::default(),
 
                                }).upcast()
 
                            },
 
                            Definition::Enum(_) => {
 
                                // Enum literal: consume the variant
 
                                consume_token(&module.source, iter, TokenKind::ColonColon)?;
 
                                let variant = consume_ident_interned(&module.source, iter, ctx)?;
 
@@ -1316,66 +1326,66 @@ impl PassDefinitions {
 
                                        parser_type, variant, values,
 
                                        definition: target_definition_id,
 
                                        variant_idx: 0,
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    concrete_type: ConcreteType::default()
 
                                }).upcast()
 
                            },
 
                            Definition::Component(_) => {
 
                                // Component instantiation
 
                                let arguments = self.consume_expression_list(module, iter, ctx, None)?;
 

	
 
                                ctx.heap.alloc_call_expression(|this| CallExpression{
 
                                    this,
 
                                    span: parser_type.elements[0].full_span, // TODO: @Span fix
 
                                    parser_type,
 
                                    method: Method::UserComponent,
 
                                    arguments,
 
                                    definition: target_definition_id,
 
                                    parent: ExpressionParent::None,
 
                                    concrete_type: ConcreteType::default(),
 
                                }).upcast()
 
                            },
 
                            Definition::Function(function_definition) => {
 
                                // Function call: consume the arguments
 
                                let arguments = self.consume_expression_list(module, iter, ctx, None)?;
 

	
 
                                // Check whether it is a builtin function
 
                                let method = if function_definition.builtin {
 
                                    match function_definition.identifier.value.as_str() {
 
                                        "get" => Method::Get,
 
                                        "put" => Method::Put,
 
                                        "fires" => Method::Fires,
 
                                        "create" => Method::Create,
 
                                        "length" => Method::Length,
 
                                        "assert" => Method::Assert,
 
                                        _ => unreachable!(),
 
                                    }
 
                                } else {
 
                                    Method::UserFunction
 
                                };
 

	
 
                                // Function call: consume the arguments
 
                                let arguments = self.consume_expression_list(module, iter, ctx, None)?;
 

	
 
                                ctx.heap.alloc_call_expression(|this| CallExpression{
 
                                    this,
 
                                    span: parser_type.elements[0].full_span, // TODO: @Span fix
 
                                    parser_type,
 
                                    method,
 
                                    arguments,
 
                                    definition: target_definition_id,
 
                                    parent: ExpressionParent::None,
 
                                    concrete_type: ConcreteType::default(),
 
                                }).upcast()
 
                            }
 
                        }
 
                    },
 
                    _ => {
 
                        // TODO: Casting expressions
 
                        return Err(ParseError::new_error_str_at_span(
 
                            &module.source, parser_type.elements[0].full_span,
 
                            "unexpected type in expression, note that casting expressions are not yet implemented"
 
                        ))
 
                    }
 
                }
 
            } else {
 
                // Check for builtin keywords or builtin functions
 
                if ident_text == KW_LIT_NULL || ident_text == KW_LIT_TRUE || ident_text == KW_LIT_FALSE {
 
@@ -1439,50 +1449,50 @@ impl PassDefinitions {
 
        let mut result = higher_precedence_fn(self, module, iter, ctx)?;
 
        while let Some(operation) = match_fn(iter.next()) {
 
            let span = iter.next_span();
 
            iter.consume();
 

	
 
            let left = result;
 
            let right = higher_precedence_fn(self, module, iter, ctx)?;
 

	
 
            result = ctx.heap.alloc_binary_expression(|this| BinaryExpression{
 
                this, span, left, operation, right,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default()
 
            }).upcast();
 
        }
 

	
 
        Ok(result)
 
    }
 

	
 
    #[inline]
 
    fn consume_expression_list(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, end_pos: Option<&mut InputPosition>
 
    ) -> Result<Vec<ExpressionId>, ParseError> {
 
        let mut section = self.expressions.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenParen, TokenKind::CloseParen, &module.source, iter,
 
            |source, iter| self.consume_expression(module, iter, ctx),
 
            TokenKind::OpenParen, TokenKind::CloseParen, &module.source, iter, ctx,
 
            |_source, iter, ctx| self.consume_expression(module, iter, ctx),
 
            &mut section, "an expression", "a list of expressions", end_pos
 
        )?;
 
        Ok(section.into_vec())
 
    }
 
}
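// Illustrative sketch (not part of this changeset): the binary-expression
// helper above parses one precedence level by consuming a higher-precedence
// operand and then folding `operator operand` pairs left-associatively. The
// same shape on a toy token stream; all names below are hypothetical.
#[cfg(test)]
mod binary_fold_sketch {
    #[derive(Clone, Copy)]
    enum Tok { Num(i64), Plus, Minus }

    // One precedence level: `operand (op operand)*`, folded left-to-right,
    // mirroring the `while let Some(operation) = match_fn(iter.next())` loop.
    fn parse_additive(tokens: &[Tok], pos: &mut usize) -> i64 {
        let mut result = parse_operand(tokens, pos);
        while let Some(op) = tokens.get(*pos).copied() {
            let apply: fn(i64, i64) -> i64 = match op {
                Tok::Plus => |l, r| l + r,
                Tok::Minus => |l, r| l - r,
                Tok::Num(_) => break, // not an operator: this level is done
            };
            *pos += 1; // consume the operator, like `iter.consume()`
            let right = parse_operand(tokens, pos);
            result = apply(result, right);
        }
        result
    }

    fn parse_operand(tokens: &[Tok], pos: &mut usize) -> i64 {
        match tokens[*pos] {
            Tok::Num(v) => { *pos += 1; v },
            _ => panic!("expected a number"),
        }
    }

    #[test]
    fn folds_left_associatively() {
        // 10 - 3 - 2 is parsed as (10 - 3) - 2 = 5
        let tokens = [Tok::Num(10), Tok::Minus, Tok::Num(3), Tok::Minus, Tok::Num(2)];
        assert_eq!(parse_additive(&tokens, &mut 0), 5);
    }
}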
 

	
 
/// Consumes a type. A type always starts with an identifier which may indicate
 
/// a builtin type or a user-defined type. The fact that it may contain
 
/// polymorphic arguments makes it a tree-like structure. Because we cannot rely
 
/// on knowing the exact number of polymorphic arguments we do not check for
 
/// these.
 
///
 
/// Note that the first depth index is used as a hack.
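///
/// For example (illustrative only, with a hypothetical user type `Pair`): the
/// written type `Pair<u8, s32[]>` is stored depth-first as the flattened
/// element sequence `Pair(2 args), u8, Array, s32`; the argument count carried
/// by each element is what lets us recover the tree shape.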
 
// TODO: @Optimize, @Span fix
 
fn consume_parser_type(
 
    source: &InputSource, iter: &mut TokenIter, symbols: &SymbolTable, heap: &Heap, poly_vars: &[Identifier],
 
    cur_scope: SymbolScope, wrapping_definition: DefinitionId, allow_inference: bool, first_angle_depth: i32,
 
) -> Result<ParserType, ParseError> {
 
    struct Entry{
 
        element: ParserTypeElement,
 
        depth: i32,
 
    }
 

	
 
    fn insert_array_before(elements: &mut Vec<Entry>, depth: i32, span: InputSpan) {
 
@@ -1786,60 +1796,61 @@ fn consume_parser_type_ident(
 
                                    )
 
                                ));
 
                            }
 

	
 
                            last_symbol = new_symbol.unwrap();
 
                        },
 
                        SymbolVariant::Definition(symbol_definition) => {
 
                            let num_poly_vars = heap[symbol_definition.definition_id].poly_vars().len();
 
                            type_kind = Some(PTV::Definition(symbol_definition.definition_id, num_poly_vars));
 
                            break;
 
                        }
 
                    }
 
                }
 
            }
 

	
 
            debug_assert!(type_kind.is_some());
 
            type_kind.unwrap()
 
        },
 
    };
 

	
 
    Ok(ParserTypeElement{ full_span: type_span, variant })
 
}
 

	
 
/// Consumes polymorphic variables and throws them on the floor.
 
fn consume_polymorphic_vars_spilled(source: &InputSource, iter: &mut TokenIter) -> Result<(), ParseError> {
 
fn consume_polymorphic_vars_spilled(source: &InputSource, iter: &mut TokenIter, _ctx: &mut PassCtx) -> Result<(), ParseError> {
 
    maybe_consume_comma_separated_spilled(
 
        TokenKind::OpenAngle, TokenKind::CloseAngle, source, iter,
 
        |source, iter| {
 
        TokenKind::OpenAngle, TokenKind::CloseAngle, source, iter, _ctx,
 
        |source, iter, _ctx| {
 
            consume_ident(source, iter)?;
 
            Ok(())
 
        }, "a polymorphic variable"
 
    )?;
 
    Ok(())
 
}
 

	
 
/// Consumes the parameter list to functions/components
 
fn consume_parameter_list(
 
    source: &InputSource, iter: &mut TokenIter, ctx: &mut PassCtx, target: &mut ScopedSection<ParameterId>,
 
    poly_vars: &[Identifier], scope: SymbolScope, definition_id: DefinitionId
 
    source: &InputSource, iter: &mut TokenIter, ctx: &mut PassCtx,
 
    target: &mut ScopedSection<ParameterId>, scope: SymbolScope, definition_id: DefinitionId
 
) -> Result<(), ParseError> {
 
    consume_comma_separated(
 
        TokenKind::OpenParen, TokenKind::CloseParen, source, iter,
 
        |source, iter| {
 
        TokenKind::OpenParen, TokenKind::CloseParen, source, iter, ctx,
 
        |source, iter, ctx| {
 
            let poly_vars = ctx.heap[definition_id].poly_vars(); // TODO: @Cleanup, this is really ugly. But rust...
 
            let (start_pos, _) = iter.next_positions();
 
            let parser_type = consume_parser_type(
 
                source, iter, &ctx.symbols, &ctx.heap, poly_vars, scope,
 
                definition_id, false, 0
 
            )?;
 
            let identifier = consume_ident_interned(source, iter, ctx)?;
 
            let parameter_id = ctx.heap.alloc_parameter(|this| Parameter{
 
                this,
 
                span: InputSpan::from_positions(start_pos, identifier.span.end),
 
                parser_type,
 
                identifier
 
            });
 
            Ok(parameter_id)
 
        },
 
        target, "a parameter", "a parameter list", None
 
    )
 
}
 
\ No newline at end of file
src/protocol/parser/pass_imports.rs
 
@@ -34,49 +34,49 @@ impl PassImport {
 
        loop {
 
            let range_idx_usize = range_idx as usize;
 
            let cur_range = &module.tokens.ranges[range_idx_usize];
 

	
 
            if cur_range.range_kind == TokenRangeKind::Import {
 
                self.visit_import_range(modules, module_idx, ctx, range_idx_usize)?;
 
            }
 

	
 
            match cur_range.next_sibling_idx {
 
                Some(idx) => { range_idx = idx; },
 
                None => { break; }
 
            }
 
        }
 

	
 
        let root = &mut ctx.heap[module.root_id];
 
        root.imports.extend(self.imports.drain(..));
 

	
 
        let module = &mut modules[module_idx];
 
        module.phase = ModuleCompilationPhase::ImportsResolved;
 

	
 
        Ok(())
 
    }
 

	
 
    pub(crate) fn visit_import_range(
 
        &mut self, modules: &mut [Module], module_idx: usize, ctx: &mut PassCtx, range_idx: usize
 
        &mut self, modules: &[Module], module_idx: usize, ctx: &mut PassCtx, range_idx: usize
 
    ) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let import_range = &module.tokens.ranges[range_idx];
 
        debug_assert_eq!(import_range.range_kind, TokenRangeKind::Import);
 

	
 
        let mut iter = module.tokens.iter_range(import_range);
 

	
 
        // Consume "import"
 
        let (_import_ident, import_span) =
 
            consume_any_ident(&module.source, &mut iter)?;
 
        debug_assert_eq!(_import_ident, KW_IMPORT);
 

	
 
        // Consume module name
 
        let (module_name, module_name_span) = consume_domain_ident(&module.source, &mut iter)?;
 
        let target_root_id = ctx.symbols.get_module_by_name(module_name);
 
        if target_root_id.is_none() {
 
            return Err(ParseError::new_error_at_span(
 
                &module.source, module_name_span,
 
                format!("could not resolve module '{}'", String::from_utf8_lossy(module_name))
 
            ));
 
        }
 
        let module_name = ctx.pool.intern(module_name);
 
        let module_identifier = Identifier{ span: module_name_span, value: module_name };
 
        let target_root_id = target_root_id.unwrap();
 
@@ -95,214 +95,223 @@ impl PassImport {
 
                this,
 
                span: import_span,
 
                module: module_identifier,
 
                alias: alias_identifier,
 
                module_id: target_root_id
 
            }));
 
            ctx.symbols.insert_symbol(SymbolScope::Module(module.root_id), Symbol{
 
                name: alias_name,
 
                variant: SymbolVariant::Module(SymbolModule{
 
                    root_id: target_root_id,
 
                    introduced_at: import_id,
 
                }),
 
            });
 
        } else if Some(TokenKind::ColonColon) == next {
 
            iter.consume();
 

	
 
            // Helper function to consume symbols, their alias, and the
 
            // definition the symbol is pointing to.
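            // e.g. (illustrative) for `import std.random::shuffle as mix;`
            // this consumes `shuffle` as the symbol and `mix` as its alias.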
 
            fn consume_symbol_and_maybe_alias<'a>(
 
                source: &'a InputSource, iter: &mut TokenIter, ctx: &mut PassCtx,
 
                module_name: &StringRef<'static>, module_root_id: RootId,
 
            ) -> Result<(AliasedSymbol, SymbolDefinition), ParseError> {
 
                // Consume symbol name and make sure it points to an existing definition
 
                let symbol_identifier = consume_ident_interned(source, iter, ctx)?;
 

	
 
                // Consume alias text if specified
 
                let alias_identifier = if peek_ident(source, iter) == Some(b"as") {
 
                    // Consume alias
 
                    iter.consume();
 
                    Some(consume_ident_interned(source, iter, ctx)?)
 
                } else {
 
                    None
 
                };
 

	
 
                let target = ctx.symbols.get_symbol_by_name_defined_in_scope(
 
                    SymbolScope::Module(module_root_id), symbol_identifier.value.as_bytes()
 
                );
 

	
 
                if target.is_none() {
 
                    return Err(ParseError::new_error_at_span(
 
                        source, symbol_identifier.span,
 
                        format!(
 
                            "could not find symbol '{}' within module '{}'",
 
                            symbol_identifier.value.as_str(), module_name.as_str()
 
                        )
 
                    ));
 
                }
 
                let target = target.unwrap();
 
                debug_assert_ne!(target.class(), SymbolClass::Module);
 
                let target_definition = target.variant.as_definition();
 

	
 
                // Consume alias text if specified
 
                let alias_identifier = if peek_ident(source, iter) == b"as" {
 
                    // Consume alias
 
                    iter.consume();
 
                    Some(consume_ident_interned(source, iter, ctx)?)
 
                } else {
 
                    None
 
                };
 

	
 
                Ok((
 
                    AliasedSymbol{
 
                        name: symbol_identifier,
 
                        alias: alias_identifier,
 
                        definition_id: target_definition.definition_id,
 
                    },
 
                    target_definition.clone()
 
                ))
 
            }
 

	
 
            let next = iter.next();
 

	
 
            if Some(TokenKind::Ident) == next {
 
                // Importing a single symbol
 
                iter.consume();
 
                let (imported_symbol, symbol_definition) = consume_symbol_and_maybe_alias(
 
                    &module.source, &mut iter, ctx, &module_identifier.value, target_root_id
 
                )?;
 
                let alias_identifier = imported_symbol.alias.unwrap_or_else(|| { imported_symbol.name.clone() });
 

	
 
                let alias_identifier = match imported_symbol.alias.as_ref() {
 
                    Some(alias) => alias.clone(),
 
                    None => imported_symbol.name.clone(),
 
                };
 

	
 
                import_id = ctx.heap.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    span: InputSpan::from_positions(import_span.begin, alias_identifier.span.end),
 
                    module: module_identifier,
 
                    module_id: target_root_id,
 
                    symbols: vec![imported_symbol],
 
                }));
 
                if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(
 
                    SymbolScope::Module(module.root_id),
 
                    Symbol{
 
                        name: alias_identifier.value,
 
                        variant: SymbolVariant::Definition(symbol_definition.into_imported(import_id))
 
                    }
 
                ) {
 
                    return Err(construct_symbol_conflict_error(
 
                        modules, module_idx, ctx, &new_symbol, old_symbol
 
                        modules, module_idx, ctx, &new_symbol, &old_symbol
 
                    ));
 
                }
 
            } else if Some(TokenKind::OpenCurly) == next {
 
                // Importing multiple symbols
 
                let mut end_of_list = iter.last_valid_pos();
 
                consume_comma_separated(
 
                    TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, &mut iter,
 
                    |source, iter| consume_symbol_and_maybe_alias(
 
                    TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, &mut iter, ctx,
 
                    |source, iter, ctx| consume_symbol_and_maybe_alias(
 
                        source, iter, ctx, &module_identifier.value, target_root_id
 
                    ),
 
                    &mut self.found_symbols, "a symbol", "a list of symbols to import", Some(&mut end_of_list)
 
                )?;
 

	
 
                // Preallocate import
 
                import_id = ctx.heap.alloc_import(|this| Import::Symbols(ImportSymbols {
 
                    this,
 
                    span: InputSpan::from_positions(import_span.begin, end_of_list),
 
                    module: module_identifier,
 
                    module_id: target_root_id,
 
                    symbols: Vec::with_capacity(self.found_symbols.len()),
 
                }));
 

	
 
                // Fill import symbols while inserting symbols in the
 
                // appropriate scope in the symbol table.
 
                let import = ctx.heap[import_id].as_symbols_mut();
 

	
 
                for (imported_symbol, symbol_definition) in self.found_symbols.drain(..) {
 
                    let import_name = imported_symbol.alias.map_or_else(
 
                        || imported_symbol.name.value.clone(),
 
                        |v| v.value.clone()
 
                    );
 
                    let import_name = match imported_symbol.alias.as_ref() {
 
                        Some(import) => import.value.clone(),
 
                        None => imported_symbol.name.value.clone()
 
                    };
 

	
 
                    import.symbols.push(imported_symbol);
 
                    if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(
 
                        SymbolScope::Module(module.root_id), Symbol{
 
                            name: import_name,
 
                            variant: SymbolVariant::Definition(symbol_definition.into_imported(import_id))
 
                        }
 
                    ) {
 
                        return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, old_symbol));
 
                        return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, &old_symbol));
 
                    }
 
                }
 
            } else if Some(TokenKind::Star) == next {
 
                // Import all symbols from the module
 
                let star_span = iter.next_span();
 

	
 
                iter.consume();
 
                self.scoped_symbols.clear();
 
                let _found = ctx.symbols.get_all_symbols_defined_in_scope(
 
                    SymbolScope::Module(target_root_id),
 
                    &mut self.scoped_symbols
 
                );
 
                debug_assert!(_found); // even modules without symbols should have a scope
 

	
 
                // Preallocate import
 
                import_id = ctx.heap.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    span: InputSpan::from_positions(import_span.begin, star_span.end),
 
                    module: module_identifier,
 
                    module_id: target_root_id,
 
                    symbols: Vec::with_capacity(self.scoped_symbols.len())
 
                }));
 

	
 
                // Fill import AST node and symbol table
 
                let import = ctx.heap[import_id].as_symbols_mut();
 

	
 
                for symbol in self.scoped_symbols.drain(..) {
 
                    let symbol_name = symbol.name;
 
                    match symbol.variant {
 
                        SymbolVariant::Definition(symbol_definition) => {
 
                            import.symbols.push(AliasedSymbol{
 
                                name: Identifier{ span: star_span, value: symbol.name.clone() },
 
                                name: Identifier{ span: star_span, value: symbol_name.clone() },
 
                                alias: None,
 
                                definition_id: symbol_definition.definition_id,
 
                            });
 

	
 
                            if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(
 
                                SymbolScope::Module(module.root_id),
 
                                Symbol{
 
                                    name: symbol_name,
 
                                    variant: SymbolVariant::Definition(symbol_definition.into_imported(import_id))
 
                                }
 
                            ) {
 
                                return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, old_symbol));
 
                                return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, &old_symbol));
 
                            }
 
                        },
 
                        _ => unreachable!(),
 
                    }
 
                }
 
            } else {
 
                return Err(ParseError::new_error_str_at_pos(
 
                    &module.source, iter.last_valid_pos(), "expected symbol name, '{' or '*'"
 
                ));
 
            }
 
        } else {
 
            // Assume implicit alias
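            // e.g. (illustrative) `import std.random;` makes the module
            // available under the alias `random`: the text after the last '.'.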
 
            let module_name_str = module_identifier.value.clone();
 
            let last_ident_start = module_name_str.as_str().rfind('.').map_or(0, |v| v + 1);
 
            let alias_text = &module_name_str.as_bytes()[last_ident_start..];
 
            let alias = ctx.pool.intern(alias_text);
 
            let alias_span = InputSpan::from_positions(
 
                module_name_span.begin.with_offset(last_ident_start as u32),
 
                module_name_span.end
 
            );
 
            let alias_identifier = Identifier{ span: alias_span, value: alias.clone() };
 

	
 
            import_id = ctx.heap.alloc_import(|this| Import::Module(ImportModule{
 
                this,
 
                span: InputSpan::from_positions(import_span.begin, module_identifier.span.end),
 
                module: module_identifier,
 
                alias: alias_identifier,
 
                module_id: target_root_id,
 
            }));
 
            ctx.symbols.insert_symbol(SymbolScope::Module(module.root_id), Symbol{
 
            if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(SymbolScope::Module(module.root_id), Symbol{
 
                name: alias,
 
                variant: SymbolVariant::Module(SymbolModule{
 
                    root_id: target_root_id,
 
                    introduced_at: import_id
 
                })
 
            });
 
            }) {
 
                return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, &old_symbol));
 
            }
 
        }
 

	
 
        // By now the `import_id` is set, just need to make sure that the import
 
        // properly ends with a semicolon
 
        consume_token(&module.source, &mut iter, TokenKind::SemiColon)?;
 
        self.imports.push(import_id);
 

	
 
        Ok(())
 
    }
 
}
src/protocol/parser/pass_symbols.rs
 
@@ -41,171 +41,178 @@ impl PassSymbols {
 
        self.has_pragma_module = false;
 
    }
 

	
 
    pub(crate) fn parse(&mut self, modules: &mut [Module], module_idx: usize, ctx: &mut PassCtx) -> Result<(), ParseError> {
 
        self.reset();
 

	
 
        let module = &mut modules[module_idx];
 
        let module_range = &module.tokens.ranges[0];
 

	
 
        debug_assert_eq!(module.phase, ModuleCompilationPhase::Tokenized);
 
        debug_assert_eq!(module_range.range_kind, TokenRangeKind::Module);
 
        debug_assert!(module.root_id.is_invalid()); // not set yet
 

	
 
        // Preallocate root in the heap
 
        let root_id = ctx.heap.alloc_protocol_description(|this| {
 
            Root{
 
                this,
 
                pragmas: Vec::new(),
 
                imports: Vec::new(),
 
                definitions: Vec::new(),
 
            }
 
        });
 
        module.root_id = root_id;
 

	
 
        // Visit token ranges to detect definitions and pragmas
 
        // Retrieve first range index, then make immutable borrow
 
        let mut range_idx = module_range.first_child_idx;
 
        let module = &modules[module_idx];
 

	
 
        // Visit token ranges to detect definitions and pragmas
 
        loop {
 
            let range_idx_usize = range_idx as usize;
 
            let cur_range = &module.tokens.ranges[range_idx_usize];
 
            let next_sibling_idx = cur_range.next_sibling_idx;
 
            let range_kind = cur_range.range_kind;
 

	
 
            // Parse if it is a definition or a pragma
 
            if cur_range.range_kind == TokenRangeKind::Definition {
 
            if range_kind == TokenRangeKind::Definition {
 
                self.visit_definition_range(modules, module_idx, ctx, range_idx_usize)?;
 
            } else if cur_range.range_kind == TokenRangeKind::Pragma {
 
            } else if range_kind == TokenRangeKind::Pragma {
 
                self.visit_pragma_range(modules, module_idx, ctx, range_idx_usize)?;
 
            }
 

	
 
            match cur_range.next_sibling_idx {
 
            match next_sibling_idx {
 
                Some(idx) => { range_idx = idx; },
 
                None => { break; },
 
            }
 
        }
 

	
 
        // Add the module's symbol scope and the symbols we just parsed
 
        let module_scope = SymbolScope::Module(root_id);
 
        ctx.symbols.insert_scope(None, module_scope);
 
        for symbol in self.symbols.drain(..) {
 
            if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(module_scope, symbol) {
 
                return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, old_symbol))
 
                return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, &old_symbol))
 
            }
 
        }
 

	
 
        // Modify the preallocated root
 
        let root = &mut ctx.heap[root_id];
 
        root.pragmas.extend(self.pragmas.drain(..));
 
        root.definitions.extend(self.definitions.drain(..));
 

	
 
        let module = &mut modules[module_idx];
 
        module.phase = ModuleCompilationPhase::SymbolsScanned;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_pragma_range(&mut self, modules: &[Module], module_idx: usize, ctx: &mut PassCtx, range_idx: usize) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let range = &module.tokens.ranges[range_idx];
 
        let mut iter = module.tokens.iter_range(range);
 

	
 
        // Consume pragma name
 
        let (pragma_section, pragma_start, _) = consume_pragma(&module.source, &mut iter)?;
 

	
 
        // Consume pragma values
 
        if pragma_section == b"#module" {
 
            // Check if name is defined twice within the same file
 
            if self.has_pragma_module {
 
                return Err(ParseError::new_error_str_at_pos(&module.source, pragma_start, "module name is defined twice"));
 
            }
 

	
 
            // Consume the domain-name
 
            let (module_name, module_span) = consume_domain_ident(&module.source, &mut iter)?;
 
            if iter.next().is_some() {
 
                return Err(ParseError::new_error_str_at_pos(&module.source, iter.last_valid_pos(), "expected end of #module pragma after module name"));
 
            }
 

	
 
            // Add to heap and symbol table
 
            let pragma_span = InputSpan::from_positions(pragma_start, module_span.end);
 
            let module_name = ctx.pool.intern(module_name);
 
            let pragma_id = ctx.heap.alloc_pragma(|this| Pragma::Module(PragmaModule{
 
                this,
 
                span: pragma_span,
 
                value: Identifier{ span: module_span, value: module_name.clone() },
 
            }));
 
            self.pragmas.push(pragma_id);
 

	
 
            if let Err(other_module_root_id) = ctx.symbols.insert_module(module_name, module.root_id) {
 
                // Naming conflict
 
                let this_module = &modules[module_idx];
 
                let other_module = seek_module(modules, other_module_root_id).unwrap();
 
                let (other_module_pragma_id, _) = other_module.name.unwrap();
 
                let other_module_pragma_id = other_module.name.as_ref().map(|v| (*v).0).unwrap();
 
                let other_pragma = ctx.heap[other_module_pragma_id].as_module();
 
                return Err(ParseError::new_error_str_at_span(
 
                    &this_module.source, pragma_span, "conflict in module name"
 
                ).with_info_str_at_span(
 
                    &other_module.source, other_pragma.span, "other module is defined here"
 
                ));
 
            }
 
            self.has_pragma_module = true;
 
        } else if pragma_section == b"#version" {
 
            // Check if version is defined twice within the same file
 
            if self.has_pragma_version {
 
                return Err(ParseError::new_error_str_at_pos(&module.source, pragma_start, "module version is defined twice"));
 
            }
 

	
 
            // Consume the version pragma
 
            let (version, version_span) = consume_integer_literal(&module.source, &mut iter, &mut self.buffer)?;
 
            let pragma_id = ctx.heap.alloc_pragma(|this| Pragma::Version(PragmaVersion{
 
                this,
 
                span: InputSpan::from_positions(pragma_start, version_span.end),
 
                version,
 
            }));
 
            self.pragmas.push(pragma_id);
 
            self.has_pragma_version = true;
 
        } else {
 
            // Custom pragma; maybe we'll support this in the future, but for now
 
            // we don't.
 
            return Err(ParseError::new_error_str_at_pos(&module.source, pragma_start, "illegal pragma name"));
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_definition_range(&mut self, modules: &[Module], module_idx: usize, ctx: &mut PassCtx, range_idx: usize) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let range = &module.tokens.ranges[range_idx];
 
        let definition_span = InputSpan::from_positions(
 
            module.tokens.start_pos(range),
 
            module.tokens.end_pos(range)
 
        );
 
        let mut iter = module.tokens.iter_range(range);
 

	
 
        // First ident must be type of symbol
 
        let (kw_text, _) = consume_any_ident(&module.source, &mut iter).unwrap();
 

	
 
        // Retrieve identifier of definition
 
        let identifier = consume_ident_interned(&module.source, &mut iter, ctx)?;
 
        let mut poly_vars = Vec::new();
 
        maybe_consume_comma_separated(
 
            TokenKind::OpenAngle, TokenKind::CloseAngle, &module.source, &mut iter,
 
            |source, iter| consume_ident_interned(source, iter, ctx),
 
            TokenKind::OpenAngle, TokenKind::CloseAngle, &module.source, &mut iter, ctx,
 
            |source, iter, ctx| consume_ident_interned(source, iter, ctx),
 
            &mut poly_vars, "a polymorphic variable", None
 
        )?;
 
        let ident_text = identifier.value.clone(); // because we need it later
 
        let ident_span = identifier.span.clone();
 

	
 
        // Reserve space in AST for definition and add it to the symbol table
 
        let definition_class;
 
        let ast_definition_id;
 
        match kw_text {
 
            KW_STRUCT => {
 
                let struct_def_id = ctx.heap.alloc_struct_definition(|this| {
 
                    StructDefinition::new_empty(this, module.root_id, definition_span, identifier, poly_vars)
 
                });
 
                definition_class = DefinitionClass::Struct;
 
                ast_definition_id = struct_def_id.upcast();
 
            },
 
            KW_ENUM => {
 
                let enum_def_id = ctx.heap.alloc_enum_definition(|this| {
 
                    EnumDefinition::new_empty(this, module.root_id, definition_span, identifier, poly_vars)
 
                });
 
                definition_class = DefinitionClass::Enum;
 
                ast_definition_id = enum_def_id.upcast();
 
            },
 
            KW_UNION => {
src/protocol/parser/pass_tokenizer.rs
 
use crate::protocol::input_source::{
 
    InputSource as InputSource,
 
    ParseError,
 
    InputPosition as InputPosition,
 
    InputSpan
 
};
 

	
 
use super::tokens::*;
 
use super::token_parsing::*;
 

	
 
/// Tokenizer is a reusable parser to tokenize multiple source files using the
 
/// same allocated buffers. In a well-formed program, we produce a consistent
 
/// tree of token ranges such that we may identify tokens that represent a
 
/// definition or an import before producing the entire AST.
 
///
 
/// If the program is not well-formed then the tree may be inconsistent, but we
 
/// will detect this once we transform the tokens into the AST. To ensure a
 
/// consistent AST-producing phase we will require imports to have balanced
 
/// curly braces.
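///
/// For example (illustrative only): a file with a `#module` pragma, one
/// import, and one struct definition yields a root `Module` range with
/// `Pragma`, `Import` and `Definition` child ranges; any tokens in between
/// end up in intermediate `Code` ranges.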
 
pub(crate) struct PassTokenizer {
 
    // Stack of input positions of opening curly braces, used to detect
 
    // unmatched opening braces, unmatched closing braces are detected
 
    // immediately.
 
    curly_stack: Vec<InputPosition>,
 
    // Points to an element in the `TokenBuffer.ranges` variable.
 
    stack_idx: usize,
 
}
 

	
 
impl PassTokenizer {
 
@@ -136,49 +135,49 @@ impl PassTokenizer {
 

	
 
        if !self.curly_stack.is_empty() {
 
            // Let's not add a lot of heuristics and just tell the programmer
 
            // that something is wrong
 
            let last_unmatched_open = self.curly_stack.pop().unwrap();
 
            return Err(ParseError::new_error_str_at_pos(
 
                source, last_unmatched_open, "unmatched opening curly brace '{'"
 
            ));
 
        }
 

	
 
        // TODO: @remove once I'm sure the algorithm works. For now it is better
 
        //  if the debugging is a little more expedient
 
        if cfg!(debug_assertions) {
 
            // For each range make sure its children make sense
 
            for parent_idx in 0..target.ranges.len() {
 
                let cur_range = &target.ranges[parent_idx];
 
                if cur_range.num_child_ranges == 0 {
 
                    assert_eq!(cur_range.first_child_idx, parent_idx as u32);
 
                    assert_eq!(cur_range.last_child_idx, parent_idx as u32);
 
                } else {
 
                    assert_ne!(cur_range.first_child_idx, parent_idx as u32);
 
                    assert_ne!(cur_range.last_child_idx, parent_idx as u32);
 

	
 
                    let mut child_counter = 0u32;
 
                    let mut last_child_idx = cur_range.first_child_idx;
 
                    let last_child_idx = cur_range.first_child_idx;
 
                    let mut child_idx = Some(cur_range.first_child_idx);
 
                    while let Some(cur_child_idx) = child_idx {
 
                        let child_range = &target.ranges[cur_child_idx as usize];
 
                        assert_eq!(child_range.parent_idx, parent_idx);
 
                        child_idx = child_range.next_sibling_idx;
 
                        child_counter += 1;
 
                    }
 

	
 
                    assert_eq!(cur_range.last_child_idx, last_child_idx);
 
                    assert_eq!(cur_range.num_child_ranges, child_counter);
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn is_line_comment_start(&self, first_char: u8, source: &InputSource) -> bool {
 
        return first_char == b'/' && Some(b'/') == source.lookahead(1);
 
    }
 

	
 
    fn is_block_comment_start(&self, first_char: u8, source: &InputSource) -> bool {
 
        return first_char == b'/' && Some(b'*') == source.lookahead(1);
 
    }
 
@@ -351,49 +350,52 @@ impl PassTokenizer {
 
            source.consume();
 
            if let Some(b'=') = source.next() {
 
                source.consume();
 
                token_kind = TokenKind::CaretEquals;
 
            } else {
 
                token_kind = TokenKind::Caret;
 
            }
 
        } else if first_char == b'{' {
 
            source.consume();
 
            token_kind = TokenKind::OpenCurly;
 
        } else if first_char == b'|' {
 
            source.consume();
 
            let next = source.next();
 
            if Some(b'|') == next {
 
                source.consume();
 
                token_kind = TokenKind::OrOr;
 
            } else if Some(b'=') == next {
 
                source.consume();
 
                token_kind = TokenKind::OrEquals;
 
            } else {
 
                token_kind = TokenKind::Or;
 
            }
 
        } else if first_char == b'}' {
 
            source.consume();
 
            token_kind = TokenKind::CloseCurly
 
            token_kind = TokenKind::CloseCurly;
 
        } else if first_char == b'~' {
 
            source.consume();
 
            token_kind = TokenKind::Tilde;
 
        } else {
 
            self.check_ascii(source)?;
 
            return Ok(None);
 
        }
 

	
 
        target.tokens.push(Token::new(token_kind, pos));
 
        Ok(Some((token_kind, pos)))
 
    }
 

	
 
    fn consume_char_literal(&mut self, source: &mut InputSource, target: &mut TokenBuffer) -> Result<(), ParseError> {
 
        let begin_pos = source.pos();
 

	
 
        // Consume the leading quote
 
        debug_assert!(source.next().unwrap() == b'\'');
 
        source.consume();
 

	
 
        let mut prev_char = b'\'';
 
        while let Some(c) = source.next() {
 
            if !c.is_ascii() {
 
                return Err(ParseError::new_error_str_at_pos(source, source.pos(), "non-ASCII character in char literal"));
 
            }
 
            source.consume();
 

	
 
            // Make sure ending quote was not escaped
 
@@ -605,84 +607,83 @@ impl PassTokenizer {
 
    }
 

	
 
    // Consumes whitespace and returns whether or not the whitespace contained
 
    // a newline.
 
    fn consume_whitespace(&self, source: &mut InputSource) -> bool {
 
        debug_assert!(is_whitespace(source.next().unwrap()));
 

	
 
        let mut has_newline = false;
 
        while let Some(c) = source.next() {
 
            if !is_whitespace(c) {
 
                break;
 
            }
 

	
 
            if c == b'\n' {
 
                has_newline = true;
 
            }
 
            source.consume();
 
        }
 

	
 
        has_newline
 
    }
 

	
 
    /// Pushes a new token range onto the stack in the buffers.
 
    fn push_range(&mut self, target: &mut TokenBuffer, range_kind: TokenRangeKind, first_token: u32) {
 
        let new_range_idx = target.ranges.len() as u32;
 
        let cur_range = &mut target.ranges[self.stack_idx];
 

	
 
        // If we have just popped a range and then push a new range, then the
 
        // first token is equal to the last token registered on the current
 
        // range. If not, then we had some intermediate tokens that did not
 
        // belong to a particular kind of token range: hence we insert an
 
        // intermediate "code" range.
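        // For example (illustrative): if the current range ends at token 5 and
        // the new definition range starts at token 8, tokens 5..8 are first
        // wrapped in an intermediate `Code` range.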
 
        if cur_range.end != first_token {
 
            let code_start = cur_range.end;
 
            let code_range_idx = target.ranges.len() as u32;
 

	
 
            if cur_range.first_child_idx == self.stack_idx as u32 {
 
                // The parent of the new "code" range we're going to push does
 
                // not have any registered children yet.
 
                cur_range.first_child_idx = code_range_idx;
 
                cur_range.first_child_idx = new_range_idx;
 
            }
 
            cur_range.last_child_idx = code_range_idx + 1;
 
            cur_range.last_child_idx = new_range_idx + 1;
 

	
 
            cur_range.end = first_token;
 
            cur_range.num_child_ranges += 1;
 
            target.ranges.push(TokenRange{
 
                parent_idx: self.stack_idx,
 
                range_kind: TokenRangeKind::Code,
 
                curly_depth: self.curly_stack.len() as u32,
 
                start: code_start,
 
                end: first_token,
 
                num_child_ranges: 0,
 
                first_child_idx: code_range_idx,
 
                last_child_idx: code_range_idx,
 
                next_sibling_idx: Some(code_range_idx + 1), // we're going to push this thing next
 
                first_child_idx: new_range_idx,
 
                last_child_idx: new_range_idx,
 
                next_sibling_idx: Some(new_range_idx + 1), // we're going to push this thing next
 
            });
 
        } else {
 
            // We're going to push the range in the code below, but while we
 
            // have the `cur_range` borrowed mutably, we fix up its children
 
            // indices.
 
            let new_range_idx = target.ranges.len() as u32;
 
            if cur_range.first_child_idx == self.stack_idx as u32 {
 
                cur_range.first_child_idx = new_range_idx;
 
            }
 
            cur_range.last_child_idx = new_range_idx;
 
        }
 

	
 
        // Insert a new range
 
        let parent_idx = self.stack_idx;
 
        self.stack_idx = target.ranges.len();
 
        let new_range_idx = self.stack_idx as u32;
 
        target.ranges.push(TokenRange{
 
            parent_idx,
 
            range_kind,
 
            curly_depth: self.curly_stack.len() as u32,
 
            start: first_token,
 
            end: first_token,
 
            num_child_ranges: 0,
 
            first_child_idx: new_range_idx,
 
            last_child_idx: new_range_idx,
 
            next_sibling_idx: None,
 
        });
 
    }
 

	
 
    fn pop_range(&mut self, target: &mut TokenBuffer, end_index: u32) {
src/protocol/parser/pass_typing.rs
 
@@ -39,48 +39,49 @@
 
///     Furthermore, queueing of expressions can be more intelligent, currently
 
///     every child/parent of an expression is inferred again when queued. Hence
 
///     we need to queue only specific children/parents of expressions.
 
///  3. Remove the `msg` type?
 
///  4. Disallow certain types in certain operations (e.g. `Void`).
 
///  5. Implement implicit and explicit casting.
 
///  6. Investigate different ways of performing the type-on-type inference,
 
///     maybe there is a better way than flattened trees + markers?
 

	
 
macro_rules! debug_log {
 
    ($format:literal) => {
 
        enabled_debug_print!(true, "types", $format);
 
    };
 
    ($format:literal, $($args:expr),*) => {
 
        enabled_debug_print!(true, "types", $format, $($args),*);
 
    };
 
}
 

	
 
use std::collections::{HashMap, HashSet};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::input_source::ParseError;
 
use crate::protocol::parser::ModuleCompilationPhase;
 
use crate::protocol::parser::type_table::*;
 
use crate::protocol::parser::token_parsing::*;
 
use super::visitor::{
 
    STMT_BUFFER_INIT_CAPACITY,
 
    EXPR_BUFFER_INIT_CAPACITY,
 
    Ctx,
 
    Visitor2,
 
    VisitorResult
 
};
 
use std::collections::hash_map::Entry;
 

	
 
const MESSAGE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Message, InferenceTypePart::UInt8 ];
 
const BOOL_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Bool ];
 
const CHARACTER_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Character ];
 
const STRING_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::String ];
 
const NUMBERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::NumberLike ];
 
const INTEGERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::IntegerLike ];
 
const ARRAY_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Array, InferenceTypePart::Unknown ];
 
const ARRAYLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::ArrayLike, InferenceTypePart::Unknown ];
 

	
 
/// TODO: @performance Turn into PartialOrd+Ord to simplify checks
 
/// TODO: @types Remove the Message -> Byte hack at some point...
 
#[derive(Debug, Clone, Eq, PartialEq)]
 
pub(crate) enum InferenceTypePart {
 
    // A marker with an identifier which we can use to retrieve the type subtree
 
    // that follows the marker. This is used to perform type inference on
 
@@ -126,130 +127,137 @@ impl InferenceTypePart {
 
    fn is_marker(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        match self {
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if the type is concrete; markers are interpreted as concrete
 
    /// types.
 
    fn is_concrete(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | 
 
            ITP::ArrayLike | ITP::PortLike => false,
 
            _ => true
 
        }
 
    }
 

	
 
    fn is_concrete_number(&self) -> bool {
 
        // TODO: @float
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            ITP::UInt8 | ITP::UInt16 | ITP::UInt32 | ITP::UInt64 |
 
            ITP::SInt8 | ITP::SInt16 | ITP::SInt32 | ITP::SInt64 => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_integer(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            ITP::UInt8 | ITP::UInt16 | ITP::UInt32 | ITP::UInt64 |
 
            ITP::SInt8 | ITP::SInt16 | ITP::SInt32 | ITP::SInt64 => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_msg_array_or_slice(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Array | ITP::Slice | ITP::Message => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_port(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Input | ITP::Output => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if a part is less specific than the argument. Only checks for 
 
    /// single-part inference (i.e. not the replacement of an `Unknown` variant 
 
    /// with the argument)
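    ///
    /// e.g. (illustrative) `IntegerLike` may be inferred from `UInt32`, and
    /// `NumberLike` from `IntegerLike`, but never the other way around.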
 
    fn may_be_inferred_from(&self, arg: &InferenceTypePart) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        (*self == ITP::IntegerLike && arg.is_concrete_integer()) ||
 
        (*self == ITP::NumberLike && (arg.is_concrete_number() || *arg == ITP::IntegerLike)) ||
 
        (*self == ITP::ArrayLike && arg.is_concrete_msg_array_or_slice()) ||
 
        (*self == ITP::PortLike && arg.is_concrete_port())
 
    }
 

	
 
    /// Returns the change in "iteration depth" when traversing this particular
 
    /// part. The iteration depth is used to traverse the tree in a linear 
 
    /// fashion. It is basically `number_of_subtypes - 1`
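    ///
    /// e.g. (illustrative) `Array<u32>` flattens to `[Array, UInt32]` with
    /// depth changes `[0, -1]`; summed over any complete subtree the changes
    /// always add up to `-1`.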
 
    fn depth_change(&self) -> i32 {
 
        use InferenceTypePart as ITP;
 
        match &self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike |
 
            ITP::Void | ITP::Bool |
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long | 
 
            ITP::String => {
 
            ITP::UInt8 | ITP::UInt16 | ITP::UInt32 | ITP::UInt64 |
 
            ITP::SInt8 | ITP::SInt16 | ITP::SInt32 | ITP::SInt64 |
 
            ITP::Character | ITP::String => {
 
                -1
 
            },
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) |
 
            ITP::ArrayLike | ITP::Message | ITP::Array | ITP::Slice |
 
            ITP::PortLike | ITP::Input | ITP::Output => {
 
                // One subtype, so do not modify depth
 
                0
 
            },
 
            ITP::Instance(_, num_args) => {
 
                (*num_args as i32) - 1
 
            }
 
        }
 
    }
 
}
 

	
 
impl From<ConcreteTypePart> for InferenceTypePart {
 
    fn from(v: ConcreteTypePart) -> InferenceTypePart {
 
        use ConcreteTypePart as CTP;
 
        use InferenceTypePart as ITP;
 

	
 
        match v {
 
            CTP::Marker(_) => {
 
                unreachable!("encountered marker while converting concrete type to inferred type");
 
            }
 
            CTP::Void => ITP::Void,
 
            CTP::Message => ITP::Message,
 
            CTP::Bool => ITP::Bool,
 
            CTP::Byte => ITP::Byte,
 
            CTP::Short => ITP::Short,
 
            CTP::Int => ITP::Int,
 
            CTP::Long => ITP::Long,
 
            CTP::UInt8 => ITP::UInt8,
 
            CTP::UInt16 => ITP::UInt16,
 
            CTP::UInt32 => ITP::UInt32,
 
            CTP::UInt64 => ITP::UInt64,
 
            CTP::SInt8 => ITP::SInt8,
 
            CTP::SInt16 => ITP::SInt16,
 
            CTP::SInt32 => ITP::SInt32,
 
            CTP::SInt64 => ITP::SInt64,
 
            CTP::Character => ITP::Character,
 
            CTP::String => ITP::String,
 
            CTP::Array => ITP::Array,
 
            CTP::Slice => ITP::Slice,
 
            CTP::Input => ITP::Input,
 
            CTP::Output => ITP::Output,
 
            CTP::Instance(id, num) => ITP::Instance(id, num),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug)]
 
struct InferenceType {
 
    has_body_marker: bool,
 
    is_done: bool,
 
    parts: Vec<InferenceTypePart>,
 
}
 

	
 
impl InferenceType {
 
    /// Generates a new InferenceType. The two boolean flags will be checked in
 
    /// debug mode.
 
    fn new(has_body_marker: bool, is_done: bool, parts: Vec<InferenceTypePart>) -> Self {
 
        if cfg!(debug_assertions) {
 
            debug_assert!(!parts.is_empty());
 
            let parts_body_marker = parts.iter().any(|v| {
 
@@ -614,120 +622,133 @@ impl InferenceType {
 

	
 
        let mut idx = 0;
 
        while idx < self.parts.len() {
 
            let part = &self.parts[idx];
 
            let converted_part = match part {
 
                ITP::MarkerDefinition(marker) => {
 
                    // Outer markers are converted to regular markers; we
 
                    // completely remove the type subtree that follows it
 
                    idx = InferenceType::find_subtree_end_idx(&self.parts, idx + 1);
 
                    concrete_type.parts.push(CTP::Marker(*marker));
 
                    continue;
 
                },
 
                ITP::MarkerBody(_) => {
 
                    // Inner markers are removed when writing to the concrete
 
                    // type.
 
                    idx += 1;
 
                    continue;
 
                },
 
                ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | ITP::ArrayLike | ITP::PortLike => {
 
                    unreachable!("Attempted to convert inference type part {:?} into concrete type", part);
 
                },
 
                ITP::Void => CTP::Void,
 
                ITP::Message => CTP::Message,
 
                ITP::Bool => CTP::Bool,
 
                ITP::Byte => CTP::Byte,
 
                ITP::Short => CTP::Short,
 
                ITP::Int => CTP::Int,
 
                ITP::Long => CTP::Long,
 
                ITP::UInt8 => CTP::UInt8,
 
                ITP::UInt16 => CTP::UInt16,
 
                ITP::UInt32 => CTP::UInt32,
 
                ITP::UInt64 => CTP::UInt64,
 
                ITP::SInt8 => CTP::SInt8,
 
                ITP::SInt16 => CTP::SInt16,
 
                ITP::SInt32 => CTP::SInt32,
 
                ITP::SInt64 => CTP::SInt64,
 
                ITP::Character => CTP::Character,
 
                ITP::String => CTP::String,
 
                ITP::Array => CTP::Array,
 
                ITP::Slice => CTP::Slice,
 
                ITP::Input => CTP::Input,
 
                ITP::Output => CTP::Output,
 
                ITP::Instance(id, num) => CTP::Instance(*id, *num),
 
            };
 

	
 
            concrete_type.parts.push(converted_part);
 
            idx += 1;
 
        }
 
    }
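 
    // Editor's illustrative note (not part of the changeset): given the
 
    // inference parts `[MarkerDefinition(0), SInt32]` the loop above emits the
 
    // single concrete part `Marker(0)`, because the whole subtree behind a
 
    // definition marker is skipped; `[Array, MarkerBody(0), SInt32]` becomes
 
    // `[Array, SInt32]`, because body markers are dropped while their subtree
 
    // is kept.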
 

	
 
    /// Writes a human-readable version of the type to a string. This is used
 
    /// to display error messages
 
    fn write_display_name(
 
        buffer: &mut String, heap: &Heap, parts: &[InferenceTypePart], mut idx: usize
 
    ) -> usize {
 
        use InferenceTypePart as ITP;
 

	
 
        match &parts[idx] {
 
            ITP::MarkerDefinition(thing) => {
 
                buffer.push_str(&format!("{{D:{}}}", *thing));
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
            }, 
 
            ITP::MarkerBody(thing) => {
 
                buffer.push_str(&format!("{{B:{}}}", *thing));
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
            },
 
            ITP::Unknown => buffer.push_str("?"),
 
            ITP::NumberLike => buffer.push_str("num?"),
 
            ITP::IntegerLike => buffer.push_str("int?"),
 
            ITP::NumberLike => buffer.push_str("numberlike"),
 
            ITP::IntegerLike => buffer.push_str("integerlike"),
 
            ITP::ArrayLike => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[?]");
 
            },
 
            ITP::PortLike => {
 
                buffer.push_str("port?<");
 
                buffer.push_str("portlike<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            }
 
            ITP::Void => buffer.push_str("void"),
 
            ITP::Bool => buffer.push_str("bool"),
 
            ITP::Byte => buffer.push_str("byte"),
 
            ITP::Short => buffer.push_str("short"),
 
            ITP::Int => buffer.push_str("int"),
 
            ITP::Long => buffer.push_str("long"),
 
            ITP::String => buffer.push_str("str"),
 
            ITP::Bool => buffer.push_str(KW_TYPE_BOOL_STR),
 
            ITP::UInt8 => buffer.push_str(KW_TYPE_UINT8_STR),
 
            ITP::UInt16 => buffer.push_str(KW_TYPE_UINT16_STR),
 
            ITP::UInt32 => buffer.push_str(KW_TYPE_UINT32_STR),
 
            ITP::UInt64 => buffer.push_str(KW_TYPE_UINT64_STR),
 
            ITP::SInt8 => buffer.push_str(KW_TYPE_SINT8_STR),
 
            ITP::SInt16 => buffer.push_str(KW_TYPE_SINT16_STR),
 
            ITP::SInt32 => buffer.push_str(KW_TYPE_SINT32_STR),
 
            ITP::SInt64 => buffer.push_str(KW_TYPE_SINT64_STR),
 
            ITP::Character => buffer.push_str(KW_TYPE_CHAR_STR),
 
            ITP::String => buffer.push_str(KW_TYPE_STRING_STR),
 
            ITP::Message => {
 
                buffer.push_str("msg<");
 
                buffer.push_str(KW_TYPE_MESSAGE_STR);
 
                buffer.push('<');
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Array => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[]");
 
            },
 
            ITP::Slice => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[..]");
 
            },
 
            ITP::Input => {
 
                buffer.push_str("in<");
 
                buffer.push_str(KW_TYPE_IN_PORT_STR);
 
                buffer.push('<');
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Output => {
 
                buffer.push_str("out<");
 
                buffer.push_str(KW_TYPE_OUT_PORT_STR);
 
                buffer.push('<');
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Instance(definition_id, num_sub) => {
 
                let definition = &heap[*definition_id];
 
                buffer.push_str(definition.identifier().value.as_str());
 
                if *num_sub > 0 {
 
                    buffer.push('<');
 
                    idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    for _sub_idx in 1..*num_sub {
 
                        buffer.push_str(", ");
 
                        idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    }
 
                    buffer.push('>');
 
                }
 
            },
 
        }
 

	
 
        idx
 
    }
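 
    // Editor's illustrative note (assumption about the keyword constants, not
 
    // part of the changeset): `[Array, Bool]` renders as "bool[]",
 
    // `[PortLike, Unknown]` renders as "portlike<?>", and marker parts are
 
    // prefixed with `{D:n}`/`{B:n}` so partially inferred types stay readable
 
    // in error messages.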
 

	
 
    /// Returns the display name of a (part of) the type tree. Will allocate a
 
    /// string.
 
    fn partial_display_name(heap: &Heap, parts: &[InferenceTypePart]) -> String {
 
@@ -863,49 +884,49 @@ struct ExtraData {
 
}
 

	
 
struct VarData {
 
    /// Type of the variable
 
    var_type: InferenceType,
 
    /// VariableExpressions that use the variable
 
    used_at: Vec<ExpressionId>,
 
    /// For channel statements we link to the other variable such that when one
 
    /// channel's interior type is resolved, we can also resolve the other one.
 
    linked_var: Option<VariableId>,
 
}
 

	
 
impl VarData {
 
    fn new_channel(var_type: InferenceType, other_port: VariableId) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: Some(other_port) }
 
    }
 
    fn new_local(var_type: InferenceType) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: None }
 
    }
 
}
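 
// Editor's illustrative note (not part of the changeset): `new_channel` is used
 
// for the two endpoints of a channel statement, each storing the other's
 
// VariableId in `linked_var`; once the payload type of one endpoint is
 
// inferred, the linked endpoint can be resolved as well.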
 

	
 
impl PassTyping {
 
    pub(crate) fn new() -> Self {
 
        PassTyping {
 
            definition_type: DefinitionType::None,
 
            definition_type: DefinitionType::Function(FunctionDefinitionId::new_invalid()),
 
            poly_vars: Vec::new(),
 
            stmt_buffer: Vec::with_capacity(STMT_BUFFER_INIT_CAPACITY),
 
            expr_buffer: Vec::with_capacity(EXPR_BUFFER_INIT_CAPACITY),
 
            var_types: HashMap::new(),
 
            expr_types: HashMap::new(),
 
            extra_data: HashMap::new(),
 
            expr_queued: HashSet::new(),
 
        }
 
    }
 

	
 
    // TODO: @cleanup Unsure about this, maybe a pattern will arise after
 
    //  a while.
 
    pub(crate) fn queue_module_definitions(ctx: &Ctx, queue: &mut ResolveQueue) {
 
        debug_assert_eq!(ctx.module.phase, ModuleCompilationPhase::ValidatedAndLinked);
 
        let root_id = ctx.module.root_id;
 
        let root = &ctx.heap.protocol_descriptions[root_id];
 
        for definition_id in &root.definitions {
 
            let definition = &ctx.heap[*definition_id];
 
            match definition {
 
                Definition::Function(definition) => {
 
                    if definition.poly_vars.is_empty() {
 
                        queue.push(ResolveQueueElement{
 
                            root_id,
 
                            definition_id: *definition_id,
 
@@ -922,185 +943,186 @@ impl PassTyping {
 
                        })
 
                    }
 
                },
 
                Definition::Enum(_) | Definition::Struct(_) | Definition::Union(_) => {},
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn handle_module_definition(
 
        &mut self, ctx: &mut Ctx, queue: &mut ResolveQueue, element: ResolveQueueElement
 
    ) -> VisitorResult {
 
        // Visit the definition
 
        debug_assert_eq!(ctx.module.root_id, element.root_id);
 
        self.reset();
 
        self.poly_vars.clear();
 
        self.poly_vars.extend(element.monomorph_types.iter().cloned());
 
        self.visit_definition(ctx, element.definition_id)?;
 

	
 
        // Keep resolving types
 
        self.resolve_types(ctx, queue)?;
 
        Ok(())
 
    }
 

	
 
    fn reset(&mut self) {
 
        self.definition_type = DefinitionType::None;
 
        self.definition_type = DefinitionType::Function(FunctionDefinitionId::new_invalid());
 
        self.poly_vars.clear();
 
        self.stmt_buffer.clear();
 
        self.expr_buffer.clear();
 
        self.var_types.clear();
 
        self.expr_types.clear();
 
        self.extra_data.clear();
 
        self.expr_queued.clear();
 
    }
 
}
 

	
 
impl Visitor2 for PassTyping {
 
    // Definitions
 

	
 
    fn visit_component_definition(&mut self, ctx: &mut Ctx, id: ComponentDefinitionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Component(id);
 

	
 
        let comp_def = &ctx.heap[id];
 
        debug_assert_eq!(comp_def.poly_vars.len(), self.poly_vars.len(), "component polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting component '{}': {}", &String::from_utf8_lossy(&comp_def.identifier.value), id.0.index);
 
        debug_log!("Visiting component '{}': {}", comp_def.identifier.value.as_str(), id.0.index);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        for param_id in comp_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, &param.parser_type, true);
 
            let var_type = self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, true);
 
            debug_assert!(var_type.is_done, "expected component arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData::new_local(var_type));
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionDefinitionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Function(id);
 

	
 
        let func_def = &ctx.heap[id];
 
        debug_assert_eq!(func_def.poly_vars.len(), self.poly_vars.len(), "function polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting function '{}': {}", &String::from_utf8_lossy(&func_def.identifier.value), id.0.index);
 
        debug_log!("Visiting function '{}': {}", func_def.identifier.value.as_str(), id.0.index);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        for param_id in func_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, &param.parser_type, true);
 
            let var_type = self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, true);
 
            debug_assert!(var_type.is_done, "expected function arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData::new_local(var_type));
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    // Statements
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        // Transfer statements for traversal
 
        let block = &ctx.heap[id];
 

	
 
        for stmt_id in block.statements.clone() {
 
            self.visit_stmt(ctx, stmt_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, ctx: &mut Ctx, id: MemoryStatementId) -> VisitorResult {
 
        let memory_stmt = &ctx.heap[id];
 

	
 
        let local = &ctx.heap[memory_stmt.variable];
 
        let var_type = self.determine_inference_type_from_parser_type(ctx, &local.parser_type, true);
 
        let var_type = self.determine_inference_type_from_parser_type_elements(&local.parser_type.elements, true);
 
        self.var_types.insert(memory_stmt.variable.upcast(), VarData::new_local(var_type));
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, ctx: &mut Ctx, id: ChannelStatementId) -> VisitorResult {
 
        let channel_stmt = &ctx.heap[id];
 

	
 
        let from_local = &ctx.heap[channel_stmt.from];
 
        let from_var_type = self.determine_inference_type_from_parser_type(ctx, &from_local.parser_type, true);
 
        let from_var_type = self.determine_inference_type_from_parser_type_elements(&from_local.parser_type.elements, true);
 
        self.var_types.insert(from_local.this.upcast(), VarData::new_channel(from_var_type, channel_stmt.to.upcast()));
 

	
 
        let to_local = &ctx.heap[channel_stmt.to];
 
        let to_var_type = self.determine_inference_type_from_parser_type(ctx, &to_local.parser_type, true);
 
        let to_var_type = self.determine_inference_type_from_parser_type_elements(&to_local.parser_type.elements, true);
 
        self.var_types.insert(to_local.this.upcast(), VarData::new_channel(to_var_type, channel_stmt.from.upcast()));
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        let labeled_stmt = &ctx.heap[id];
 
        let substmt_id = labeled_stmt.body;
 
        self.visit_stmt(ctx, substmt_id)
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        let if_stmt = &ctx.heap[id];
 

	
 
        let true_body_id = if_stmt.true_body;
 
        let false_body_id = if_stmt.false_body;
 
        let test_expr_id = if_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_block_stmt(ctx, true_body_id)?;
 
        if let Some(false_body_id) = false_body_id {
 
            self.visit_block_stmt(ctx, false_body_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_while_stmt(&mut self, ctx: &mut Ctx, id: WhileStatementId) -> VisitorResult {
 
        let while_stmt = &ctx.heap[id];
 

	
 
        let body_id = while_stmt.body;
 
        let test_expr_id = while_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_block_stmt(ctx, body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        let sync_stmt = &ctx.heap[id];
 
        let body_id = sync_stmt.body;
 

	
 
        self.visit_block_stmt(ctx, body_id)
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        let return_stmt = &ctx.heap[id];
 
        let expr_id = return_stmt.expression;
 
        debug_assert_eq!(return_stmt.expressions.len(), 1);
 
        let expr_id = return_stmt.expressions[0];
 

	
 
        self.visit_expr(ctx, expr_id)
 
    }
 

	
 
    fn visit_new_stmt(&mut self, ctx: &mut Ctx, id: NewStatementId) -> VisitorResult {
 
        let new_stmt = &ctx.heap[id];
 
        let call_expr_id = new_stmt.expression;
 

	
 
        self.visit_call_expr(ctx, call_expr_id)
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        let expr_stmt = &ctx.heap[id];
 
        let subexpr_id = expr_stmt.expression;
 

	
 
        self.visit_expr(ctx, subexpr_id)
 
    }
 

	
 
    // Expressions
 

	
 
    fn visit_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
@@ -1307,57 +1329,56 @@ macro_rules! debug_assert_ptrs_distinct {
 
    // Generic case
 
    ($ptr1:ident, $ptr2:ident, $($tail:ident),+) => {
 
        debug_assert_ptrs_distinct!($ptr1, $ptr2);
 
        debug_assert_ptrs_distinct!($ptr2, $($tail),+);
 
    };
 
}
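 
// Editor's illustrative note (not part of the changeset): the recursive case
 
// peels off one identifier per expansion, so
 
//
 
//     debug_assert_ptrs_distinct!(a, b, c);
 
//
 
// expands into the two-pointer checks for (a, b) and then for (b, c).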
 

	
 
impl PassTyping {
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError> {
 
        // Keep inferring until we can no longer make any progress
 
        while let Some(next_expr_id) = self.expr_queued.iter().next() {
 
            let next_expr_id = *next_expr_id;
 
            self.expr_queued.remove(&next_expr_id);
 
            self.progress_expr(ctx, next_expr_id)?;
 
        }
 

	
 
        // We check if we have all the types we need. If we're typechecking a
 
        // polymorphic procedure that already has a monomorph, then the AST has
 
        // already been annotated during an earlier pass. In that case we only
 
        // verify the inferred types and do not annotate the AST again.
 
        let definition_id = match &self.definition_type {
 
            DefinitionType::Component(id) => id.upcast(),
 
            DefinitionType::Function(id) => id.upcast(),
 
            _ => unreachable!(),
 
        };
 

	
 
        let already_checked = ctx.types.get_base_definition(&definition_id).unwrap().has_any_monomorph();
 
        for (expr_id, expr_type) in self.expr_types.iter_mut() {
 
            if !expr_type.is_done {
 
                // Auto-infer a still-unconstrained integerlike type to the default signed 32-bit integer
 
                if expr_type.parts.len() == 1 && expr_type.parts[0] == InferenceTypePart::IntegerLike {
 
                    expr_type.parts[0] = InferenceTypePart::Int;
 
                    expr_type.parts[0] = InferenceTypePart::SInt32;
 
                } else {
 
                    let expr = &ctx.heap[*expr_id];
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module.source, expr.span(), format!(
 
                            "could not fully infer the type of this expression (got '{}')",
 
                            expr_type.display_name(&ctx.heap)
 
                        )
 
                    ));
 
                }
 
            }
 

	
 
            if !already_checked {
 
                let concrete_type = ctx.heap[*expr_id].get_type_mut();
 
                expr_type.write_concrete_type(concrete_type);
 
            } else {
 
                if cfg!(debug_assertions) {
 
                    let mut concrete_type = ConcreteType::default();
 
                    expr_type.write_concrete_type(&mut concrete_type);
 
                    debug_assert_eq!(*ctx.heap[*expr_id].get_type(), concrete_type);
 
                }
 
            }
 
        }
 

	
 
        // All types are fine
 
@@ -1383,96 +1404,96 @@ impl PassTyping {
 
                let mut monomorph_types = Vec::with_capacity(extra_data.poly_vars.len());
 
                for (poly_idx, poly_type) in extra_data.poly_vars.iter().enumerate() {
 
                    if !poly_type.is_done {
 
                        // TODO: Single clean function for function signatures and polyvars.
 
                        // TODO: Better error message
 
                        let expr = &ctx.heap[*expr_id];
 
                        return Err(ParseError::new_error_at_span(
 
                            &ctx.module.source, expr.span(), format!(
 
                                "could not fully infer the type of polymorphic variable {} of this expression (got '{}')",
 
                                poly_idx, poly_type.display_name(&ctx.heap)
 
                            )
 
                        ))
 
                    }
 

	
 
                    let mut concrete_type = ConcreteType::default();
 
                    poly_type.write_concrete_type(&mut concrete_type);
 
                    monomorph_types.insert(poly_idx, concrete_type);
 
                }
 

	
 
                // Resolve to the appropriate expression and instantiate 
 
                // monomorphs.
 
                match &ctx.heap[*expr_id] {
 
                    Expression::Call(call_expr) => {
 
                        // Add to type table if not yet typechecked
 
                        if let Method::Symbolic(symbolic) = &call_expr.method {
 
                            let definition_id = symbolic.definition.unwrap();
 
                        if call_expr.method == Method::UserFunction {
 
                            let definition_id = call_expr.definition;
 
                            if !ctx.types.has_monomorph(&definition_id, &monomorph_types) {
 
                                let root_id = ctx.types
 
                                    .get_base_definition(&definition_id)
 
                                    .unwrap()
 
                                    .ast_root;
 

	
 
                                // Pre-emptively add the monomorph to the type table, but
 
                                // we still need to perform typechecking on it
 
                                // TODO: Unsure about this, performance wise
 
                                let queue_element = ResolveQueueElement{
 
                                    root_id,
 
                                    definition_id,
 
                                    monomorph_types,
 
                                };
 
                                if !queue.contains(&queue_element) {
 
                                    queue.push(queue_element);
 
                                }
 
                            }
 
                        }
 
                    },
 
                    Expression::Literal(lit_expr) => {
 
                        let definition_id = match &lit_expr.value {
 
                            Literal::Struct(literal) => literal.definition.as_ref().unwrap(),
 
                            Literal::Enum(literal) => literal.definition.as_ref().unwrap(),
 
                            Literal::Union(literal) => literal.definition.as_ref().unwrap(),
 
                            Literal::Struct(literal) => &literal.definition,
 
                            Literal::Enum(literal) => &literal.definition,
 
                            Literal::Union(literal) => &literal.definition,
 
                            _ => unreachable!("post-inference monomorph for non-struct, non-enum literal")
 
                        };
 
                        if !ctx.types.has_monomorph(definition_id, &monomorph_types) {
 
                            ctx.types.add_monomorph(definition_id, monomorph_types);
 
                        }
 
                    },
 
                    _ => unreachable!("needs fully inference, but not a struct literal or call expression")
 
                }
 
            } // else: was just a helper structure...
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError> {
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let id = expr.this;
 
                self.progress_assignment_expr(ctx, id)
 
            },
 
            Expression::Binding(expr) => {
 
            Expression::Binding(_expr) => {
 
                unimplemented!("progress binding expression");
 
            },
 
            Expression::Conditional(expr) => {
 
                let id = expr.this;
 
                self.progress_conditional_expr(ctx, id)
 
            },
 
            Expression::Binary(expr) => {
 
                let id = expr.this;
 
                self.progress_binary_expr(ctx, id)
 
            },
 
            Expression::Unary(expr) => {
 
                let id = expr.this;
 
                self.progress_unary_expr(ctx, id)
 
            },
 
            Expression::Indexing(expr) => {
 
                let id = expr.this;
 
                self.progress_indexing_expr(ctx, id)
 
            },
 
            Expression::Slicing(expr) => {
 
                let id = expr.this;
 
                self.progress_slicing_expr(ctx, id)
 
            },
 
            Expression::Select(expr) => {
 
                let id = expr.this;
 
@@ -1934,54 +1955,54 @@ impl PassTyping {
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 

	
 
        debug_log!("Literal expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let progress_expr = match &expr.value {
 
            Literal::Null => {
 
                self.apply_forced_constraint(ctx, upcast_id, &MESSAGE_TEMPLATE)?
 
            },
 
            Literal::Integer(_) => {
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?
 
            },
 
            Literal::True | Literal::False => {
 
                self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?
 
            },
 
            Literal::Character(_) => {
 
                self.apply_forced_constraint(ctx, upcast_id, &CHARACTER_TEMPLATE)?;
 
                todo!("check character literal type inference");
 
                self.apply_forced_constraint(ctx, upcast_id, &CHARACTER_TEMPLATE)?
 
            },
 
            Literal::String(_) => {
 
                self.apply_forced_constraint(ctx, upcast_id, &STRING_TEMPLATE)?;
 
                todo!("check string literal type inference");
 
                self.apply_forced_constraint(ctx, upcast_id, &STRING_TEMPLATE)?
 
            },
 
            Literal::Struct(data) => {
 
                let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 
                for poly in &extra.poly_vars {
 
                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
 
                }
 
                let mut poly_progress = HashSet::new();
 
                debug_assert_eq!(extra.embedded.len(), data.fields.len());
 

	
 
                debug_log!(" * During (inferring types from fields and struct type):");
 

	
 
                // Mutually infer field signature/expression types
 
                for (field_idx, field) in data.fields.iter().enumerate() {
 
                    let field_expr_id = field.value;
 
                    let signature_type: *mut _ = &mut extra.embedded[field_idx];
 
                    let field_type: *mut _ = self.expr_types.get_mut(&field_expr_id).unwrap();
 
                    let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
                        ctx, upcast_id, Some(field_expr_id), extra, &mut poly_progress,
 
                        signature_type, 0, field_type, 0
 
                    )?;
 

	
 
                    debug_log!(
 
                        "   - Field {} type | sig: {}, field: {}", field_idx,
 
                        unsafe{&*signature_type}.display_name(&ctx.heap),
 
@@ -2209,55 +2230,49 @@ impl PassTyping {
 
                debug_log!(" * After:");
 
                debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
                progress_expr
 
            },
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // TODO: FIX!!!!
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    // TODO: @cleanup, see how this can be cleaned up once I implement
 
    //  polymorphic struct/enum/union literals. These likely follow the same
 
    //  pattern as here.
 
    fn progress_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 

	
 
        debug_log!("Call expr '{}': {}", match &expr.method {
 
            Method::Create => String::from("create"),
 
            Method::Fires => String::from("fires"),
 
            Method::Get => String::from("get"),
 
            Method::Put => String::from("put"),
 
            Method::Symbolic(method) => String::from_utf8_lossy(&method.identifier.value).to_string()
 
        },upcast_id.index);
 
        debug_log!("Call expr '{}': {}", ctx.heap[expr.definition].identifier().value.as_str(), upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 
        debug_log!(" * During (inferring types from arguments and return type):");
 

	
 
        // Check if we can make progress using the arguments and/or return types
 
        // while keeping track of the polyvars we've extended
 
        let mut poly_progress = HashSet::new();
 
        debug_assert_eq!(extra.embedded.len(), expr.arguments.len());
 

	
 
        for (arg_idx, arg_id) in expr.arguments.clone().into_iter().enumerate() {
 
            let signature_type: *mut _ = &mut extra.embedded[arg_idx];
 
            let argument_type: *mut _ = self.expr_types.get_mut(&arg_id).unwrap();
 
            let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
                ctx, upcast_id, Some(arg_id), extra, &mut poly_progress,
 
                signature_type, 0, argument_type, 0
 
            )?;
 

	
 
            debug_log!(
 
                "   - Arg {} type | sig: {}, arg: {}", arg_idx,
 
                unsafe{&*signature_type}.display_name(&ctx.heap), 
 
                unsafe{&*argument_type}.display_name(&ctx.heap));
 

	
 
            if progress_arg {
 
                // Progressed argument expression
 
@@ -2322,49 +2337,49 @@ impl PassTyping {
 

	
 
        let progress_ret = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
            extra, &poly_progress, signature_type, ret_type
 
        );
 
        debug_log!(
 
            "   - Ret type | sig: {}, arg: {}", 
 
            unsafe{&*signature_type}.display_name(&ctx.heap), 
 
            unsafe{&*ret_type}.display_name(&ctx.heap)
 
        );
 
        if progress_ret {
 
            self.queue_expr_parent(ctx, upcast_id);
 
        }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let var_expr = &ctx.heap[id];
 
        let var_id = var_expr.declaration.unwrap();
 

	
 
        debug_log!("Variable expr '{}': {}", &String::from_utf8_lossy(&ctx.heap[var_id].identifier().value), upcast_id.index);
 
        debug_log!("Variable expr '{}': {}", ctx.heap[var_id].identifier().value.as_str(), upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Var  type: {}", self.var_types.get(&var_id).unwrap().var_type.display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // Retrieve shared variable type and expression type and apply inference
 
        let var_data = self.var_types.get_mut(&var_id).unwrap();
 
        let expr_type = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            &mut var_data.var_type as *mut _, 0, expr_type, 0
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            let var_decl = &ctx.heap[var_id];
 
            return Err(ParseError::new_error_at_span(
 
                &ctx.module.source, var_decl.identifier().span, format!(
 
                    "Conflicting types for this variable, previously assigned the type '{}'",
 
                    var_data.var_type.display_name(&ctx.heap)
 
                )
 
            ).with_info_at_span(
 
                &ctx.module.source, var_expr.identifier.span, format!(
 
                    "But inferred to have incompatible type '{}' here",
 
                    expr_type.display_name(&ctx.heap)
 
                )
 
            ))
 
@@ -2744,487 +2759,441 @@ impl PassTyping {
 
    /// Determines the `InferenceType` for the expression based on the
 
    /// expression parent. Note that if the parent is another expression we do
 
    /// not take special action; instead we let parent expressions fix the types
 
    /// of their subexpressions before this function is called for them.
 
    fn insert_initial_expr_inference_type(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId
 
    ) -> Result<(), ParseError> {
 
        use ExpressionParent as EP;
 
        use InferenceTypePart as ITP;
 

	
 
        let expr = &ctx.heap[expr_id];
 
        let inference_type = match expr.parent() {
 
            EP::None =>
 
                // Should have been set by linker
 
                unreachable!(),
 
            EP::ExpressionStmt(_) | EP::Expression(_, _) =>
 
                // Determined during type inference
 
                InferenceType::new(false, false, vec![ITP::Unknown]),
 
            EP::If(_) | EP::While(_) =>
 
                // Must be a boolean
 
                InferenceType::new(false, true, vec![ITP::Bool]),
 
            EP::Return(_) =>
 
                // Must match the return type of the function
 
                if let DefinitionType::Function(func_id) = self.definition_type {
 
                    let return_parser_type_id = ctx.heap[func_id].return_type;
 
                    self.determine_inference_type_from_parser_type(ctx, return_parser_type_id, true)
 
                    debug_assert_eq!(ctx.heap[func_id].return_types.len(), 1);
 
                    let returned = &ctx.heap[func_id].return_types[0];
 
                    self.determine_inference_type_from_parser_type_elements(&returned.elements, true)
 
                } else {
 
                    // Cannot happen: definition always set upon body traversal
 
                    // and "return" calls in components are illegal.
 
                    unreachable!();
 
                },
 
            EP::New(_) =>
 
                // Must be a component call, which we assign a "Void" return
 
                // type
 
                InferenceType::new(false, true, vec![ITP::Void]),
 
        };
 

	
 
        match self.expr_types.entry(expr_id) {
 
            Entry::Vacant(vacant) => {
 
                vacant.insert(inference_type);
 
            },
 
            Entry::Occupied(mut preexisting) => {
 
                // We already have an entry, this happens if our parent fixed
 
                // our type (e.g. we're used in a conditional expression's test)
 
                // but we have a different type.
 
                // TODO: Is this ever called? It seems like it can't be.
 
                debug_assert!(false, "I am actually called, my ID is {}", expr_id.index);
 
                let old_type = preexisting.get_mut();
 
                if let SingleInferenceResult::Incompatible = InferenceType::infer_subtree_for_single_type(
 
                    old_type, 0, &inference_type.parts, 0
 
                ) {
 
                    return Err(self.construct_expr_type_error(ctx, expr_id, expr_id))
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
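 
    // Editor's illustrative note (not part of the changeset): only a few
 
    // parents fully fix the initial type. An `if`/`while` test starts out as a
 
    // finished `Bool`, a `new` expression as `Void`, a returned expression as
 
    // the function's return type, and an expression used as an expression
 
    // statement or inside another expression starts out as `Unknown`, to be
 
    // filled in during inference.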
 

	
 
    fn insert_initial_call_polymorph_data(
 
        &mut self, ctx: &mut Ctx, call_id: CallExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Note: the polymorph variables may be partially specified and may
 
        // contain references to the wrapping definition's (i.e. the proctype
 
        // we are currently visiting) polymorphic arguments.
 
        //
 
        // The arguments of the call may refer to polymorphic variables in the
 
        // definition of the function we're calling, not of the wrapping
 
        // definition. We insert markers in these inferred types to be able to
 
        // map them back and forth to the polymorphic arguments of the function
 
        // we are calling.
 
        let call = &ctx.heap[call_id];
 

	
 
        // Handle the polymorphic variables themselves
 
        let mut poly_vars = Vec::with_capacity(call.poly_args.len());
 
        for poly_arg_type_id in call.poly_args.clone() { // TODO: @performance
 
            poly_vars.push(self.determine_inference_type_from_parser_type(ctx, poly_arg_type_id, true));
 
        // Handle the polymorphic arguments (if there are any)
 
        let num_poly_args = call.parser_type.elements[0].variant.num_embedded();
 
        let mut poly_args = Vec::with_capacity(num_poly_args);
 
        for embedded_elements in call.parser_type.iter_embedded(0) {
 
            poly_args.push(self.determine_inference_type_from_parser_type_elements(embedded_elements, true));
 
        }
 

	
 
        // Handle the arguments
 
        // TODO: @cleanup: Maybe factor this out for reuse in the validator/linker, should also
 
        //  make the code slightly more robust.
 
        let (embedded_types, return_type) = match &call.method {
 
            Method::Create => {
 
                // Not polymorphic
 
                (
 
                    vec![InferenceType::new(false, true, vec![ITP::Int])],
 
                    InferenceType::new(false, true, vec![ITP::Message, ITP::Byte])
 
                )
 
        // Handle the arguments and return types
 
        let definition = &ctx.heap[call.definition];
 
        let (parameters, returned) = match definition {
 
            Definition::Component(definition) => {
 
                debug_assert_eq!(poly_args.len(), definition.poly_vars.len());
 
                (&definition.parameters, None)
 
            },
 
            Method::Fires => {
 
                // bool fires<T>(PortLike<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::PortLike, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(false, true, vec![ITP::Bool])
 
                )
 
            Definition::Function(definition) => {
 
                debug_assert_eq!(poly_args.len(), definition.poly_vars.len());
 
                (&definition.parameters, Some(&definition.return_types))
 
            },
 
            Method::Get => {
 
                // T get<T>(input<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::Input, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                )
 
            Definition::Struct(_) | Definition::Enum(_) | Definition::Union(_) => {
 
                unreachable!("insert_initial_call_polymorph data for non-procedure type");
 
            },
 
            Method::Put => {
 
                // void Put<T>(output<T> port, T msg)
 
                (
 
                    vec![
 
                        InferenceType::new(true, false, vec![ITP::Output, ITP::MarkerBody(0), ITP::Unknown]),
 
                        InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                    ],
 
                    InferenceType::new(false, true, vec![ITP::Void])
 
                )
 
            }
 
            Method::Symbolic(symbolic) => {
 
                let definition = &ctx.heap[symbolic.definition.unwrap()];
 

	
 
                match definition {
 
                    Definition::Component(definition) => {
 
                        debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, &param.parser_type, false));
 
                        }
 

	
 
                        (parameter_types, InferenceType::new(false, true, vec![InferenceTypePart::Void]))
 
                    },
 
                    Definition::Function(definition) => {
 
                        debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, &param.parser_type, false));
 
                        }
 
        };
 

	
 
                        debug_assert_eq!(definition.return_types.len(), 1, "multiple return types not yet implemented");
 
        let mut parameter_types = Vec::with_capacity(parameters.len());
 
        for parameter_id in parameters.clone().into_iter() { // TODO: @Performance
 
            let param = &ctx.heap[parameter_id];
 
            parameter_types.push(self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, false));
 
        }
 

	
 
                        let return_type = self.determine_inference_type_from_parser_type(ctx, &definition.return_types[0], false);
 
                        (parameter_types, return_type)
 
                    },
 
                    Definition::Struct(_) | Definition::Enum(_) | Definition::Union(_) => {
 
                        unreachable!("insert initial polymorph data for struct/enum/union");
 
                    }
 
                }
 
        let return_type = match returned {
 
            None => {
 
                // Component, so returns a "Void"
 
                InferenceType::new(false, true, vec![InferenceTypePart::Void])
 
            },
 
            Some(returned) => {
 
                debug_assert_eq!(returned.len(), 1);
 
                let returned = &returned[0];
 
                self.determine_inference_type_from_parser_type_elements(&returned.elements, false)
 
            }
 
        };
 

	
 
        self.extra_data.insert(call_id.upcast(), ExtraData {
 
            poly_vars,
 
            embedded: embedded_types,
 
            poly_vars: poly_args,
 
            embedded: parameter_types,
 
            returned: return_type
 
        });
 
    }
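 
    // Editor's illustrative sketch (hypothetical signature, not part of the
 
    // changeset): for a call to a function `T first<T>(T[] values)` the single
 
    // embedded parameter type is built as `[Array, MarkerBody(0), Unknown]` and
 
    // the return type as `[MarkerBody(0), Unknown]`. Once the argument is
 
    // inferred to be, say, an array of SInt32, the body marker maps that
 
    // progress onto polymorphic variable 0 and therefore onto the return type.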
 

	
 
    fn insert_initial_struct_polymorph_data(
 
        &mut self, ctx: &mut Ctx, lit_id: LiteralExpressionId,
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_struct();
 

	
 
        // Handle polymorphic arguments
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args2.len());
 
        let num_embedded = literal.parser_type.elements[0].variant.num_embedded();
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args2.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            ); 
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        let mut poly_args = Vec::with_capacity(num_embedded);
 

	
 
        for embedded_elements in literal.parser_type.iter_embedded(0) {
 
            let poly_type = self.determine_inference_type_from_parser_type_elements(embedded_elements, true);
 
            total_num_poly_parts += poly_type.parts.len();
 
            poly_args.push(poly_type);
 
        }
 

	
 
        // Handle parser types on struct definition
 
        let definition = &ctx.heap[literal.definition.unwrap()];
 
        let definition = match definition {
 
            Definition::Struct(definition) => {
 
                debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                definition
 
            },
 
            _ => unreachable!("definition for struct literal does not point to struct definition")
 
        };
 
        let defined_type = ctx.types.get_base_definition(&literal.definition).unwrap();
 
        let struct_type = defined_type.definition.as_struct();
 
        debug_assert_eq!(poly_args.len(), defined_type.poly_vars.len());
 

	
 
        // Note: the programmer may specify the fields of a struct literal in a
 
        // different order than the definition does. The order in the literal
 
        // takes precedence.
 
        let mut embedded_types = Vec::with_capacity(definition.fields.len());
 
        let mut embedded_types = Vec::with_capacity(struct_type.fields.len());
 
        for lit_field in literal.fields.iter() {
 
            let def_field = &definition.fields[lit_field.field_idx];
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, &def_field.parser_type, false
 
            );
 
            let def_field = &struct_type.fields[lit_field.field_idx];
 
            let inference_type = self.determine_inference_type_from_parser_type_elements(&def_field.parser_type.elements, false);
 
            embedded_types.push(inference_type);
 
        }
 

	
 
        // Return type is the struct type itself, with the appropriate 
 
        // polymorphic variables. So:
 
        // - 1 part for the definition itself
 
        // - one body marker part per polymorphic argument
 
        // - all the parts of the currently known polymorphic arguments
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let parts_reserved = 1 + poly_args.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(definition.this.upcast(), poly_vars.len()));
 
        parts.push(ITP::Instance(literal.definition, poly_args.len()));
 
        let mut return_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
        for (poly_var_idx, poly_var) in poly_args.iter().enumerate() {
 
            if !poly_var.is_done { return_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let return_type = InferenceType::new(!poly_vars.is_empty(), return_type_done, parts);
 
        let return_type = InferenceType::new(!poly_args.is_empty(), return_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars, 
 
            poly_vars: poly_args,
 
            embedded: embedded_types,
 
            returned: return_type,
 
        });
 
    }
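 
    // Editor's illustrative sketch (hypothetical struct, not part of the
 
    // changeset): for a literal of a struct `Pair<T1, T2>` whose first
 
    // polymorphic argument is written as a 32-bit signed integer and whose
 
    // second is left to be inferred, the returned type is laid out as
 
    // `[Instance(Pair, 2), MarkerBody(0), SInt32, MarkerBody(1), Unknown]`,
 
    // i.e. parts_reserved = 1 + 2 + 2 = 5 and `return_type_done` is false.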
 

	
 
    /// Inserts the extra polymorphic data struct for enum expressions. The
 
    /// polymorphic arguments can never be determined from the enum literal
 
    /// itself, but may be inferred from the way the enum is used.
 
    fn insert_initial_enum_polymorph_data(
 
        &mut self, ctx: &Ctx, lit_id: LiteralExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_enum();
 

	
 
        // Handle polymorphic arguments to the enum
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args2.len());
 
        let num_poly_args = literal.parser_type.elements[0].variant.num_embedded();
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args2.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            );
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        let mut poly_args = Vec::with_capacity(num_poly_args);
 

	
 
        for embedded_elements in literal.parser_type.iter_embedded(0) {
 
            let poly_type = self.determine_inference_type_from_parser_type_elements(embedded_elements, true);
 
            total_num_poly_parts += poly_type.parts.len();
 
            poly_args.push(poly_type);
 
        }
 

	
 
        // Handle enum type itself
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let parts_reserved = 1 + poly_args.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(literal.definition.unwrap(), poly_vars.len()));
 
        parts.push(ITP::Instance(literal.definition, poly_args.len()));
 
        let mut enum_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
        for (poly_var_idx, poly_var) in poly_args.iter().enumerate() {
 
            if !poly_var.is_done { enum_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let enum_type = InferenceType::new(!poly_vars.is_empty(), enum_type_done, parts);
 
        let enum_type = InferenceType::new(!poly_args.is_empty(), enum_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars,
 
            poly_vars: poly_args,
 
            embedded: Vec::new(),
 
            returned: enum_type,
 
        });
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct for unions. The polymorphic
 
    /// arguments may be partially determined from embedded values in the union.
 
    fn insert_initial_union_polymorph_data(
 
        &mut self, ctx: &Ctx, lit_id: LiteralExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_union();
 

	
 
        // Construct the polymorphic variables
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args2.len());
 
        let num_poly_args = literal.parser_type.elements[0].variant.num_embedded();
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args2.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            );
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        let mut poly_args = Vec::with_capacity(num_poly_args);
 

	
 
        for embedded_elements in literal.parser_type.iter_embedded(0) {
 
            let poly_type = self.determine_inference_type_from_parser_type_elements(embedded_elements, true);
 
            total_num_poly_parts += poly_type.parts.len();
 
            poly_args.push(poly_type);
 
        }
 

	
 
        // Handle any of the embedded values in the variant, if specified
 
        let definition_id = literal.definition.unwrap();
 
        let union_definition = ctx.types.get_base_definition(&definition_id)
 
            .unwrap()
 
            .definition.as_union();
 
        let definition_id = literal.definition;
 
        let type_definition = ctx.types.get_base_definition(&definition_id).unwrap();
 
        let union_definition = type_definition.definition.as_union();
 
        debug_assert_eq!(poly_args.len(), type_definition.poly_vars.len());
 

	
 
        let variant_definition = &union_definition.variants[literal.variant_idx];
 
        debug_assert_eq!(variant_definition.embedded.len(), literal.values.len());
 

	
 
        let mut embedded = Vec::with_capacity(variant_definition.embedded.len());
 
        for embedded_parser_type in &variant_definition.embedded {
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, embedded_parser_type, false
 
            );
 
            let inference_type = self.determine_inference_type_from_parser_type_elements(&embedded_parser_type.elements, false);
 
            embedded.push(inference_type);
 
        }
 

	
 
        // Handle the type of the union itself
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let parts_reserved = 1 + poly_args.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(definition_id, poly_vars.len()));
 
        parts.push(ITP::Instance(definition_id, poly_args.len()));
 
        let mut union_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
        for (poly_var_idx, poly_var) in poly_args.iter().enumerate() {
 
            if !poly_var.is_done { union_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts_reserved, parts.len());
 
        let union_type = InferenceType::new(!poly_vars.is_empty(), union_type_done, parts);
 
        let union_type = InferenceType::new(!poly_args.is_empty(), union_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars,
 
            poly_vars: poly_args,
 
            embedded,
 
            returned: union_type
 
        });
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct. Assumes that the select
 
    /// expression's referenced (definition_id, field_idx) has been resolved.
 
    fn insert_initial_select_polymorph_data(
 
        &mut self, ctx: &Ctx, select_id: SelectExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Retrieve relevant data
 
        let expr = &ctx.heap[select_id];
 
        let field = expr.field.as_symbolic();
 

	
 
        let definition_id = field.definition.unwrap();
 
        let definition = ctx.heap[definition_id].as_struct();
 
        let field_idx = field.field_idx;
 

	
 
        // Generate initial polyvar types and struct type
 
        // TODO: @Performance: we can immediately set the polyvars of the subject's struct type
 
        let num_poly_vars = definition.poly_vars.len();
 
        let mut poly_vars = Vec::with_capacity(num_poly_vars);
 
        let struct_parts_reserved = 1 + 2 * num_poly_vars;
 
        let mut struct_parts = Vec::with_capacity(struct_parts_reserved);
 
        struct_parts.push(ITP::Instance(definition_id, num_poly_vars));        
 

	
 
        for poly_idx in 0..num_poly_vars {
 
            poly_vars.push(InferenceType::new(true, false, vec![
 
                ITP::MarkerBody(poly_idx), ITP::Unknown,
 
            ]));
 
            struct_parts.push(ITP::MarkerBody(poly_idx));
 
            struct_parts.push(ITP::Unknown);
 
        }
 
        debug_assert_eq!(struct_parts.len(), struct_parts_reserved);
 

	
 
        // Generate initial field type
 
        let field_type = self.determine_inference_type_from_parser_type(
 
            ctx, &definition.fields[field_idx].parser_type, false
 
        );
 

	
 
        let field_type = self.determine_inference_type_from_parser_type_elements(&definition.fields[field_idx].parser_type.elements, false);
 
        self.extra_data.insert(select_id.upcast(), ExtraData{
 
            poly_vars,
 
            embedded: vec![InferenceType::new(num_poly_vars != 0, num_poly_vars == 0, struct_parts)],
 
            returned: field_type
 
        });
 
    }
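 
    // Editor's illustrative note (not part of the changeset): for a struct with
 
    // two polymorphic variables the subject type is seeded as
 
    // `[Instance(def, 2), MarkerBody(0), Unknown, MarkerBody(1), Unknown]`
 
    // (struct_parts_reserved = 1 + 2 * 2 = 5), so inferring the subject of the
 
    // select expression also progresses the field type whenever that field
 
    // mentions one of the polymorphic variables.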
 

	
 
    /// Determines the initial InferenceType from the provided ParserType. This
 
    /// may be called with two kinds of intentions:
 
    /// 1. To resolve a ParserType within the body of a function, or on
 
    ///     polymorphic arguments to calls/instantiations within that body. This
 
    ///     means that the polymorphic variables are known and can be replaced
 
    ///     with the monomorph we're instantiating.
 
    /// 2. To resolve a ParserType on a called function's definition or on
 
    ///     an instantiated datatype's members. This means that the polymorphic
 
    ///     arguments inside those ParserTypes refer to the polymorphic
 
    ///     variables in the called/instantiated type's definition.
 
    /// In the second case we place InferenceTypePart::MarkerBody instances such
 
    /// that we can perform type inference on the polymorphic variables.
 
    fn determine_inference_type_from_parser_type(
 
        &mut self, ctx: &Ctx, parser_type: &ParserType,
 
    fn determine_inference_type_from_parser_type_elements(
 
        &mut self, elements: &[ParserTypeElement],
 
        parser_type_in_body: bool
 
    ) -> InferenceType {
 
        use ParserTypeVariant as PTV;
 
        use InferenceTypePart as ITP;
 

	
 
        let mut infer_type = Vec::with_capacity(parser_type.elements.len());
 
        let mut infer_type = Vec::with_capacity(elements.len());
 
        let mut has_inferred = false;
 
        let mut has_markers = false;
 

	
 
        for element in &parser_type.elements {
 
        for element in elements {
 
            match &element.variant {
 
                PTV::Message => {
 
                    // TODO: @types Remove the Message -> Byte hack at some point...
 
                    infer_type.push(ITP::Message);
 
                    infer_type.push(ITP::UInt8);
 
                },
 
                PTV::Bool => { infer_type.push(ITP::Bool); },
 
                PTV::UInt8 => { infer_type.push(ITP::UInt8); },
 
                PTV::UInt16 => { infer_type.push(ITP::UInt16); },
 
                PTV::UInt32 => { infer_type.push(ITP::UInt32); },
 
                PTV::UInt64 => { infer_type.push(ITP::UInt64); },
 
                PTV::SInt8 => { infer_type.push(ITP::SInt8); },
 
                PTV::SInt16 => { infer_type.push(ITP::SInt16); },
 
                PTV::SInt32 => { infer_type.push(ITP::SInt32); },
 
                PTV::SInt64 => { infer_type.push(ITP::SInt64); },
 
                PTV::Character => { infer_type.push(ITP::Character); },
 
                PTV::String => { infer_type.push(ITP::String); },
 
                PTV::IntegerLiteral => { unreachable!("integer literal type on variable type"); },
 
                PTV::Inferred => {
 
                    infer_type.push(ITP::Unknown);
 
                    has_inferred = true;
 
                },
 
                PTV::Array => { infer_type.push(ITP::Array); },
 
                PTV::Input => { infer_type.push(ITP::Input); },
 
                PTV::Output => { infer_type.push(ITP::Output); },
 
                PTV::PolymorphicArgument(belongs_to_definition, poly_arg_idx) => {
 
                    let poly_arg_idx = *poly_arg_idx;
 
                    if parser_type_in_body {
 
                        // Refers to polymorphic argument on procedure we're currently processing.
 
                        // This argument is already known.
 
                        debug_assert_eq!(belongs_to_definition, self.definition_type.definition_id());
 
                        debug_assert_eq!(*belongs_to_definition, self.definition_type.definition_id());
 
                        debug_assert!((poly_arg_idx as usize) < self.poly_vars.len());
 

	
 
                        infer_type.push(ITP::MarkerDefinition(poly_arg_idx as usize));
 
                        for concrete_part in &self.poly_vars[poly_arg_idx].parts {
 
                            infer_type.push(ITP::from(*concrete_part));
 
                        }
 
                    } else {
 
                        // Polymorphic argument has to be inferred
 
                        has_markers = true;
 
                        has_inferred = true;
 
                        infer_type.push(ITP::MarkerBody(poly_arg_idx));
 
                        infer_type.push(ITP::Unknown);
 
                    }
 
                },
 
                PTV::Definition(definition_id, num_embedded) => {
 
                    infer_type.push(ITP::Instance(*definition_id, *num_embedded));
 
                }
 
            }
 
        }
 

	
 
        InferenceType::new(has_markers, !has_inferred, infer_type)
 
    }
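    // A minimal sketch of the two PolymorphicArgument cases above. `ArgPart` and
    // `lower_poly_arg` are illustrative stand-ins, not part of this crate: case 1
    // substitutes the already-known polymorphic variable behind a definition marker,
    // case 2 leaves a body marker plus Unknown to be inferred later.
    #[derive(Clone, Debug, PartialEq)]
    enum ArgPart { MarkerDefinition(usize), MarkerBody(usize), UInt32, Unknown }

    fn lower_poly_arg(poly_arg_idx: usize, in_body: bool, known_poly_vars: &[Vec<ArgPart>]) -> Vec<ArgPart> {
        if in_body {
            // Case 1: the poly var belongs to the procedure being checked, so its
            // already-known parts are copied in behind a definition marker.
            let mut parts = vec![ArgPart::MarkerDefinition(poly_arg_idx)];
            parts.extend(known_poly_vars[poly_arg_idx].iter().cloned());
            parts
        } else {
            // Case 2: the poly var belongs to the callee/instantiated type and still
            // has to be inferred, so only a body marker plus Unknown is emitted.
            vec![ArgPart::MarkerBody(poly_arg_idx), ArgPart::Unknown]
        }
    }

    #[test]
    fn poly_arg_lowering() {
        let known = vec![vec![ArgPart::UInt32]];
        assert_eq!(lower_poly_arg(0, true, &known), vec![ArgPart::MarkerDefinition(0), ArgPart::UInt32]);
        assert_eq!(lower_poly_arg(0, false, &known), vec![ArgPart::MarkerBody(0), ArgPart::Unknown]);
    }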
 

	
 
    /// Construct an error when an expression's type does not match. This
 
    /// happens if we infer the expression type from its arguments (e.g. the
 
    /// expression type of an addition operator is the type of its arguments),
 
    /// but the expression type was already set by our parent (e.g. an
 
    /// "if statement" or a "logical not" always expects a boolean).
 
    fn construct_expr_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, arg_id: ExpressionId
 
    ) -> ParseError {
 
        // TODO: Expand and provide more meaningful information for humans
 
        let expr = &ctx.heap[expr_id];
 
        let arg_expr = &ctx.heap[arg_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 
        let arg_type = self.expr_types.get(&arg_id).unwrap();
 

	
 
        return ParseError::new_error_at_span(
 
            &ctx.module.source, expr.span(), format!(
 
                "incompatible types: this expression expected a '{}'",
 
                expr_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
        ).with_info_at_span(
 
            &ctx.module.source, arg_expr.span(), format!(
 
                "but this expression yields a '{}'",
 
                arg_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
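    // The resulting diagnostic carries two spans: the main message on the expression whose
    // type was already fixed, plus an attached note on the argument whose inferred type
    // conflicts with it. construct_arg_type_error below uses the same two-span pattern for
    // a pair of conflicting arguments.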
 

	
 
    fn construct_arg_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let arg1 = &ctx.heap[arg1_id];
 
        let arg2 = &ctx.heap[arg2_id];
 

	
 
        let arg1_type = self.expr_types.get(&arg1_id).unwrap();
 
        let arg2_type = self.expr_types.get(&arg2_id).unwrap();
 

	
 
        return ParseError::new_error_str_at_span(
 
            &ctx.module.source, expr.span(),
 
            "incompatible types: cannot apply this expression"
 
        ).with_info_at_span(
 
            &ctx.module.source, arg1.span(), format!(
 
                "Because this expression has type '{}'",
 
@@ -3292,67 +3261,67 @@ impl PassTyping {
 

	
 
        // Helper function to retrieve the polyvar name and definition name
 
        fn get_poly_var_and_definition_name<'a>(ctx: &'a Ctx, poly_var_idx: usize, definition_id: DefinitionId) -> (&'a str, &'a str) {
 
            let definition = &ctx.heap[definition_id];
 
            let poly_var = definition.poly_vars()[poly_var_idx].value.as_str();
 
            let func_name = definition.identifier().value.as_str();
 

	
 
            (poly_var, func_name)
 
        }
 

	
 
        // Helper function to construct initial error
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &Expression) -> ParseError {
 
            match expr {
 
                Expression::Call(expr) => {
 
                    let (poly_var, func_name) = get_poly_var_and_definition_name(ctx, poly_var_idx, expr.definition);
 
                    return ParseError::new_error_at_span(
 
                        &ctx.module.source, expr.span, format!(
 
                            "Conflicting type for polymorphic variable '{}' of '{}'",
 
                            poly_var, func_name
 
                        )
 
                    )
 
                },
 
                Expression::Literal(expr) => {
 
                    let definition_id = match &expr.value {
 
                        Literal::Struct(v) => v.definition.unwrap(),
 
                        Literal::Enum(v) => v.definition.unwrap(),
 
                        Literal::Union(v) => v.definition.unwrap(),
 
                        Literal::Struct(v) => v.definition,
 
                        Literal::Enum(v) => v.definition,
 
                        Literal::Union(v) => v.definition,
 
                        _ => unreachable!(),
 
                    };
 

	
 
                    let (poly_var, type_name) = get_poly_var_and_definition_name(ctx, poly_var_idx, definition_id);
 
                    return ParseError::new_error_at_span(
 
                        &ctx.module.source, expr.span, format!(
 
                            "Conflicting type for polymorphic variable '{}' of instantiation of '{}'",
 
                            poly_var, type_name
 
                        )
 
                    );
 
                },
 
                Expression::Select(expr) => {
 
                    let field = expr.field.as_symbolic();
 
                    let (poly_var, struct_name) = get_poly_var_and_definition_name(ctx, poly_var_idx, field.definition.unwrap());
 
                    return ParseError::new_error_at_span(
 
                        &ctx.module.source, expr.position(), format!(
 
                        &ctx.module.source, expr.span, format!(
 
                            "Conflicting type for polymorphic variable '{}' while accessing field '{}' of '{}'",
 
                            poly_var, field.identifier.value.as_str(), struct_name
 
                        )
 
                    )
 
                }
 
                _ => unreachable!("called construct_poly_arg_error without an expected expression, got: {:?}", expr)
 
            }
 
        }
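        // These arms mirror the expression kinds for which polymorphic ExtraData is set up
        // (calls, struct/enum/union literals and select expressions); other expressions
        // cannot produce a polymorphic-variable conflict, hence the unreachable arm above.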
 

	
 
        // Actual checking
 
        let expr = &ctx.heap[expr_id];
 
        let (expr_args, expr_return_name) = match expr {
 
            Expression::Call(expr) => 
 
                (
 
                    expr.arguments.clone(),
 
                    "return type"
 
                ),
 
            Expression::Literal(expr) => {
 
                let expressions = match &expr.value {
 
                    Literal::Struct(v) => v.fields.iter()
 
                        .map(|f| f.value)
 
                        .collect(),
 
                    Literal::Enum(_) => Vec::new(),
 
                    Literal::Union(v) => v.values.clone(),
 
@@ -3412,49 +3381,49 @@ impl PassTyping {
 
                            &ctx.module.source, arg_a.span(), format!(
 
                                "This argument inferred it to '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_a)
 
                            )
 
                        ).with_info_at_span(
 
                            &ctx.module.source, arg_b.span(), format!(
 
                                "While this argument inferred it to '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_b)
 
                            )
 
                        )
 
                    }
 
                }
 
            }
 

	
 
            // Check with return type
 
            if let Some((poly_idx, section_arg, section_ret)) = has_poly_mismatch(arg_a, &poly_data.returned) {
 
                let arg = &ctx.heap[expr_args[arg_a_idx]];
 
                return construct_main_error(ctx, poly_idx, expr)
 
                    .with_info_at_span(
 
                        &ctx.module.source, arg.span(), format!(
 
                            "This argument inferred it to '{}'",
 
                            InferenceType::partial_display_name(&ctx.heap, section_arg)
 
                        )
 
                    )
 
                    .with_postfixed_info(
 
                    .with_info_at_span(
 
                        &ctx.module.source, expr.span(), format!(
 
                            "While the {} inferred it to '{}'",
 
                            expr_return_name,
 
                            InferenceType::partial_display_name(&ctx.heap, section_ret)
 
                        )
 
                    );
 
            }
 
        }
 

	
 
        unreachable!("construct_poly_arg_error without actual error found?")
 
    }
 
}
 

	
 
#[cfg(test)]
 
mod tests {
 
    use super::*;
 
    use crate::protocol::arena::Id;
 
    use InferenceTypePart as ITP;
 
    use InferenceType as IT;
 

	
 
    #[test]
 
    fn test_single_part_inference() {
 
        // lhs argument inferred from rhs
 
        let pairs = [

Changeset was too big and was cut off...
