Changeset - f7c20c8ac012
MH <contact@maxhenger.nl> - 2021-04-02 19:04:34
WIP on enum inference and monomorph testing
12 files changed with 605 insertions and 76 deletions:
src/protocol/ast.rs
 
@@ -690,815 +690,824 @@ impl NamespacedIdentifier {
 
    /// arguments.
 
    pub fn strip_poly_args(&self) -> Vec<u8> {
 
        debug_assert!(!self.parts.is_empty() && self.parts[0].is_identifier());
 

	
 
        let mut result = Vec::with_capacity(self.value.len());
 
        let mut iter = self.iter();
 
        let (first_ident, _) = iter.next().unwrap();
 
        result.extend(first_ident);
 

	
 
        for (ident, _) in iter {
 
            result.push(b':');
 
            result.push(b':');
 
            result.extend(ident);
 
        }
 

	
 
        result
 
    }
 

	
 
    /// Returns an iterator of the elements in the namespaced identifier
 
    pub fn iter(&self) -> NamespacedIdentifierIter {
 
        return NamespacedIdentifierIter{
 
            identifier: self,
 
            element_idx: 0
 
        }
 
    }
 

	
 
    pub fn get_poly_args(&self) -> Option<&[ParserTypeId]> {
 
        let has_poly_args = self.parts.iter().any(|v| !v.is_identifier());
 
        if has_poly_args {
 
            Some(&self.poly_args)
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    // Check if two namespaced identifiers match each other when not considering
 
    // the polymorphic arguments
 
    pub fn matches_namespaced_identifier(&self, other: &Self) -> bool {
 
        let mut iter_self = self.iter();
 
        let mut iter_other = other.iter();
 

	
 
        loop {
 
            let val_self = iter_self.next();
 
            let val_other = iter_other.next();
 
            if val_self.is_some() != val_other.is_some() {
 
                // One is longer than the other
 
                return false;
 
            }
 
            if val_self.is_none() {
 
                // Both are none
 
                return true;
 
            }
 

	
 
            // Both are something
 
            let (val_self, _) = val_self.unwrap();
 
            let (val_other, _) = val_other.unwrap();
 
            if val_self != val_other { return false; }
 
        }
 
    }
 

	
 
    // Check if the namespaced identifier matches an identifier when not 
 
    // considering the polymorphic arguments
 
    pub fn matches_identifier(&self, other: &Identifier) -> bool {
 
        let mut iter = self.iter();
 
        let (first_ident, _) = iter.next().unwrap();
 
        if first_ident != other.value { 
 
            return false;
 
        }
 

	
 
        if iter.next().is_some() {
 
            return false;
 
        }
 

	
 
        return true;
 
    }
 
}
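
// Illustration (a sketch added for clarity, not part of the original source):
// for a namespaced identifier written as `std::queue<msg>`, `strip_poly_args()`
// yields the bytes `std::queue`, `get_poly_args()` returns the single `msg`
// type argument, and `matches_identifier()` only returns true when the
// namespaced identifier consists of a single identifier part equal to the
// plain identifier, ignoring any polymorphic arguments.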
 

	
 
/// Iterator over elements of the namespaced identifier. The element index will
 
/// only ever be at the start of an identifier element.
 
#[derive(Debug)]
 
pub struct NamespacedIdentifierIter<'a> {
 
    identifier: &'a NamespacedIdentifier,
 
    element_idx: usize,
 
}
 

	
 
impl<'a> Iterator for NamespacedIdentifierIter<'a> {
 
    type Item = (&'a [u8], Option<&'a [ParserTypeId]>);
 
    fn next(&mut self) -> Option<Self::Item> {
 
        match self.get(self.element_idx) {
 
            Some((ident, poly)) => {
 
                self.element_idx += 1;
 
                if poly.is_some() {
 
                    self.element_idx += 1;
 
                }
 
                Some((ident, poly))
 
            },
 
            None => None
 
        }
 
    }
 
}
 

	
 
impl<'a> NamespacedIdentifierIter<'a> {
 
    /// Returns the number of parts iterated over. This may not correspond to
    /// the number of times `next()` was called, because returning an
    /// identifier with polymorphic arguments increments the internal counter
    /// by 2.
 
    pub fn num_returned(&self) -> usize {
 
        return self.element_idx;
 
    }
 

	
 
    pub fn num_remaining(&self) -> usize {
 
        return self.identifier.parts.len() - self.element_idx;
 
    }
 

	
 
    pub fn returned_section(&self) -> &[u8] {
 
        if self.element_idx == 0 { return &self.identifier.value[0..0]; }
 

	
 
        let last_idx = match &self.identifier.parts[self.element_idx - 1] {
 
            NamespacedIdentifierPart::Identifier{end, ..} => *end,
 
            NamespacedIdentifierPart::PolyArgs{end, ..} => *end,
 
        };
 

	
 
        return &self.identifier.value[..last_idx as usize];
 
    }
 

	
 
    /// Returns a specific element from the namespaced identifier
 
    pub fn get(&self, idx: usize) -> Option<<Self as Iterator>::Item> {
 
        if idx >= self.identifier.parts.len() { 
 
            return None 
 
        }
 

	
 
        let cur_part = &self.identifier.parts[idx];
 
        let next_part = self.identifier.parts.get(idx + 1);
 

	
 
        let (ident_start, ident_end) = cur_part.as_identifier();
 
        let poly_slice = match next_part {
 
            Some(part) => match part {
 
                NamespacedIdentifierPart::Identifier{..} => None,
 
                NamespacedIdentifierPart::PolyArgs{start, end} => Some(
 
                    &self.identifier.poly_args[*start as usize..*end as usize]
 
                ),
 
            },
 
            None => None
 
        };
 

	
 
        Some((
 
            &self.identifier.value[ident_start as usize..ident_end as usize],
 
            poly_slice
 
        ))
 
    }
 

	
 
    /// Returns the previously returned index into the parts array of the
 
    /// identifier.
 
    pub fn prev_idx(&self) -> Option<usize> {
 
        if self.element_idx == 0 { 
 
            return None;
 
        };
 
        
 
        if self.identifier.parts[self.element_idx - 1].is_identifier() { 
 
            return Some(self.element_idx - 1);
 
        }
 

	
 
        // Previous part had polymorphic arguments, so the one before that must
 
        // be an identifier (if well formed)
 
        debug_assert!(self.element_idx >= 2 && self.identifier.parts[self.element_idx - 2].is_identifier());
 
        return Some(self.element_idx - 2)
 
    }
 

	
 
    /// Returns the previously returned result from `next()`
 
    pub fn prev(&self) -> Option<<Self as Iterator>::Item> {
 
        match self.prev_idx() {
 
            None => None,
 
            Some(idx) => self.get(idx)
 
        }
 
    }
 
}
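
// Note (added for clarity, not in the original source): `next()` advances
// `element_idx` past the identifier part and, when present, past its trailing
// `PolyArgs` part as well. This is why `num_returned()` reports the number of
// parts consumed rather than the number of `next()` calls.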
 

	
 
/// TODO: @types Remove the Message -> Byte hack at some point...
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum ParserTypeVariant {
 
    // Basic builtin
 
    Message,
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // Literals (need to get concrete builtin type during typechecking)
 
    IntegerLiteral,
 
    Inferred,
 
    // Complex builtins
 
    Array(ParserTypeId), // array of a type
 
    Input(ParserTypeId), // typed input endpoint of a channel
 
    Output(ParserTypeId), // typed output endpoint of a channel
 
    Symbolic(SymbolicParserType), // symbolic type (definition or polyarg)
 
}
 

	
 
impl ParserTypeVariant {
 
    pub(crate) fn supports_polymorphic_args(&self) -> bool {
 
        use ParserTypeVariant::*;
 
        match self {
 
            Message | Bool | Byte | Short | Int | Long | String | IntegerLiteral | Inferred => false,
 
            _ => true
 
        }
 
    }
 
}
 

	
 
/// ParserType is a specification of a type during the parsing phase and initial
 
/// linker/validator phase of the compilation process. These types may be
 
/// (partially) inferred or represent literals (e.g. an integer whose byte size is
 
/// not yet determined).
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ParserType {
 
    pub this: ParserTypeId,
 
    pub pos: InputPosition,
 
    pub variant: ParserTypeVariant,
 
}
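
// Example (a sketch, not in the original source): a type written roughly as
// `in<byte[]>` is stored as three `ParserType` nodes that reference each other
// through `ParserTypeId`s: an `Input` node pointing at an `Array` node, which
// in turn points at a `Byte` node.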
 

	
 
/// SymbolicParserType is the specification of a symbolic type. During the
 
/// parsing phase we will only store the identifier of the type. During the
 
/// validation phase we will determine whether it refers to a user-defined type,
 
/// or a polymorphic argument. After the validation phase it may still be the
 
/// case that the resulting `variant` will not pass the typechecker.
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct SymbolicParserType {
 
    // Phase 1: parser
 
    pub identifier: NamespacedIdentifier,
 
    // Phase 2: validation/linking (for types in function/component bodies) and
 
    //  type table construction (for embedded types of structs/unions)
 
    pub poly_args2: Vec<ParserTypeId>, // taken from identifier or inferred
 
    pub variant: Option<SymbolicParserTypeVariant>
 
}
 

	
 
/// Specifies whether the symbolic type points to an actual user-defined type,
 
/// or whether it points to a polymorphic argument within the definition (e.g.
 
/// a defined variable `T var` within a function `int func<T>()`).
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum SymbolicParserTypeVariant {
 
    Definition(DefinitionId),
 
    // TODO: figure out if I need the DefinitionId here
 
    PolyArg(DefinitionId, usize), // index of polyarg in the definition
 
}
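
// Example (a sketch with hypothetical names, not in the original source):
// inside `int func<T>(T arg)` the parser type of `arg` is `Symbolic` with
// identifier `T`; validation resolves its `variant` to `PolyArg(func_id, 0)`.
// A user-defined type such as `Point` written in the same position would
// instead resolve to `Definition(point_id)`.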
 

	
 
/// ConcreteType is the representation of a type after resolving symbolic types
 
/// and performing type inference
 
#[derive(Debug, Clone, Copy, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
 
pub enum ConcreteTypePart {
 
    // Markers for the use of polymorphic types within a procedure's body that
 
    // refer to polymorphic variables on the procedure's definition. Different
 
    // from markers in the `InferenceType`, these will not contain nested types.
 
    Marker(usize),
 
    // Special types (cannot be explicitly constructed by the programmer)
 
    Void,
 
    // Builtin types without nested types
 
    Message,
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // Builtin types with one nested type
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // User defined type with any number of nested types
 
    Instance(DefinitionId, usize),
 
}
 

	
 
#[derive(Debug, Clone, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
 
pub struct ConcreteType {
 
    pub(crate) parts: Vec<ConcreteTypePart>
 
}
 

	
 
impl Default for ConcreteType {
 
    fn default() -> Self {
 
        Self{ parts: Vec::new() }
 
    }
 
}
 

	
 
impl ConcreteType {
 
    pub(crate) fn has_marker(&self) -> bool {
 
        self.parts
 
            .iter()
 
            .any(|p| {
 
                if let ConcreteTypePart::Marker(_) = p { true } else { false }
 
            })
 
    }
 
}
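
// A minimal test sketch (not part of the original changeset) of the flattened
// parts encoding described above; it only uses items defined in this file.
#[cfg(test)]
mod concrete_type_example {
    use super::*;

    #[test]
    fn marker_detection() {
        // Presumably `byte[]` flattens to prefix order: the array part first,
        // followed by its single nested type.
        let byte_array = ConcreteType{ parts: vec![ConcreteTypePart::Array, ConcreteTypePart::Byte] };
        assert!(!byte_array.has_marker());

        // A marker for a polymorphic variable makes `has_marker()` return true.
        let marked = ConcreteType{ parts: vec![ConcreteTypePart::Marker(0), ConcreteTypePart::Int] };
        assert!(marked.has_marker());
    }
}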
 

	
 
// TODO: Remove at some point
 
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
 
pub enum PrimitiveType {
 
    Unassigned,
 
    Input,
 
    Output,
 
    Message,
 
    Boolean,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
 
pub struct Type {
 
    pub primitive: PrimitiveType,
 
    pub array: bool,
 
}
 

	
 
#[allow(dead_code)]
 
impl Type {
 
    pub const UNASSIGNED: Type = Type { primitive: PrimitiveType::Unassigned, array: false };
 

	
 
    pub const INPUT: Type = Type { primitive: PrimitiveType::Input, array: false };
 
    pub const OUTPUT: Type = Type { primitive: PrimitiveType::Output, array: false };
 
    pub const MESSAGE: Type = Type { primitive: PrimitiveType::Message, array: false };
 
    pub const BOOLEAN: Type = Type { primitive: PrimitiveType::Boolean, array: false };
 
    pub const BYTE: Type = Type { primitive: PrimitiveType::Byte, array: false };
 
    pub const SHORT: Type = Type { primitive: PrimitiveType::Short, array: false };
 
    pub const INT: Type = Type { primitive: PrimitiveType::Int, array: false };
 
    pub const LONG: Type = Type { primitive: PrimitiveType::Long, array: false };
 

	
 
    pub const INPUT_ARRAY: Type = Type { primitive: PrimitiveType::Input, array: true };
 
    pub const OUTPUT_ARRAY: Type = Type { primitive: PrimitiveType::Output, array: true };
 
    pub const MESSAGE_ARRAY: Type = Type { primitive: PrimitiveType::Message, array: true };
 
    pub const BOOLEAN_ARRAY: Type = Type { primitive: PrimitiveType::Boolean, array: true };
 
    pub const BYTE_ARRAY: Type = Type { primitive: PrimitiveType::Byte, array: true };
 
    pub const SHORT_ARRAY: Type = Type { primitive: PrimitiveType::Short, array: true };
 
    pub const INT_ARRAY: Type = Type { primitive: PrimitiveType::Int, array: true };
 
    pub const LONG_ARRAY: Type = Type { primitive: PrimitiveType::Long, array: true };
 
}
 

	
 
impl Display for Type {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        match &self.primitive {
 
            PrimitiveType::Unassigned => {
 
                write!(f, "unassigned")?;
 
            }
 
            PrimitiveType::Input => {
 
                write!(f, "in")?;
 
            }
 
            PrimitiveType::Output => {
 
                write!(f, "out")?;
 
            }
 
            PrimitiveType::Message => {
 
                write!(f, "msg")?;
 
            }
 
            PrimitiveType::Boolean => {
 
                write!(f, "boolean")?;
 
            }
 
            PrimitiveType::Byte => {
 
                write!(f, "byte")?;
 
            }
 
            PrimitiveType::Short => {
 
                write!(f, "short")?;
 
            }
 
            PrimitiveType::Int => {
 
                write!(f, "int")?;
 
            }
 
            PrimitiveType::Long => {
 
                write!(f, "long")?;
 
            }
 
        }
 
        if self.array {
 
            write!(f, "[]")
 
        } else {
 
            Ok(())
 
        }
 
    }
 
}
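
// A small usage sketch (not part of the original changeset): the `Display`
// implementation prints the primitive name, followed by `[]` for array types.
#[cfg(test)]
mod type_display_example {
    use super::*;

    #[test]
    fn formats_primitive_and_array() {
        assert_eq!(format!("{}", Type::BYTE), "byte");
        assert_eq!(format!("{}", Type::BYTE_ARRAY), "byte[]");
    }
}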
 

	
 
type LiteralCharacter = Vec<u8>;
 
type LiteralInteger = i64; // TODO: @int_literal
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Literal {
 
    Null, // message
 
    True,
 
    False,
 
    Character(LiteralCharacter),
 
    Integer(LiteralInteger),
 
    Struct(LiteralStruct),
 
    Enum(LiteralEnum),
 
}
 

	
 
impl Literal {
 
    pub(crate) fn as_struct(&self) -> &LiteralStruct {
 
        if let Literal::Struct(literal) = self{
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Struct", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_struct_mut(&mut self) -> &mut LiteralStruct {
 
        if let Literal::Struct(literal) = self{
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Struct", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_enum(&self) -> &LiteralEnum {
 
        if let Literal::Enum(literal) = self {
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Enum", self)
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LiteralStructField {
 
    // Phase 1: parser
 
    pub(crate) identifier: Identifier,
 
    pub(crate) value: ExpressionId,
 
    // Phase 2: linker
 
    pub(crate) field_idx: usize, // in struct definition
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LiteralStruct {
 
    // Phase 1: parser
 
    pub(crate) identifier: NamespacedIdentifier,
 
    pub(crate) fields: Vec<LiteralStructField>,
 
    // Phase 2: linker
 
    pub(crate) poly_args2: Vec<ParserTypeId>, // taken from identifier once linked to a definition
 
    pub(crate) definition: Option<DefinitionId>
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LiteralEnum {
 
    // Phase 1: parser
 
    pub(crate) identifier: NamespacedIdentifier,
 
    // Phase 2: linker
 
    pub(crate) poly_args2: Vec<ParserTypeId>, // taken from identifier once linked to a definition
 
    pub(crate) definition: Option<DefinitionId>,
 
    pub(crate) variant_idx: usize, // as present in the type table
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Method {
 
    Get,
 
    Put,
 
    Fires,
 
    Create,
 
    Symbolic(MethodSymbolic)
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct MethodSymbolic {
 
    pub(crate) identifier: NamespacedIdentifier,
 
    pub(crate) definition: Option<DefinitionId>
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Field {
 
    Length,
 
    Symbolic(FieldSymbolic),
 
}
 
impl Field {
 
    pub fn is_length(&self) -> bool {
 
        match self {
 
            Field::Length => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    pub fn as_symbolic(&self) -> &FieldSymbolic {
 
        match self {
 
            Field::Symbolic(v) => v,
 
            _ => unreachable!("attempted to get Field::Symbolic from {:?}", self)
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct FieldSymbolic {
 
    // Phase 1: Parser
 
    pub(crate) identifier: Identifier,
 
    // Phase 3: Typing
 
    pub(crate) definition: Option<DefinitionId>,
 
    pub(crate) field_idx: usize,
 
}
 

	
 
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
 
pub enum Scope {
 
    Definition(DefinitionId),
 
    Regular(BlockStatementId),
 
    Synchronous((SynchronousStatementId, BlockStatementId)),
 
}
 

	
 
impl Scope {
 
    pub fn is_block(&self) -> bool {
 
        match &self {
 
            Scope::Definition(_) => false,
 
            Scope::Regular(_) => true,
 
            Scope::Synchronous(_) => true,
 
        }
 
    }
 
    pub fn to_block(&self) -> BlockStatementId {
 
        match &self {
 
            Scope::Regular(id) => *id,
 
            Scope::Synchronous((_, id)) => *id,
 
            _ => panic!("unable to get BlockStatement from Scope")
 
        }
 
    }
 
}
 

	
 
pub trait VariableScope {
 
    fn parent_scope(&self, h: &Heap) -> Option<Scope>;
 
    fn get_variable(&self, h: &Heap, id: &Identifier) -> Option<VariableId>;
 
}
 

	
 
impl VariableScope for Scope {
 
    fn parent_scope(&self, h: &Heap) -> Option<Scope> {
 
        match self {
 
            Scope::Definition(def) => h[*def].parent_scope(h),
 
            Scope::Regular(stmt) => h[*stmt].parent_scope(h),
 
            Scope::Synchronous((stmt, _)) => h[*stmt].parent_scope(h),
 
        }
 
    }
 
    fn get_variable(&self, h: &Heap, id: &Identifier) -> Option<VariableId> {
 
        match self {
 
            Scope::Definition(def) => h[*def].get_variable(h, id),
 
            Scope::Regular(stmt) => h[*stmt].get_variable(h, id),
 
            Scope::Synchronous((stmt, _)) => h[*stmt].get_variable(h, id),
 
        }
 
    }
 
}
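
// Note (added for clarity, not in the original source): variable lookup is
// expected to call `get_variable()` on the innermost scope first and then walk
// `parent_scope()` outwards; the chain ends at `Scope::Definition`, whose
// parameters act as the outermost variables and whose parent scope is `None`.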
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Variable {
 
    Parameter(Parameter),
 
    Local(Local),
 
}
 

	
 
impl Variable {
 
    pub fn identifier(&self) -> &Identifier {
 
        match self {
 
            Variable::Parameter(var) => &var.identifier,
 
            Variable::Local(var) => &var.identifier,
 
        }
 
    }
 
    pub fn is_parameter(&self) -> bool {
 
        match self {
 
            Variable::Parameter(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub fn as_parameter(&self) -> &Parameter {
 
        match self {
 
            Variable::Parameter(result) => result,
 
            _ => panic!("Unable to cast `Variable` to `Parameter`"),
 
        }
 
    }
 
    pub fn as_local(&self) -> &Local {
 
        match self {
 
            Variable::Local(result) => result,
 
            _ => panic!("Unable to cast `Variable` to `Local`"),
 
        }
 
    }
 
    pub fn as_local_mut(&mut self) -> &mut Local {
 
        match self {
 
            Variable::Local(result) => result,
 
            _ => panic!("Unable to cast 'Variable' to 'Local'"),
 
        }
 
    }
 
}
 

	
 
impl SyntaxElement for Variable {
 
    fn position(&self) -> InputPosition {
 
        match self {
 
            Variable::Parameter(decl) => decl.position(),
 
            Variable::Local(decl) => decl.position(),
 
        }
 
    }
 
}
 

	
 
/// TODO: Remove distinction between parameter/local and add an enum to indicate
 
///     the distinction between the two
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct Parameter {
 
    pub this: ParameterId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub parser_type: ParserTypeId,
 
    pub identifier: Identifier,
 
}
 

	
 
impl SyntaxElement for Parameter {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct Local {
 
    pub this: LocalId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub parser_type: ParserTypeId,
 
    pub identifier: Identifier,
 
    // Phase 2: linker
 
    pub relative_pos_in_block: u32,
 
}
 
impl SyntaxElement for Local {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Definition {
 
    Struct(StructDefinition),
 
    Enum(EnumDefinition),
 
    Component(Component),
 
    Function(Function),
 
}
 

	
 
impl Definition {
 
    pub fn is_struct(&self) -> bool {
 
        match self {
 
            Definition::Struct(_) => true,
 
            _ => false
 
        }
 
    }
 
    pub fn as_struct(&self) -> &StructDefinition {
 
        match self {
 
            Definition::Struct(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'StructDefinition'"),
 
        }
 
    }
 
    pub fn is_enum(&self) -> bool {
 
        match self {
 
            Definition::Enum(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub fn as_enum(&self) -> &EnumDefinition {
 
        match self {
 
            Definition::Enum(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'EnumDefinition'"),
 
        }
 
    }
 
    pub fn is_component(&self) -> bool {
 
        match self {
 
            Definition::Component(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub fn as_component(&self) -> &Component {
 
        match self {
 
            Definition::Component(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `Component`"),
 
        }
 
    }
 
    pub fn is_function(&self) -> bool {
 
        match self {
 
            Definition::Function(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub fn as_function(&self) -> &Function {
 
        match self {
 
            Definition::Function(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `Function`"),
 
        }
 
    }
 
    pub fn identifier(&self) -> &Identifier {
 
        match self {
 
            Definition::Struct(def) => &def.identifier,
 
            Definition::Enum(def) => &def.identifier,
 
            Definition::Component(com) => &com.identifier,
 
            Definition::Function(fun) => &fun.identifier,
 
        }
 
    }
 
    pub fn parameters(&self) -> &Vec<ParameterId> {
 
        // TODO: Fix this
 
        static EMPTY_VEC: Vec<ParameterId> = Vec::new();
 
        match self {
 
            Definition::Component(com) => &com.parameters,
 
            Definition::Function(fun) => &fun.parameters,
 
            _ => &EMPTY_VEC,
 
        }
 
    }
 
    pub fn body(&self) -> StatementId {
 
        // TODO: Fix this
 
        match self {
 
            Definition::Component(com) => com.body,
 
            Definition::Function(fun) => fun.body,
 
            _ => panic!("cannot retrieve body (for EnumDefinition or StructDefinition)")
 
        }
 
    }
 
}
 

	
 
impl SyntaxElement for Definition {
 
    fn position(&self) -> InputPosition {
 
        match self {
 
            Definition::Struct(def) => def.position,
 
            Definition::Enum(def) => def.position,
 
            Definition::Component(def) => def.position(),
 
            Definition::Function(def) => def.position(),
 
        }
 
    }
 
}
 

	
 
impl VariableScope for Definition {
 
    fn parent_scope(&self, _h: &Heap) -> Option<Scope> {
 
        None
 
    }
 
    fn get_variable(&self, h: &Heap, id: &Identifier) -> Option<VariableId> {
 
        for &parameter_id in self.parameters().iter() {
 
            let parameter = &h[parameter_id];
 
            if parameter.identifier == *id {
 
                return Some(parameter_id.0);
 
            }
 
        }
 
        None
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct StructFieldDefinition {
 
    pub position: InputPosition,
 
    pub field: Identifier,
 
    pub parser_type: ParserTypeId,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct StructDefinition {
 
    pub this: StructId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    pub fields: Vec<StructFieldDefinition>
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)]
 
pub enum EnumVariantValue {
 
    None,
 
    Integer(i64),
 
    Type(ParserTypeId),
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct EnumVariantDefinition {
 
    pub position: InputPosition,
 
    pub identifier: Identifier,
 
    pub value: EnumVariantValue,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct EnumDefinition {
 
    pub this: EnumId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    pub variants: Vec<EnumVariantDefinition>,
 
}
 

	
 
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
 
pub enum ComponentVariant {
 
    Primitive,
 
    Composite,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct Component {
 
    pub this: ComponentId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub variant: ComponentVariant,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    pub parameters: Vec<ParameterId>,
 
    pub body: StatementId,
 
}
 

	
 
impl SyntaxElement for Component {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct Function {
 
    pub this: FunctionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub return_type: ParserTypeId,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    pub parameters: Vec<ParameterId>,
 
    pub body: StatementId,
 
}
 

	
 
impl SyntaxElement for Function {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Statement {
 
    Block(BlockStatement),
 
    Local(LocalStatement),
 
    Skip(SkipStatement),
 
    Labeled(LabeledStatement),
 
    If(IfStatement),
 
    EndIf(EndIfStatement),
 
    While(WhileStatement),
 
    EndWhile(EndWhileStatement),
 
    Break(BreakStatement),
 
    Continue(ContinueStatement),
 
    Synchronous(SynchronousStatement),
 
    EndSynchronous(EndSynchronousStatement),
 
    Return(ReturnStatement),
 
    Assert(AssertStatement),
 
    Goto(GotoStatement),
src/protocol/ast_printer.rs
 
use std::fmt::{Debug, Display, Write};
 
use std::io::Write as IOWrite;
 

	
 
use super::ast::*;
 

	
 
const INDENT: usize = 2;
 

	
 
const PREFIX_EMPTY: &'static str = "    ";
 
const PREFIX_ROOT_ID: &'static str = "Root";
 
const PREFIX_PRAGMA_ID: &'static str = "Prag";
 
const PREFIX_IMPORT_ID: &'static str = "Imp ";
 
const PREFIX_TYPE_ANNOT_ID: &'static str = "TyAn";
 
const PREFIX_VARIABLE_ID: &'static str = "Var ";
 
const PREFIX_PARAMETER_ID: &'static str = "Par ";
 
const PREFIX_LOCAL_ID: &'static str = "Loc ";
 
const PREFIX_DEFINITION_ID: &'static str = "Def ";
 
const PREFIX_STRUCT_ID: &'static str = "DefS";
 
const PREFIX_ENUM_ID: &'static str = "DefE";
 
const PREFIX_COMPONENT_ID: &'static str = "DefC";
 
const PREFIX_FUNCTION_ID: &'static str = "DefF";
 
const PREFIX_STMT_ID: &'static str = "Stmt";
 
const PREFIX_BLOCK_STMT_ID: &'static str = "SBl ";
 
const PREFIX_LOCAL_STMT_ID: &'static str = "SLoc";
 
const PREFIX_MEM_STMT_ID: &'static str = "SMem";
 
const PREFIX_CHANNEL_STMT_ID: &'static str = "SCha";
 
const PREFIX_SKIP_STMT_ID: &'static str = "SSki";
 
const PREFIX_LABELED_STMT_ID: &'static str = "SLab";
 
const PREFIX_IF_STMT_ID: &'static str = "SIf ";
 
const PREFIX_ENDIF_STMT_ID: &'static str = "SEIf";
 
const PREFIX_WHILE_STMT_ID: &'static str = "SWhi";
 
const PREFIX_ENDWHILE_STMT_ID: &'static str = "SEWh";
 
const PREFIX_BREAK_STMT_ID: &'static str = "SBre";
 
const PREFIX_CONTINUE_STMT_ID: &'static str = "SCon";
 
const PREFIX_SYNC_STMT_ID: &'static str = "SSyn";
 
const PREFIX_ENDSYNC_STMT_ID: &'static str = "SESy";
 
const PREFIX_RETURN_STMT_ID: &'static str = "SRet";
 
const PREFIX_ASSERT_STMT_ID: &'static str = "SAsr";
 
const PREFIX_GOTO_STMT_ID: &'static str = "SGot";
 
const PREFIX_NEW_STMT_ID: &'static str = "SNew";
 
const PREFIX_PUT_STMT_ID: &'static str = "SPut";
 
const PREFIX_EXPR_STMT_ID: &'static str = "SExp";
 
const PREFIX_ASSIGNMENT_EXPR_ID: &'static str = "EAsi";
 
const PREFIX_CONDITIONAL_EXPR_ID: &'static str = "ECnd";
 
const PREFIX_BINARY_EXPR_ID: &'static str = "EBin";
 
const PREFIX_UNARY_EXPR_ID: &'static str = "EUna";
 
const PREFIX_INDEXING_EXPR_ID: &'static str = "EIdx";
 
const PREFIX_SLICING_EXPR_ID: &'static str = "ESli";
 
const PREFIX_SELECT_EXPR_ID: &'static str = "ESel";
 
const PREFIX_ARRAY_EXPR_ID: &'static str = "EArr";
 
const PREFIX_CONST_EXPR_ID: &'static str = "ECns";
 
const PREFIX_CALL_EXPR_ID: &'static str = "ECll";
 
const PREFIX_VARIABLE_EXPR_ID: &'static str = "EVar";
 

	
 
struct KV<'a> {
 
    buffer: &'a mut String,
 
    prefix: Option<(&'static str, u32)>,
 
    indent: usize,
 
    temp_key: &'a mut String,
 
    temp_val: &'a mut String,
 
}
 

	
 
impl<'a> KV<'a> {
 
    fn new(buffer: &'a mut String, temp_key: &'a mut String, temp_val: &'a mut String, indent: usize) -> Self {
 
        temp_key.clear();
 
        temp_val.clear();
 
        KV{
 
            buffer,
 
            prefix: None,
 
            indent,
 
            temp_key,
 
            temp_val
 
        }
 
    }
 

	
 
    fn with_id(mut self, prefix: &'static str, id: u32) -> Self {
 
        self.prefix = Some((prefix, id));
 
        self
 
    }
 

	
 
    fn with_s_key(self, key: &str) -> Self {
 
        self.temp_key.push_str(key);
 
        self
 
    }
 

	
 
    fn with_d_key<D: Display>(self, key: &D) -> Self {
 
        self.temp_key.push_str(&key.to_string());
 
        self
 
    }
 

	
 
    fn with_s_val(self, val: &str) -> Self {
 
        self.temp_val.push_str(val);
 
        self
 
    }
 

	
 
    fn with_disp_val<D: Display>(self, val: &D) -> Self {
 
        self.temp_val.push_str(&format!("{}", val));
 
        self
 
    }
 

	
 
    fn with_debug_val<D: Debug>(self, val: &D) -> Self {
 
        self.temp_val.push_str(&format!("{:?}", val));
 
        self
 
    }
 

	
 
    fn with_ascii_val(self, val: &[u8]) -> Self {
 
        self.temp_val.push_str(&*String::from_utf8_lossy(val));
 
        self
 
    }
 

	
 
    fn with_opt_disp_val<D: Display>(self, val: Option<&D>) -> Self {
 
        match val {
 
            Some(v) => { self.temp_val.push_str(&format!("Some({})", v)); },
 
            None => { self.temp_val.push_str("None"); }
 
        }
 
        self
 
    }
 

	
 
    fn with_opt_ascii_val(self, val: Option<&[u8]>) -> Self {
 
        match val {
 
            Some(v) => {
 
                self.temp_val.push_str("Some(");
 
                self.temp_val.push_str(&*String::from_utf8_lossy(v));
 
                self.temp_val.push(')');
 
            },
 
            None => {
 
                self.temp_val.push_str("None");
 
            }
 
        }
 
        self
 
    }
 

	
 
    fn with_custom_val<F: Fn(&mut String)>(mut self, val_fn: F) -> Self {
 
        val_fn(&mut self.temp_val);
 
        self
 
    }
 
}
 

	
 
impl<'a> Drop for KV<'a> {
 
    fn drop(&mut self) {
 
        // Prefix and indent
 
        if let Some((prefix, id)) = &self.prefix {
 
            self.buffer.push_str(&format!("{}[{:04}]", prefix, id));
 
        } else {
 
            self.buffer.push_str("           ");
 
        }
 

	
 
        for _ in 0..self.indent * INDENT {
 
            self.buffer.push(' ');
 
        }
 

	
 
        // Leading dash
 
        self.buffer.push_str("- ");
 

	
 
        // Key and value
 
        self.buffer.push_str(self.temp_key);
 
        if self.temp_val.is_empty() {
 
            self.buffer.push(':');
 
        } else {
 
            self.buffer.push_str(": ");
 
            self.buffer.push_str(&self.temp_val);
 
        }
 
        self.buffer.push('\n');
 
    }
 
}
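
// A minimal test sketch (not part of the original changeset) of the line
// format produced by the `KV` builder when it is dropped; it only uses items
// defined in this file.
#[cfg(test)]
mod kv_format_example {
    use super::*;

    #[test]
    fn kv_writes_one_line() {
        let mut buffer = String::new();
        let mut key = String::new();
        let mut val = String::new();

        // `INDENT` is 2, so one indent level adds two spaces after the prefix.
        KV::new(&mut buffer, &mut key, &mut val, 1)
            .with_id(PREFIX_STMT_ID, 7)
            .with_s_key("Name")
            .with_s_val("foo");

        assert_eq!(buffer, "Stmt[0007]  - Name: foo\n");
    }
}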
 

	
 
pub(crate) struct ASTWriter {
 
    cur_definition: Option<DefinitionId>,
 
    buffer: String,
 
    temp1: String,
 
    temp2: String,
 
}
 

	
 
impl ASTWriter {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            cur_definition: None,
 
            buffer: String::with_capacity(4096),
 
            temp1: String::with_capacity(256),
 
            temp2: String::with_capacity(256),
 
        }
 
    }
 
    pub(crate) fn write_ast<W: IOWrite>(&mut self, w: &mut W, heap: &Heap) {
 
        for root_id in heap.protocol_descriptions.iter().map(|v| v.this) {
 
            self.write_module(heap, root_id);
 
            w.write_all(self.buffer.as_bytes()).expect("flush buffer");
 
            self.buffer.clear();
 
        }
 
    }
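
    // Usage sketch (not part of the original changeset):
    //
    //     let mut writer = ASTWriter::new();
    //     writer.write_ast(&mut std::io::stdout(), &heap);
    //
    // where `heap` is the parsed `Heap`; each module in
    // `heap.protocol_descriptions` is written out and the internal buffer is
    // cleared between modules.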
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level module writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_module(&mut self, heap: &Heap, root_id: RootId) {
 
        self.kv(0).with_id(PREFIX_ROOT_ID, root_id.index)
 
            .with_s_key("Module");
 

	
 
        let root = &heap[root_id];
 
        self.kv(1).with_s_key("Pragmas");
 
        for pragma_id in &root.pragmas {
 
            self.write_pragma(heap, *pragma_id, 2);
 
        }
 

	
 
        self.kv(1).with_s_key("Imports");
 
        for import_id in &root.imports {
 
            self.write_import(heap, *import_id, 2);
 
        }
 

	
 
        self.kv(1).with_s_key("Definitions");
 
        for def_id in &root.definitions {
 
            self.write_definition(heap, *def_id, 2);
 
        }
 
    }
 

	
 
    fn write_pragma(&mut self, heap: &Heap, pragma_id: PragmaId, indent: usize) {
 
        match &heap[pragma_id] {
 
            Pragma::Version(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaVersion")
 
                    .with_disp_val(&pragma.version);
 
            },
 
            Pragma::Module(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaModule")
 
                    .with_ascii_val(&pragma.value);
 
            }
 
        }
 
    }
 

	
 
    fn write_import(&mut self, heap: &Heap, import_id: ImportId, indent: usize) {
 
        let import = &heap[import_id];
 
        let indent2 = indent + 1;
 

	
 
        match import {
 
            Import::Module(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportModule");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&import.module_name);
 
                self.kv(indent2).with_s_key("Alias").with_ascii_val(&import.alias.value);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(import.module_id.as_ref().map(|v| &v.index));
 
            },
 
            Import::Symbols(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportSymbol");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&import.module_name);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(import.module_id.as_ref().map(|v| &v.index));
 

	
 
                self.kv(indent2).with_s_key("Symbols");
 

	
 
                let indent3 = indent2 + 1;
 
                let indent4 = indent3 + 1;
 
                for symbol in &import.symbols {
 
                    self.kv(indent3).with_s_key("AliasedSymbol");
 
                    self.kv(indent4).with_s_key("Name").with_ascii_val(&symbol.name.value);
 
                    self.kv(indent4).with_s_key("Alias").with_ascii_val(&symbol.alias.value);
 
                    self.kv(indent4).with_s_key("Definition")
 
                        .with_opt_disp_val(symbol.definition_id.as_ref().map(|v| &v.index));
 
                }
 
            }
 
        }
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level definition writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_definition(&mut self, heap: &Heap, def_id: DefinitionId, indent: usize) {
 
        self.cur_definition = Some(def_id);
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 
        let indent4 = indent3 + 1;
 

	
 
        match &heap[def_id] {
 
            Definition::Struct(_) => todo!("implement Definition::Struct"),
 
            Definition::Enum(def) => {
 
                self.kv(indent).with_id(PREFIX_ENUM_ID, def.this.0.index)
 
                    .with_s_key("DefinitionEnum");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_ascii_val(&poly_var_id.value);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Variants");
 
                for variant in &def.variants {
 
                    self.kv(indent3).with_s_key("Variant");
 
                    self.kv(indent4).with_s_key("Name")
 
                        .with_ascii_val(&variant.identifier.value);
 
                    // TODO: Attached value
 
                }
 
            },
 
            Definition::Function(def) => {
 
                self.kv(indent).with_id(PREFIX_FUNCTION_ID, def.this.0.index)
 
                    .with_s_key("DefinitionFunction");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar");
 
                    self.kv(indent4).with_s_key("Name").with_ascii_val(&poly_var_id.value);
 
                    self.kv(indent3).with_s_key("PolyVar").with_ascii_val(&poly_var_id.value);
 
                }
 

	
 
                self.kv(indent2).with_s_key("ReturnParserType").with_custom_val(|s| write_parser_type(s, heap, &heap[def.return_type]));
 

	
 
                self.kv(indent2).with_s_key("Parameters");
 
                for param_id in &def.parameters {
 
                    self.write_parameter(heap, *param_id, indent3);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, def.body, indent3);
 
            },
 
            Definition::Component(def) => {
 
                self.kv(indent).with_id(PREFIX_COMPONENT_ID,def.this.0.index)
 
                    .with_s_key("DefinitionComponent");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                self.kv(indent2).with_s_key("Variant").with_debug_val(&def.variant);
 

	
 
                self.kv(indent2).with_s_key("PolymorphicVariables");
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar");
 
                    self.kv(indent4).with_s_key("Name").with_ascii_val(&poly_var_id.value);
 
                    self.kv(indent3).with_s_key("PolyVar").with_ascii_val(&poly_var_id.value);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parameters");
 
                for param_id in &def.parameters {
 
                    self.write_parameter(heap, *param_id, indent3)
 
                }
 

	
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, def.body, indent3);
 
            }
 
        }
 
    }
 

	
 
    fn write_parameter(&mut self, heap: &Heap, param_id: ParameterId, indent: usize) {
 
        let indent2 = indent + 1;
 
        let param = &heap[param_id];
 

	
 
        self.kv(indent).with_id(PREFIX_PARAMETER_ID, param_id.0.index)
 
            .with_s_key("Parameter");
 
        self.kv(indent2).with_s_key("Name").with_ascii_val(&param.identifier.value);
 
        self.kv(indent2).with_s_key("ParserType").with_custom_val(|w| write_parser_type(w, heap, &heap[param.parser_type]));
 
    }
 

	
 
    fn write_stmt(&mut self, heap: &Heap, stmt_id: StatementId, indent: usize) {
 
        let stmt = &heap[stmt_id];
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 

	
 
        match stmt {
 
            Statement::Block(stmt) => {
 
                self.kv(indent).with_id(PREFIX_BLOCK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Block");
 

	
 
                for stmt_id in &stmt.statements {
 
                    self.write_stmt(heap, *stmt_id, indent2);
 
                }
 
            },
 
            Statement::Local(stmt) => {
 
                match stmt {
 
                    LocalStatement::Channel(stmt) => {
 
                        self.kv(indent).with_id(PREFIX_CHANNEL_STMT_ID, stmt.this.0.0.index)
 
                            .with_s_key("LocalChannel");
 

	
 
                        self.kv(indent2).with_s_key("From");
 
                        self.write_local(heap, stmt.from, indent3);
 
                        self.kv(indent2).with_s_key("To");
 
                        self.write_local(heap, stmt.to, indent3);
 
                        self.kv(indent2).with_s_key("Next")
 
                            .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
                    },
 
                    LocalStatement::Memory(stmt) => {
 
                        self.kv(indent).with_id(PREFIX_MEM_STMT_ID, stmt.this.0.0.index)
 
                            .with_s_key("LocalMemory");
 

	
 
                        self.kv(indent2).with_s_key("Variable");
 
                        self.write_local(heap, stmt.variable, indent3);
 
                        self.kv(indent2).with_s_key("Next")
 
                            .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
                    }
 
                }
 
            },
 
            Statement::Skip(stmt) => {
 
                self.kv(indent).with_id(PREFIX_SKIP_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Skip");
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Labeled(stmt) => {
 
                self.kv(indent).with_id(PREFIX_LABELED_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Labeled");
 

	
 
                self.kv(indent2).with_s_key("Label").with_ascii_val(&stmt.label.value);
 
                self.kv(indent2).with_s_key("Statement");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::If(stmt) => {
 
                self.kv(indent).with_id(PREFIX_IF_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("If");
 

	
 
                self.kv(indent2).with_s_key("EndIf")
 
                    .with_opt_disp_val(stmt.end_if.as_ref().map(|v| &v.0.index));
 

	
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, stmt.test, indent3);
 

	
 
                self.kv(indent2).with_s_key("TrueBody");
 
                self.write_stmt(heap, stmt.true_body, indent3);
 

	
 
                self.kv(indent2).with_s_key("FalseBody");
 
                self.write_stmt(heap, stmt.false_body, indent3);
 
            },
 
            Statement::EndIf(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDIF_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndIf");
 
                self.kv(indent2).with_s_key("StartIf").with_disp_val(&stmt.start_if.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::While(stmt) => {
 
                self.kv(indent).with_id(PREFIX_WHILE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("While");
 

	
 
                self.kv(indent2).with_s_key("EndWhile")
 
                    .with_opt_disp_val(stmt.end_while.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("InSync")
 
                    .with_opt_disp_val(stmt.in_sync.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, stmt.test, indent3);
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::EndWhile(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDWHILE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndWhile");
 
                self.kv(indent2).with_s_key("StartWhile").with_disp_val(&stmt.start_while.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Break(stmt) => {
 
                self.kv(indent).with_id(PREFIX_BREAK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Break");
 
                self.kv(indent2).with_s_key("Label")
 
                    .with_opt_ascii_val(stmt.label.as_ref().map(|v| v.value.as_slice()));
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::Continue(stmt) => {
 
                self.kv(indent).with_id(PREFIX_CONTINUE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Continue");
 
                self.kv(indent2).with_s_key("Label")
 
                    .with_opt_ascii_val(stmt.label.as_ref().map(|v| v.value.as_slice()));
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::Synchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_SYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Synchronous");
 
                self.kv(indent2).with_s_key("EndSync")
 
                    .with_opt_disp_val(stmt.end_sync.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::EndSynchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDSYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndSynchronous");
 
                self.kv(indent2).with_s_key("StartSync").with_disp_val(&stmt.start_sync.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Return(stmt) => {
 
                self.kv(indent).with_id(PREFIX_RETURN_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Return");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression, indent3);
 
            },
 
            Statement::Assert(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ASSERT_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Assert");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression, indent3);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Goto(stmt) => {
 
                self.kv(indent).with_id(PREFIX_GOTO_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Goto");
 
                self.kv(indent2).with_s_key("Label").with_ascii_val(&stmt.label.value);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::New(stmt) => {
 
                self.kv(indent).with_id(PREFIX_NEW_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("New");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression.upcast(), indent3);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Expression(stmt) => {
 
                self.kv(indent).with_id(PREFIX_EXPR_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("ExpressionStatement");
 
                self.write_expr(heap, stmt.expression, indent2);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            }
 
        }
 
    }
 

	
 
    fn write_expr(&mut self, heap: &Heap, expr_id: ExpressionId, indent: usize) {
 
        let expr = &heap[expr_id];
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 
        let def_id = self.cur_definition.unwrap();
 

	
 
        match expr {
 
            Expression::Assignment(expr) => {
 
                self.kv(indent).with_id(PREFIX_ASSIGNMENT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("AssignmentExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Conditional(expr) => {
 
                self.kv(indent).with_id(PREFIX_CONDITIONAL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ConditionalExpr");
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, expr.test, indent3);
 
                self.kv(indent2).with_s_key("TrueExpression");
 
                self.write_expr(heap, expr.true_expression, indent3);
 
                self.kv(indent2).with_s_key("FalseExpression");
 
                self.write_expr(heap, expr.false_expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Binary(expr) => {
 
                self.kv(indent).with_id(PREFIX_BINARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("BinaryExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Unary(expr) => {
 
                self.kv(indent).with_id(PREFIX_UNARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("UnaryExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Argument");
 
                self.write_expr(heap, expr.expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Indexing(expr) => {
 
                self.kv(indent).with_id(PREFIX_INDEXING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("IndexingExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("Index");
 
                self.write_expr(heap, expr.index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Slicing(expr) => {
 
                self.kv(indent).with_id(PREFIX_SLICING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SlicingExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("FromIndex");
 
                self.write_expr(heap, expr.from_index, indent3);
 
                self.kv(indent2).with_s_key("ToIndex");
 
                self.write_expr(heap, expr.to_index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Select(expr) => {
 
                self.kv(indent).with_id(PREFIX_SELECT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SelectExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 

	
 
                match &expr.field {
 
                    Field::Length => {
 
                        self.kv(indent2).with_s_key("Field").with_s_val("length");
 
                    },
 
                    Field::Symbolic(field) => {
 
                        self.kv(indent2).with_s_key("Field").with_ascii_val(&field.identifier.value);
 
                        self.kv(indent3).with_s_key("Definition").with_opt_disp_val(field.definition.as_ref().map(|v| &v.index));
 
                        self.kv(indent3).with_s_key("Index").with_disp_val(&field.field_idx);
 
                    }
 
                }
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Array(expr) => {
 
                self.kv(indent).with_id(PREFIX_ARRAY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ArrayExpr");
 
                self.kv(indent2).with_s_key("Elements");
 
                for expr_id in &expr.elements {
 
                    self.write_expr(heap, *expr_id, indent3);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Literal(expr) => {
 
                self.kv(indent).with_id(PREFIX_CONST_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ConstantExpr");
 

	
 
                let val = self.kv(indent2).with_s_key("Value");
 
                match &expr.value {
 
                    Literal::Null => { val.with_s_val("null"); },
 
                    Literal::True => { val.with_s_val("true"); },
 
                    Literal::False => { val.with_s_val("false"); },
 
                    Literal::Character(data) => { val.with_ascii_val(data); },
 
                    Literal::Integer(data) => { val.with_disp_val(data); },
 
                    Literal::Struct(data) => {
 
                        val.with_s_val("Struct");
 
                        let indent4 = indent3 + 1;
 

	
 
                        // Polymorphic arguments
 
                        if !data.poly_args2.is_empty() {
 
                            self.kv(indent3).with_s_key("PolymorphicArguments");
 
                            for poly_arg in &data.poly_args2 {
 
                                self.kv(indent4).with_s_key("Argument")
 
                                    .with_custom_val(|v| write_parser_type(v, heap, &heap[*poly_arg]));
 
                            }
 
                        }
 

	
 
                        for field in &data.fields {
 
                            self.kv(indent3).with_s_key("Field");
 
                            self.kv(indent4).with_s_key("Name").with_ascii_val(&field.identifier.value);
 
                            self.kv(indent4).with_s_key("Index").with_disp_val(&field.field_idx);
 
                            self.kv(indent4).with_s_key("ParserType");
 
                            self.write_expr(heap, field.value, indent4 + 1);
 
                        }
 
                    },
 
                    Literal::Enum(data) => {
 
                        val.with_s_val("Enum");
 
                        let indent4 = indent3 + 1;
 

	
 
                        // Polymorphic arguments
 
                        if !data.poly_args2.is_empty() {
 
                            self.kv(indent3).with_s_key("PolymorphicArguments");
 
                            for poly_arg in &data.poly_args2 {
 
                                self.kv(indent4).with_s_key("Argument")
 
                                    .with_custom_val(|v| write_parser_type(v, heap, &heap[*poly_arg]));
 
                            }
 
                        }
 
                    }
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Call(expr) => {
 
                self.kv(indent).with_id(PREFIX_CALL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("CallExpr");
 

	
 
                // Method
 
                let method = self.kv(indent2).with_s_key("Method");
 
                match &expr.method {
 
                    Method::Get => { method.with_s_val("get"); },
 
                    Method::Put => { method.with_s_val("put"); },
 
                    Method::Fires => { method.with_s_val("fires"); },
 
                    Method::Create => { method.with_s_val("create"); },
 
                    Method::Symbolic(symbolic) => {
 
                        method.with_s_val("symbolic");
 
                        self.kv(indent3).with_s_key("Name").with_ascii_val(&symbolic.identifier.value);
 
                        self.kv(indent3).with_s_key("Definition")
 
                            .with_opt_disp_val(symbolic.definition.as_ref().map(|v| &v.index));
 
                    }
 
                }
 

	
 
                // Polymorphic arguments
 
                if !expr.poly_args.is_empty() {
 
                    self.kv(indent2).with_s_key("PolymorphicArguments");
 
                    for poly_arg in &expr.poly_args {
 
                        self.kv(indent3).with_s_key("Argument")
 
                            .with_custom_val(|v| write_parser_type(v, heap, &heap[*poly_arg]));
 
                    }
 
                }
 

	
 
                // Arguments
 
                self.kv(indent2).with_s_key("Arguments");
 
                for arg_id in &expr.arguments {
 
                    self.write_expr(heap, *arg_id, indent3);
 
                }
 

	
 
                // Parent
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Variable(expr) => {
 
                self.kv(indent).with_id(PREFIX_VARIABLE_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("VariableExpr");
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&expr.identifier.value);
 
                self.kv(indent2).with_s_key("Definition")
 
                    .with_opt_disp_val(expr.declaration.as_ref().map(|v| &v.index));
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            }
 
        }
 
    }
 

	
 
    fn write_local(&mut self, heap: &Heap, local_id: LocalId, indent: usize) {
 
        let local = &heap[local_id];
 
        let indent2 = indent + 1;
 

	
 
        self.kv(indent).with_id(PREFIX_LOCAL_ID, local_id.0.index)
 
            .with_s_key("Local");
 

	
 
        self.kv(indent2).with_s_key("Name").with_ascii_val(&local.identifier.value);
 
        self.kv(indent2).with_s_key("ParserType")
 
            .with_custom_val(|w| write_parser_type(w, heap, &heap[local.parser_type]));
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Printing Utilities
 
    //--------------------------------------------------------------------------
 

	
 
    fn kv(&mut self, indent: usize) -> KV {
 
        KV::new(&mut self.buffer, &mut self.temp1, &mut self.temp2, indent)
 
    }
 

	
 
    fn flush<W: IOWrite>(&mut self, w: &mut W) {
 
        w.write_all(self.buffer.as_bytes()).unwrap();
 
        self.buffer.clear()
 
    }
 
}
 

	
 
fn write_option<V: Display>(target: &mut String, value: Option<V>) {
 
    target.clear();
 
    match &value {
 
        Some(v) => target.push_str(&format!("Some({})", v)),
 
        None => target.push_str("None")
 
    };
 
}
 

	
 
fn write_parser_type(target: &mut String, heap: &Heap, t: &ParserType) {
 
    use ParserTypeVariant as PTV;
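    // Illustrative output of the formatting below: a port type prints as
    // `in<bool>`, an array as `array<byte>`, and a symbolic type as, for
    // example, `Foo{def: 3}<int>`.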
 

	
 
    let mut embedded = Vec::new();
 
    match &t.variant {
 
        PTV::Input(id) => { target.push_str("in"); embedded.push(*id); }
 
        PTV::Output(id) => { target.push_str("out"); embedded.push(*id) }
 
        PTV::Array(id) => { target.push_str("array"); embedded.push(*id) }
 
        PTV::Message => { target.push_str("msg"); }
 
        PTV::Bool => { target.push_str("bool"); }
 
        PTV::Byte => { target.push_str("byte"); }
 
        PTV::Short => { target.push_str("short"); }
 
        PTV::Int => { target.push_str("int"); }
 
        PTV::Long => { target.push_str("long"); }
 
        PTV::String => { target.push_str("str"); }
 
        PTV::IntegerLiteral => { target.push_str("int_lit"); }
 
        PTV::Inferred => { target.push_str("auto"); }
 
        PTV::Symbolic(symbolic) => {
 
            target.push_str(&String::from_utf8_lossy(&symbolic.identifier.value));
 
            match symbolic.variant {
 
                Some(SymbolicParserTypeVariant::PolyArg(def_id, idx)) => {
 
                    target.push_str(&format!("{{def: {}, idx: {}}}", def_id.index, idx));
 
                },
 
                Some(SymbolicParserTypeVariant::Definition(def_id)) => {
 
                    target.push_str(&format!("{{def: {}}}", def_id.index));
 
                },
 
                None => {
 
                    target.push_str("{None}");
 
                }
 
            }
 
            embedded.extend(&symbolic.poly_args2);
 
        }
 
    };
 

	
 
    if !embedded.is_empty() {
 
        target.push_str("<");
 
        for (idx, embedded_id) in embedded.into_iter().enumerate() {
 
            if idx != 0 { target.push_str(", "); }
 
            write_parser_type(target, heap, &heap[embedded_id]);
 
        }
 
        target.push_str(">");
 
    }
 
}
 

	
 
fn write_concrete_type(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType) {
 
    use ConcreteTypePart as CTP;
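    // Note: `ConcreteType::parts` is a prefix (pre-order) encoding which the
    // helper below walks recursively; e.g. the parts [Array, Int] describe the
    // type `int[]`, and [Instance(Foo, 2), Int, Bool] describe `Foo<int, bool>`.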
 

	
 
    fn write_concrete_part(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType, mut idx: usize) -> usize {
 
        if idx >= t.parts.len() {
 
            return idx;
 
        }
 

	
 
        match &t.parts[idx] {
 
            CTP::Marker(marker) => {
 
                // Marker points to polymorphic variable index
 
                let definition = &heap[def_id];
 
                let poly_var_ident = match definition {
 
                    Definition::Struct(_) | Definition::Enum(_) => unreachable!(),
 
                    Definition::Function(definition) => &definition.poly_vars[*marker].value,
 
                    Definition::Component(definition) => &definition.poly_vars[*marker].value,
 
                };
 
                target.push_str(&String::from_utf8_lossy(&poly_var_ident));
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
            },
 
            CTP::Void => target.push_str("void"),
 
            CTP::Message => target.push_str("msg"),
 
            CTP::Bool => target.push_str("bool"),
 
            CTP::Byte => target.push_str("byte"),
 
            CTP::Short => target.push_str("short"),
 
            CTP::Int => target.push_str("int"),
 
            CTP::Long => target.push_str("long"),
 
            CTP::String => target.push_str("string"),
 
            CTP::Array => {
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[]");
 
            },
 
            CTP::Slice => {
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[..]");
 
            }
 
            CTP::Input => {
 
                target.push_str("in<");
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>');
 
            },
 
            CTP::Output => {
 
                target.push_str("out<");
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>')
 
            },
 
            CTP::Instance(definition_id, num_embedded) => {
 
                let identifier = heap[*definition_id].identifier();
 
                target.push_str(&String::from_utf8_lossy(&identifier.value));
 
                target.push('<');
 
                for idx_embedded in 0..*num_embedded {
 
                    if idx_embedded != 0 {
 
                        target.push_str(", ");
 
                    }
 
                    idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                }
 
                target.push('>');
 
            }
 
        }
 

	
 
        idx + 1
 
    }
 

	
 
    write_concrete_part(target, heap, def_id, t, 0);
 
}
 

	
 
fn write_expression_parent(target: &mut String, parent: &ExpressionParent) {
 
    use ExpressionParent as EP;
 

	
 
    *target = match parent {
 
        EP::None => String::from("None"),
 
        EP::If(id) => format!("IfStmt({})", id.0.index),
 
        EP::While(id) => format!("WhileStmt({})", id.0.index),
 
        EP::Return(id) => format!("ReturnStmt({})", id.0.index),
 
        EP::Assert(id) => format!("AssertStmt({})", id.0.index),
 
        EP::New(id) => format!("NewStmt({})", id.0.index),
 
        EP::ExpressionStmt(id) => format!("ExprStmt({})", id.0.index),
 
        EP::Expression(id, idx) => format!("Expr({}, {})", id.index, idx)
 
    };
 
}
 
\ No newline at end of file
src/protocol/eval.rs
Show inline comments
 
use std::collections::HashMap;
 
use std::fmt;
 
use std::fmt::{Debug, Display, Formatter};
 
use std::{i16, i32, i64, i8};
 

	
 
use crate::common::*;
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::EvalContext;
 

	
 
// const MAX_RECURSION: usize = 1024;
 

	
 
const BYTE_MIN: i64 = i8::MIN as i64;
 
const BYTE_MAX: i64 = i8::MAX as i64;
 
const SHORT_MIN: i64 = i16::MIN as i64;
 
const SHORT_MAX: i64 = i16::MAX as i64;
 
const INT_MIN: i64 = i32::MIN as i64;
 
const INT_MAX: i64 = i32::MAX as i64;
 

	
 
const MESSAGE_MAX_LENGTH: i64 = SHORT_MAX;
 

	
 
const ONE: Value = Value::Byte(ByteValue(1));
 

	
 
// TODO: This all goes away one day anyway, so use a dirty typechecking hack
 
trait ValueImpl {
 
    fn exact_type(&self) -> Type;
 
    fn is_type_compatible(&self, h: &Heap, t: &ParserType) -> bool {
 
        Self::is_type_compatible_hack(h, t)
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool;
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Value {
 
    Unassigned,
 
    Input(InputValue),
 
    Output(OutputValue),
 
    Message(MessageValue),
 
    Boolean(BooleanValue),
 
    Byte(ByteValue),
 
    Short(ShortValue),
 
    Int(IntValue),
 
    Long(LongValue),
 
    InputArray(InputArrayValue),
 
    OutputArray(OutputArrayValue),
 
    MessageArray(MessageArrayValue),
 
    BooleanArray(BooleanArrayValue),
 
    ByteArray(ByteArrayValue),
 
    ShortArray(ShortArrayValue),
 
    IntArray(IntArrayValue),
 
    LongArray(LongArrayValue),
 
}
 
impl Value {
 
    pub fn receive_message(buffer: &Payload) -> Value {
 
        Value::Message(MessageValue(Some(buffer.clone())))
 
    }
 
    fn create_message(length: Value) -> Value {
 
        match length {
 
            Value::Byte(_) | Value::Short(_) | Value::Int(_) | Value::Long(_) => {
 
                let length: i64 = i64::from(length);
 
                if length < 0 || length > MESSAGE_MAX_LENGTH {
 
                    // Only messages within the expected length are allowed
 
                    Value::Message(MessageValue(None))
 
                } else {
 
                    Value::Message(MessageValue(Some(Payload::new(length as usize))))
 
                }
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn from_constant(constant: &Literal) -> Value {
 
        match constant {
 
            Literal::Null => Value::Message(MessageValue(None)),
 
            Literal::True => Value::Boolean(BooleanValue(true)),
 
            Literal::False => Value::Boolean(BooleanValue(false)),
 
            Literal::Integer(val) => {
 
                // Fit the integer literal into the smallest integer type that can hold it
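                // (e.g. 100 fits in a byte, 300 becomes a short, and so on)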
 
                let val = *val;
 
                if val >= BYTE_MIN && val <= BYTE_MAX {
 
                    Value::Byte(ByteValue(val as i8))
 
                } else if val >= SHORT_MIN && val <= SHORT_MAX {
 
                    Value::Short(ShortValue(val as i16))
 
                } else if val >= INT_MIN && val <= INT_MAX {
 
                    Value::Int(IntValue(val as i32))
 
                } else {
 
                    Value::Long(LongValue(val))
 
                }
 
            }
 
            Literal::Character(_data) => unimplemented!(),
 
            Literal::Struct(_data) => unimplemented!(),
 
            Literal::Enum(_data) => unimplemented!(),
 
        }
 
    }
 
    fn set(&mut self, index: &Value, value: &Value) -> Option<Value> {
 
        // The index must be of integer type, and non-negative
 
        let the_index: usize;
 
        match index {
 
            Value::Byte(_) | Value::Short(_) | Value::Int(_) | Value::Long(_) => {
 
                let index = i64::from(index);
 
                if index < 0 || index >= MESSAGE_MAX_LENGTH {
 
                    // It is inconsistent to update out of bounds
 
                    return None;
 
                }
 
                the_index = index.try_into().unwrap();
 
            }
 
            _ => unreachable!(),
 
        }
 
        // The subject must be either a message or an array
 
        // And the value and the subject must be compatible
 
        match (self, value) {
 
            (Value::Message(MessageValue(None)), _) => {
 
                // It is inconsistent to update the null message
 
                None
 
            }
 
            (Value::Message(MessageValue(Some(payload))), Value::Byte(ByteValue(b))) => {
 
                if *b < 0 {
 
                    // It is inconsistent to update with a negative value
 
                    return None;
 
                }
 
                if let Some(slot) = payload.as_mut_vec().get_mut(the_index) {
 
                    *slot = (*b).try_into().unwrap();
 
                    Some(value.clone())
 
                } else {
 
                    // It is inconsistent to update out of bounds
 
                    None
 
                }
 
            }
 
            (Value::Message(MessageValue(Some(payload))), Value::Short(ShortValue(b))) => {
 
                if *b < 0 || *b > BYTE_MAX as i16 {
 
                    // It is inconsistent to update with a negative value or a too large value
 
                    return None;
 
                }
 
                if let Some(slot) = payload.as_mut_vec().get_mut(the_index) {
 
                    *slot = (*b).try_into().unwrap();
 
                    Some(value.clone())
 
                } else {
 
                    // It is inconsistent to update out of bounds
 
                    None
 
                }
 
            }
 
            (Value::InputArray(_), Value::Input(_)) => todo!(),
 
            (Value::OutputArray(_), Value::Output(_)) => todo!(),
 
            (Value::MessageArray(_), Value::Message(_)) => todo!(),
 
            (Value::BooleanArray(_), Value::Boolean(_)) => todo!(),
 
            (Value::ByteArray(_), Value::Byte(_)) => todo!(),
 
            (Value::ShortArray(_), Value::Short(_)) => todo!(),
 
            (Value::IntArray(_), Value::Int(_)) => todo!(),
 
            (Value::LongArray(_), Value::Long(_)) => todo!(),
 
            _ => unreachable!(),
 
        }
 
    }
 
    fn get(&self, index: &Value) -> Option<Value> {
 
        // The index must be of integer type, and non-negative
 
        let the_index: usize;
 
        match index {
 
            Value::Byte(_) | Value::Short(_) | Value::Int(_) | Value::Long(_) => {
 
                let index = i64::from(index);
 
                if index < 0 || index >= MESSAGE_MAX_LENGTH {
 
                    // It is inconsistent to read out of bounds
 
                    return None;
 
                }
 
                the_index = index.try_into().unwrap();
 
            }
 
            _ => unreachable!(),
 
        }
 
        // The subject must be either a message or an array
 
        match self {
 
            Value::Message(MessageValue(None)) => {
 
                // It is inconsistent to read from the null message
 
                None
 
            }
 
            Value::Message(MessageValue(Some(payload))) => {
 
                if let Some(slot) = payload.as_slice().get(the_index) {
 
                    Some(Value::Short(ShortValue((*slot).try_into().unwrap())))
 
                } else {
 
                    // It is inconsistent to read out of bounds
 
                    None
 
                }
 
            }
 
            _ => panic!("Can only get from port value"),
 
        }
 
    }
 
    fn length(&self) -> Option<Value> {
 
        // The subject must be either a message or an array
 
        match self {
 
            Value::Message(MessageValue(None)) => {
 
                // It is inconsistent to get length from the null message
 
                None
 
            }
 
            Value::Message(MessageValue(Some(buffer))) => {
 
                Some(Value::Int(IntValue((buffer.len()).try_into().unwrap())))
 
            }
 
            Value::InputArray(InputArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::OutputArray(OutputArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::MessageArray(MessageArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::BooleanArray(BooleanArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::ByteArray(ByteArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::ShortArray(ShortArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::IntArray(IntArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::LongArray(LongArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            _ => unreachable!(),
 
        }
 
    }
 
    fn plus(&self, other: &Value) -> Value {
 
        match (self, other) {
 
            (Value::Byte(ByteValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Byte(ByteValue(*s + *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s as i16 + *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 + *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 + *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Short(ShortValue(*s + *o as i16))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s + *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 + *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 + *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Int(IntValue(*s + *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Int(IntValue(*s + *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Int(IntValue(o))) => Value::Int(IntValue(*s + *o)),
 
            (Value::Int(IntValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 + *o))
 
            }
 
            (Value::Long(LongValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Long(LongValue(*s + *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Long(LongValue(*s + *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Long(LongValue(*s + *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s + *o))
 
            }
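            // Message "addition" concatenates payloads; if either operand is
            // the null message, the result is the null message as well.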
 

	
 
            (Value::Message(MessageValue(s)), Value::Message(MessageValue(o))) => {
 
                let payload = if let [Some(s), Some(o)] = [s, o] {
 
                    let mut payload = s.clone();
 
                    payload.concatenate_with(o);
 
                    Some(payload)
 
                } else {
 
                    None
 
                };
 
                Value::Message(MessageValue(payload))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn minus(&self, other: &Value) -> Value {
 
        match (self, other) {
 
            (Value::Byte(ByteValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Byte(ByteValue(*s - *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s as i16 - *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 - *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 - *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Short(ShortValue(*s - *o as i16))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s - *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 - *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 - *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Int(IntValue(*s - *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Int(IntValue(*s - *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Int(IntValue(o))) => Value::Int(IntValue(*s - *o)),
 
            (Value::Int(IntValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 - *o))
 
            }
 
            (Value::Long(LongValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Long(LongValue(*s - *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Long(LongValue(*s - *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Long(LongValue(*s - *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s - *o))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn modulus(&self, other: &Value) -> Value {
 
        match (self, other) {
 
            (Value::Byte(ByteValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Byte(ByteValue(*s % *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s as i16 % *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 % *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 % *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Short(ShortValue(*s % *o as i16))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s % *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 % *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 % *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Int(IntValue(*s % *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Int(IntValue(*s % *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Int(IntValue(o))) => Value::Int(IntValue(*s % *o)),
 
            (Value::Int(IntValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 % *o))
 
            }
 
            (Value::Long(LongValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Long(LongValue(*s % *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Long(LongValue(*s % *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Long(LongValue(*s % *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s % *o))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn eq(&self, other: &Value) -> Value {
 
        match (self, other) {
 
            (Value::Byte(ByteValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i16 == *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i32 == *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i64 == *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i16))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i32 == *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i64 == *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i64 == *o))
 
            }
 
            (Value::Long(LongValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o))
 
            }
 
            (Value::Message(MessageValue(s)), Value::Message(MessageValue(o))) => {
 
                Value::Boolean(BooleanValue(*s == *o))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn neq(&self, other: &Value) -> Value {
 
        match (self, other) {
 
            (Value::Byte(ByteValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i16 != *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i32 != *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i64 != *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o as i16))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i32 != *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Boolean(BooleanValue(*s as i64 != *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Boolean(BooleanValue(*s != *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Long(LongValue(o))) => {
src/protocol/lexer.rs
Show inline comments
 
@@ -133,799 +133,803 @@ impl Lexer<'_> {
 
        if next.is_some() {
 
            self.source.consume();
 
        }
 
        if next == Some(b'\r') && self.source.next() == Some(b'\n') {
 
            self.source.consume();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_whitespace(&mut self, expected: bool) -> Result<(), ParseError> {
 
        let mut found = false;
 
        let mut next = self.source.next();
 
        while next.is_some() {
 
            if next == Some(b' ')
 
                || next == Some(b'\t')
 
                || next == Some(b'\r')
 
                || next == Some(b'\n')
 
            {
 
                self.source.consume();
 
                next = self.source.next();
 
                found = true;
 
                continue;
 
            }
 
            if next == Some(b'/') {
 
                next = self.source.lookahead(1);
 
                if next == Some(b'/') {
 
                    self.source.consume(); // slash
 
                    self.source.consume(); // slash
 
                    self.consume_line()?;
 
                    next = self.source.next();
 
                    found = true;
 
                    continue;
 
                }
 
                if next == Some(b'*') {
 
                    self.source.consume(); // slash
 
                    self.source.consume(); // star
 
                    next = self.source.next();
 
                    while next.is_some() {
 
                        if next == Some(b'*') {
 
                            next = self.source.lookahead(1);
 
                            if next == Some(b'/') {
 
                                self.source.consume(); // star
 
                                self.source.consume(); // slash
 
                                break;
 
                            }
 
                        }
 
                        self.source.consume();
 
                        next = self.source.next();
 
                    }
 
                    next = self.source.next();
 
                    found = true;
 
                    continue;
 
                }
 
            }
 
            break;
 
        }
 
        if expected && !found {
 
            Err(self.error_at_pos("Expected whitespace"))
 
        } else {
 
            Ok(())
 
        }
 
    }
 
    fn consume_any_chars(&mut self) {
 
        if !is_ident_start(self.source.next()) { return }
 
        self.source.consume();
 
        while is_ident_rest(self.source.next()) {
 
            self.source.consume()
 
        }
 
    }
 
    fn has_keyword(&self, keyword: &[u8]) -> bool {
 
        if !self.source.has(keyword) {
 
            return false;
 
        }
 

	
 
        // Word boundary
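        // (e.g. `has_keyword(b"int")` must not match the `int` prefix of `integer`)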
 
        let next = self.source.lookahead(keyword.len());
 
        if next.is_none() { return true; }
 
        return !is_ident_rest(next);
 
    }
 
    fn consume_keyword(&mut self, keyword: &[u8]) -> Result<(), ParseError> {
 
        let len = keyword.len();
 
        for i in 0..len {
 
            let expected = Some(lowercase(keyword[i]));
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected keyword '{}'", String::from_utf8_lossy(keyword))));
 
            }
 
            self.source.consume();
 
        }
 
        if let Some(next) = self.source.next() {
 
            if next >= b'A' && next <= b'Z' || next >= b'a' && next <= b'z' || next >= b'0' && next <= b'9' {
 
                return Err(self.error_at_pos(&format!("Expected word boundary after '{}'", String::from_utf8_lossy(keyword))));
 
            }
 
        }
 
        Ok(())
 
    }
 
    fn has_string(&self, string: &[u8]) -> bool {
 
        self.source.has(string)
 
    }
 
    fn consume_string(&mut self, string: &[u8]) -> Result<(), ParseError> {
 
        let len = string.len();
 
        for i in 0..len {
 
            let expected = Some(string[i]);
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected {}", String::from_utf8_lossy(string))));
 
            }
 
            self.source.consume();
 
        }
 
        Ok(())
 
    }
 
    /// Generic comma-separated consumer. If opening delimiter is not found then
 
    /// `Ok(None)` will be returned. Otherwise it will consume the comma-separated
 
    /// values, allowing a trailing comma. If no comma is found and the closing
 
    /// delimiter is not found, then a parse error with `expected_end_msg` is
 
    /// returned.
 
    fn consume_comma_separated<T, F>(
 
        &mut self, h: &mut Heap, open: u8, close: u8, expected_end_msg: &str, func: F
 
    ) -> Result<Option<Vec<T>>, ParseError>
 
        where F: Fn(&mut Lexer, &mut Heap) -> Result<T, ParseError>
 
    {
 
        if Some(open) != self.source.next() {
 
            return Ok(None)
 
        }
 

	
 
        self.source.consume();
 
        self.consume_whitespace(false)?;
 
        let mut elements = Vec::new();
 
        let mut had_comma = true;
 

	
 
        loop {
 
            if Some(close) == self.source.next() {
 
                self.source.consume();
 
                break;
 
            } else if !had_comma {
 
                return Err(ParseError::new_error(
 
                    &self.source, self.source.pos(), expected_end_msg
 
                ));
 
            }
 

	
 
            elements.push(func(self, h)?);
 
            self.consume_whitespace(false)?;
 

	
 
            had_comma = self.source.next() == Some(b',');
 
            if had_comma {
 
                self.source.consume();
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 

	
 
        Ok(Some(elements))
 
    }
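    // Illustrative call site (a sketch, not taken from this changeset): parsing
    // a parenthesized, comma-separated list of types could look like
    //   let types = self.consume_comma_separated(
    //       h, b'(', b')', "expected a closing ')'", |l, h| l.consume_type(h, false)
    //   )?;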
 
    /// Essentially the same as `consume_comma_separated`, but will not allocate
 
    /// memory. Will return `Ok(true)` and leave the input position at the end
 
    /// of the comma-separated list if well-formed and `Ok(false)` if the list is
 
    /// not present. Otherwise returns `Err(())` and leaves the input position 
 
    /// at a "random" position.
 
    fn consume_comma_separated_spilled_without_pos_recovery<F: Fn(&mut Lexer) -> bool>(
 
        &mut self, open: u8, close: u8, func: F
 
    ) -> Result<bool, ()> {
 
        if Some(open) != self.source.next() {
 
            return Ok(false);
 
        }
 

	
 
        self.source.consume();
 
        if self.consume_whitespace(false).is_err() { return Err(()) };
 
        let mut had_comma = true;
 
        loop {
 
            if Some(close) == self.source.next() {
 
                self.source.consume();
 
                return Ok(true);
 
            } else if !had_comma {
 
                return Err(());
 
            }
 

	
 
            if !func(self) { return Err(()); }
 
            if self.consume_whitespace(false).is_err() { return Err(()) };
 

	
 
            had_comma = self.source.next() == Some(b',');
 
            if had_comma {
 
                self.source.consume();
 
                if self.consume_whitespace(false).is_err() { return Err(()); }
 
            }
 
        }
 
    }
 
    fn consume_ident(&mut self) -> Result<Vec<u8>, ParseError> {
 
        if !self.has_identifier() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let mut result = Vec::new();
 
        let mut next = self.source.next();
 
        result.push(next.unwrap());
 
        self.source.consume();
 
        next = self.source.next();
 
        while is_ident_rest(next) {
 
            result.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 
        Ok(result)
 
    }
 
    fn has_integer(&mut self) -> bool {
 
        is_integer_start(self.source.next())
 
    }
 
    fn consume_integer(&mut self) -> Result<i64, ParseError> {
 
        let position = self.source.pos();
 
        let mut data = Vec::new();
 
        let mut next = self.source.next();
 
        while is_integer_rest(next) {
 
            data.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 

	
 
        let data_len = data.len();
 
        debug_assert_ne!(data_len, 0);
 
        if data_len == 1 {
 
            debug_assert!(data[0] >= b'0' && data[0] <= b'9');
 
            return Ok((data[0] - b'0') as i64);
 
        } else {
 
            // TODO: Fix, u64 should be supported as well
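            // The radix is selected by the second character: e.g. `0b101` is
            // parsed as binary, `0o17` as octal and `0x1F` as hexadecimal;
            // anything else is treated as decimal.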
 
            let parsed = if data[1] == b'b' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 2)
 
            } else if data[1] == b'o' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 8)
 
            } else if data[1] == b'x' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 16)
 
            } else {
 
                // Assume decimal
 
                let data = String::from_utf8_lossy(&data);
 
                i64::from_str_radix(&data, 10)
 
            };
 

	
 
            if let Err(_err) = parsed {
 
                return Err(ParseError::new_error(&self.source, position, "Invalid integer constant"));
 
            }
 

	
 
            Ok(parsed.unwrap())
 
        }
 
    }
 

	
 
    // Statement keywords
 
    // TODO: Clean up these functions
 
    fn has_statement_keyword(&self) -> bool {
 
        self.has_keyword(b"channel")
 
            || self.has_keyword(b"skip")
 
            || self.has_keyword(b"if")
 
            || self.has_keyword(b"while")
 
            || self.has_keyword(b"break")
 
            || self.has_keyword(b"continue")
 
            || self.has_keyword(b"synchronous")
 
            || self.has_keyword(b"return")
 
            || self.has_keyword(b"assert")
 
            || self.has_keyword(b"goto")
 
            || self.has_keyword(b"new")
 
    }
 
    fn has_type_keyword(&self) -> bool {
 
        self.has_keyword(b"in")
 
            || self.has_keyword(b"out")
 
            || self.has_keyword(b"msg")
 
            || self.has_keyword(b"boolean")
 
            || self.has_keyword(b"byte")
 
            || self.has_keyword(b"short")
 
            || self.has_keyword(b"int")
 
            || self.has_keyword(b"long")
 
            || self.has_keyword(b"auto")
 
    }
 
    fn has_builtin_keyword(&self) -> bool {
 
        self.has_keyword(b"get")
 
            || self.has_keyword(b"fires")
 
            || self.has_keyword(b"create")
 
            || self.has_keyword(b"length")
 
    }
 
    fn has_reserved(&self) -> bool {
 
        self.has_statement_keyword()
 
            || self.has_type_keyword()
 
            || self.has_builtin_keyword()
 
            || self.has_keyword(b"let")
 
            || self.has_keyword(b"struct")
 
            || self.has_keyword(b"enum")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
            || self.has_keyword(b"null")
 
    }
 

	
 
    // Identifiers
 

	
 
    fn has_identifier(&self) -> bool {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return false;
 
        }
 
        let next = self.source.next();
 
        is_ident_start(next)
 
    }
 
    fn consume_identifier(&mut self) -> Result<Identifier, ParseError> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let position = self.source.pos();
 
        let value = self.consume_ident()?;
 
        Ok(Identifier{ position, value })
 
    }
 
    fn consume_identifier_spilled(&mut self) -> Result<(), ParseError> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        self.consume_ident()?;
 
        Ok(())
 
    }
 

	
 
    fn consume_namespaced_identifier(&mut self, h: &mut Heap) -> Result<NamespacedIdentifier, ParseError> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        // Consumes a single part of the namespaced identifier, appending the
        // identifier (and any polymorphic arguments) to `ident` and advancing
        // `backup_pos` past the last successfully parsed element.
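        // (e.g. `std::foo<int>` yields value `std::foo`, poly_args `[int]` and
        //  parts [Identifier(std), Identifier(foo), PolyArgs([int])])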
 
        // TODO: Continue here: if we fail to properly parse the polymorphic
 
        //  arguments, assume we have reached the end of the namespaced 
 
        //  identifier and are instead dealing with a less-than operator. Ugly?
 
        //  Yes. Needs tokenizer? Yes. 
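        //  For example, after `foo` a following `<` could either start
        //  polymorphic arguments (`foo<int>`) or be a less-than comparison
        //  (`foo < bar`); without a tokenizer we only find out by trying.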
 
        fn consume_part(
 
            l: &mut Lexer, h: &mut Heap, ident: &mut NamespacedIdentifier,
 
            backup_pos: &mut InputPosition
 
        ) -> Result<(), ParseError> {
 
            // Consume identifier
 
            if !ident.value.is_empty() {
 
                ident.value.extend(b"::");
 
            }
 
            let ident_start = ident.value.len();
 
            ident.value.extend(l.consume_ident()?);
 
            ident.parts.push(NamespacedIdentifierPart::Identifier{
 
                start: ident_start as u16,
 
                end: ident.value.len() as u16
 
            });
 

	
 
            // Maybe consume polymorphic args.
 
            *backup_pos = l.source.pos();
 
            l.consume_whitespace(false)?;
 
            match l.consume_polymorphic_args(h, true)? {
 
                Some(args) => {
 
                    let poly_start = ident.poly_args.len();
 
                    ident.poly_args.extend(args);
 

	
 
                    ident.parts.push(NamespacedIdentifierPart::PolyArgs{
 
                        start: poly_start as u16,
 
                        end: ident.poly_args.len() as u16,
 
                    });
 

	
 
                    *backup_pos = l.source.pos();
 
                },
 
                None => {}
 
            };
 

	
 
            Ok(())
 
        }
 

	
 
        let mut ident = NamespacedIdentifier{
 
            position: self.source.pos(),
 
            value: Vec::new(),
 
            poly_args: Vec::new(),
 
            parts: Vec::new(),
 
        };
 

	
 
        // Keep consuming parts separated by "::". We don't consume the trailing
 
        // whitespace, hence we keep a backup position at the end of the last
 
        // valid part of the namespaced identifier (i.e. the last ident, or the
 
        // last encountered polymorphic arguments).
 
        let mut backup_pos = self.source.pos();
 
        consume_part(self, h, &mut ident, &mut backup_pos)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 
            consume_part(self, h, &mut ident, &mut backup_pos)?;
 
            self.consume_whitespace(false)?;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(ident)
 
    }
 

	
 
    fn consume_namespaced_identifier_spilled(&mut self) -> Result<(), ParseError> {
 
    // Consumes a spilled namespaced identifier and returns the number of
 
    // namespaces that we encountered.
 
    fn consume_namespaced_identifier_spilled(&mut self) -> Result<usize, ParseError> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        debug_log!("consume_nsident2_spilled: {}", debug_line!(self.source));
 

	
 
        fn consume_part_spilled(l: &mut Lexer, backup_pos: &mut InputPosition) -> Result<(), ParseError> {
 
            l.consume_ident()?;
 
            *backup_pos = l.source.pos();
 
            l.consume_whitespace(false)?;
 
            match l.maybe_consume_poly_args_spilled_without_pos_recovery() {
 
                Ok(true) => { *backup_pos = l.source.pos(); },
 
                Ok(false) => {},
 
                Err(_) => { return Err(l.error_at_pos("Failed to parse poly args (spilled)")) },
 
            }
 
            Ok(())
 
        }
 

	
 
        let mut backup_pos = self.source.pos();
 
        let mut num_namespaces = 1;
 
        consume_part_spilled(self, &mut backup_pos)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 
            consume_part_spilled(self, &mut backup_pos)?;
 
            self.consume_whitespace(false)?;
 
            num_namespaces += 1;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(())
 
        Ok(num_namespaces)
 
    }
 

	
 
    // Types and type annotations
 

	
 
    /// Consumes a type definition. When called the input position should be at
 
    /// the type specification. When done the input position will be at the end
 
    /// of the type specification (and hence may point at whitespace).
 
    fn consume_type(&mut self, h: &mut Heap, allow_inference: bool) -> Result<ParserTypeId, ParseError> {
 
        // Small helper function to convert in/out polymorphic arguments. Not
 
        // pretty, but return boolean is true if the error is due to inference
 
        // not being allowed
 
        let reduce_port_poly_args = |
 
            heap: &mut Heap,
 
            port_pos: &InputPosition,
 
            args: Vec<ParserTypeId>,
 
        | -> Result<ParserTypeId, bool> {
 
            match args.len() {
 
                0 => if allow_inference {  
 
                    Ok(heap.alloc_parser_type(|this| ParserType{
 
                        this,
 
                        pos: port_pos.clone(),
 
                        variant: ParserTypeVariant::Inferred
 
                    }))
 
                } else {
 
                    Err(true)
 
                },
 
                1 => Ok(args[0]),
 
                _ => Err(false)
 
            }
 
        };
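        // For example: `in<msg>` reduces to the single `msg` argument, a bare
        // `in` becomes an inferred argument when inference is allowed (and an
        // error otherwise), and `in<a, b>` is always an error.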
 

	
 
        // Consume the type
 
        debug_log!("consume_type: {}", debug_line!(self.source));
 
        let pos = self.source.pos();
 
        let parser_type_variant = if self.has_keyword(b"msg") {
 
            self.consume_keyword(b"msg")?;
 
            ParserTypeVariant::Message
 
        } else if self.has_keyword(b"boolean") {
 
            self.consume_keyword(b"boolean")?;
 
            ParserTypeVariant::Bool
 
        } else if self.has_keyword(b"byte") {
 
            self.consume_keyword(b"byte")?;
 
            ParserTypeVariant::Byte
 
        } else if self.has_keyword(b"short") {
 
            self.consume_keyword(b"short")?;
 
            ParserTypeVariant::Short
 
        } else if self.has_keyword(b"int") {
 
            self.consume_keyword(b"int")?;
 
            ParserTypeVariant::Int
 
        } else if self.has_keyword(b"long") {
 
            self.consume_keyword(b"long")?;
 
            ParserTypeVariant::Long
 
        } else if self.has_keyword(b"str") {
 
            self.consume_keyword(b"str")?;
 
            ParserTypeVariant::String
 
        } else if self.has_keyword(b"auto") {
 
            if !allow_inference {
 
                return Err(ParseError::new_error(
 
                        &self.source, pos,
 
                        "Type inference is not allowed here"
 
                ));
 
            }
 

	
 
            self.consume_keyword(b"auto")?;
 
            ParserTypeVariant::Inferred
 
        } else if self.has_keyword(b"in") {
 
            // TODO: @cleanup: not particularly neat to have this special case
 
            //  where we enforce polyargs in the parser-phase
 
            self.consume_keyword(b"in")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?.unwrap_or_default();
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error|  {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'in' only allows for 1 polymorphic argument"
 
                    };
 
                    ParseError::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Input(poly_arg)
 
        } else if self.has_keyword(b"out") {
 
            self.consume_keyword(b"out")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?.unwrap_or_default();
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error| {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'out' only allows for 1 polymorphic argument, but {} were specified"
 
                    };
 
                    ParseError::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Output(poly_arg)
 
        } else {
 
            // Must be a symbolic type
 
            let identifier = self.consume_namespaced_identifier(h)?;
 
            ParserTypeVariant::Symbolic(SymbolicParserType{identifier, variant: None, poly_args2: Vec::new()})
 
        };
 

	
 
        // If the type was a basic type (not supporting polymorphic type
 
        // arguments), then we make sure the user did not specify any of them.
 
        let mut backup_pos = self.source.pos();
 
        if !parser_type_variant.supports_polymorphic_args() {
 
            self.consume_whitespace(false)?;
 
            if let Some(b'<') = self.source.next() {
 
                return Err(ParseError::new_error(
 
                    &self.source, self.source.pos(),
 
                    "This type does not allow polymorphic arguments"
 
                ));
 
            }
 

	
 
            self.source.seek(backup_pos);
 
        }
 

	
 
        let mut parser_type_id = h.alloc_parser_type(|this| ParserType{
 
            this, pos, variant: parser_type_variant
 
        });
 

	
 
        // If we're dealing with arrays, then we need to wrap the currently
 
        // parsed type in array types
 
        self.consume_whitespace(false)?;
 
        while let Some(b'[') = self.source.next() {
 
            let pos = self.source.pos();
 
            self.source.consume();
 
            self.consume_whitespace(false)?;
 
            if let Some(b']') = self.source.next() {
 
                // Type is wrapped in an array
 
                self.source.consume();
 
                parser_type_id = h.alloc_parser_type(|this| ParserType{
 
                    this, pos, variant: ParserTypeVariant::Array(parser_type_id)
 
                });
 
                backup_pos = self.source.pos();
 

	
 
                // In case we're dealing with another array
 
                self.consume_whitespace(false)?;
 
            } else {
 
                return Err(ParseError::new_error(
 
                    &self.source, pos,
 
                    "Expected a closing ']'"
 
                ));
 
            }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(parser_type_id)
 
    }
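    // Illustrative summary of what `consume_type` above accepts, based on its
    // branches: primitive keywords (e.g. `int`, `long`, `str`), `auto` when
    // inference is allowed, port types with an optional polymorphic argument
    // (e.g. `in<int>`, `out<long>`), user-defined symbolic types (e.g. a
    // hypothetical `Vector<int>`), and any of these wrapped in array brackets
    // (e.g. `int[]`, `Vector<int>[][]`).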
 

	
 
    /// Attempts to consume a type without returning it. If it doesn't encounter
 
    /// a well-formed type, then the input position is left at a "random"
 
    /// position.
 
    fn maybe_consume_type_spilled_without_pos_recovery(&mut self) -> bool {
 
        // Consume type identifier
 
        debug_log!("maybe_consume_type_spilled_...: {}", debug_line!(self.source));
 
        if self.has_type_keyword() {
 
            self.consume_any_chars();
 
        } else {
 
            let ident = self.consume_namespaced_identifier_spilled();
 
            if ident.is_err() { return false; }
 
        }
 

	
 
        // Consume any polymorphic arguments that follow the type identifier
        let mut backup_pos = self.source.pos();
        if self.consume_whitespace(false).is_err() { return false; }
        match self.maybe_consume_poly_args_spilled_without_pos_recovery() {
            Ok(true) => backup_pos = self.source.pos(),
            Ok(false) => {},
            Err(()) => return false,
        }

        // Consume any array specifiers. Make sure we always leave the input
        // position at the end of the last array specifier if we do find a
        // valid type
        if self.consume_whitespace(false).is_err() { return false; }
 
        while let Some(b'[') = self.source.next() {
 
            self.source.consume();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
            if self.source.next() != Some(b']') { return false; }
 
            self.source.consume();
 
            backup_pos = self.source.pos();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return true;
 
    }
 

	
 
    fn maybe_consume_type_spilled(&mut self) -> bool {
 
        let backup_pos = self.source.pos();
 
        if !self.maybe_consume_type_spilled_without_pos_recovery() {
 
            self.source.seek(backup_pos);
 
            return false;
 
        }
 

	
 
        return true;
 
    }
 

	
 
    /// Attempts to consume polymorphic arguments without returning them. If it
 
    /// doesn't encounter well-formed polymorphic arguments, then the input
 
    /// position is left at a "random" position. Returns a boolean indicating if
 
    /// the poly_args list was present.
 
    fn maybe_consume_poly_args_spilled_without_pos_recovery(&mut self) -> Result<bool, ()> {
 
        debug_log!("maybe_consume_poly_args_spilled_...: {}", debug_line!(self.source));
 
        self.consume_comma_separated_spilled_without_pos_recovery(
 
            b'<', b'>', |lexer| {
 
                lexer.maybe_consume_type_spilled_without_pos_recovery()
 
            })
 
    }
 

	
 
    /// Consumes polymorphic arguments and their delimiters if specified. If
    /// polyargs are present then the args are consumed and the input position
    /// will be placed after the polyarg list. If polyargs are not present then
    /// the input position will remain unmodified and `None` will be returned.
 
    ///
 
    /// Polymorphic arguments represent the specification of the parametric
 
    /// types of a polymorphic type: they specify the value of the polymorphic
 
    /// type's polymorphic variables.
 
    fn consume_polymorphic_args(&mut self, h: &mut Heap, allow_inference: bool) -> Result<Option<Vec<ParserTypeId>>, ParseError> {
 
        self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic argument list",
 
            |lexer, heap| lexer.consume_type(heap, allow_inference)
 
        )
 
    }
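    // (Illustrative: for input such as `<int, auto>` this returns Some of the
    // parsed types; when no '<' follows it returns None, which callers turn
    // into an empty list via `unwrap_or_default()`.)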
 

	
 
    /// Consumes polymorphic variables. These are identifiers that are used
 
    /// within polymorphic types. The input position may be at whitespace. If
 
    /// polymorphic variables are present then the whitespace, wrapping
 
    /// delimiters and the polymorphic variables are consumed. Otherwise the
 
    /// input position will stay where it is. If no polymorphic variables are
 
    /// present then an empty vector will be returned.
 
    fn consume_polymorphic_vars(&mut self, h: &mut Heap) -> Result<Vec<Identifier>, ParseError> {
 
        let backup_pos = self.source.pos();
 
        match self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic variable list",
 
            |lexer, _heap| lexer.consume_identifier()
 
        )? {
 
            Some(poly_vars) => Ok(poly_vars),
 
            None => {
 
                self.source.seek(backup_pos);
 
                Ok(vec!())
 
            }
 
        }
 
    }
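    // (Illustrative: polymorphic variables are the `T1, T2` in a definition
    // such as `struct Pair<T1, T2>{ ... }`, as opposed to the concrete
    // polymorphic arguments in a use such as `Pair<int, long>`.)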
 

	
 
    // Parameters
 

	
 
    fn consume_parameter(&mut self, h: &mut Heap) -> Result<ParameterId, ParseError> {
 
        let parser_type = self.consume_type(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let position = self.source.pos();
 
        let identifier = self.consume_identifier()?;
 
        let id =
 
            h.alloc_parameter(|this| Parameter { this, position, parser_type, identifier });
 
        Ok(id)
 
    }
 
    fn consume_parameters(&mut self, h: &mut Heap) -> Result<Vec<ParameterId>, ParseError> {
 
        match self.consume_comma_separated(
 
            h, b'(', b')', "Expected the end of the parameter list",
 
            |lexer, heap| lexer.consume_parameter(heap)
 
        )? {
 
            Some(params) => Ok(params),
 
            None => {
 
                Err(ParseError::new_error(
 
                    &self.source, self.source.pos(),
 
                    "Expected a parameter list"
 
                ))
 
            }
 
        }
 
    }
 

	
 
    // ====================
 
    // Expressions
 
    // ====================
 

	
 
    fn consume_paren_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        let result = self.consume_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b")")?;
 
        Ok(result)
 
    }
 
    fn consume_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested expression"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_assignment_expression(h);
 
        self.level -= 1;
 
        result
 
    }
 
    fn consume_assignment_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let result = self.consume_conditional_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        if self.has_assignment_operator() {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation = self.consume_assignment_operator()?;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_expression(h)?;
 
            Ok(h.alloc_assignment_expression(|this| AssignmentExpression {
 
                this,
 
                position,
 
                left,
 
                operation,
 
                right,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            })
 
            .upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 
    fn has_assignment_operator(&self) -> bool {
 
        self.has_string(b"=")
 
            || self.has_string(b"*=")
 
            || self.has_string(b"/=")
 
            || self.has_string(b"%=")
 
            || self.has_string(b"+=")
 
            || self.has_string(b"-=")
 
            || self.has_string(b"<<=")
 
            || self.has_string(b">>=")
 
            || self.has_string(b"&=")
 
            || self.has_string(b"^=")
 
            || self.has_string(b"|=")
 
    }
 
    fn consume_assignment_operator(&mut self) -> Result<AssignmentOperator, ParseError> {
 
        if self.has_string(b"=") {
 
            self.consume_string(b"=")?;
 
            Ok(AssignmentOperator::Set)
 
        } else if self.has_string(b"*=") {
 
            self.consume_string(b"*=")?;
 
            Ok(AssignmentOperator::Multiplied)
 
        } else if self.has_string(b"/=") {
 
            self.consume_string(b"/=")?;
 
            Ok(AssignmentOperator::Divided)
 
        } else if self.has_string(b"%=") {
 
            self.consume_string(b"%=")?;
 
            Ok(AssignmentOperator::Remained)
 
        } else if self.has_string(b"+=") {
 
            self.consume_string(b"+=")?;
 
            Ok(AssignmentOperator::Added)
 
        } else if self.has_string(b"-=") {
 
            self.consume_string(b"-=")?;
 
            Ok(AssignmentOperator::Subtracted)
 
        } else if self.has_string(b"<<=") {
 
            self.consume_string(b"<<=")?;
 
            Ok(AssignmentOperator::ShiftedLeft)
 
        } else if self.has_string(b">>=") {
 
            self.consume_string(b">>=")?;
 
            Ok(AssignmentOperator::ShiftedRight)
 
        } else if self.has_string(b"&=") {
 
            self.consume_string(b"&=")?;
 
            Ok(AssignmentOperator::BitwiseAnded)
 
        } else if self.has_string(b"^=") {
 
            self.consume_string(b"^=")?;
 
            Ok(AssignmentOperator::BitwiseXored)
 
        } else if self.has_string(b"|=") {
 
            self.consume_string(b"|=")?;
 
            Ok(AssignmentOperator::BitwiseOred)
 
        } else {
 
            Err(self.error_at_pos("Expected assignment operator"))
 
        }
 
    }
 
    fn consume_conditional_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let result = self.consume_concat_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        if self.has_string(b"?") {
 
            let position = self.source.pos();
 
            let test = result;
 
            self.consume_string(b"?")?;
 
            self.consume_whitespace(false)?;
 
            let true_expression = self.consume_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            self.consume_string(b":")?;
 
            self.consume_whitespace(false)?;
 
            let false_expression = self.consume_expression(h)?;
 
            Ok(h.alloc_conditional_expression(|this| ConditionalExpression {
 
                this,
 
                position,
 
                test,
 
                true_expression,
 
                false_expression,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            })
 
            .upcast())
 
        } else {
 
@@ -1055,1519 +1059,1556 @@ impl Lexer<'_> {
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_band_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_eq_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"&") && !self.has_string(b"&&") && !self.has_string(b"&=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"&")?;
 
            let operation = BinaryOperator::BitwiseAnd;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_eq_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_eq_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_rel_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"==") || self.has_string(b"!=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"==") {
 
                self.consume_string(b"==")?;
 
                operation = BinaryOperator::Equality;
 
            } else {
 
                self.consume_string(b"!=")?;
 
                operation = BinaryOperator::Inequality;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_rel_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_rel_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_shift_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"<=")
 
            || self.has_string(b">=")
 
            || self.has_string(b"<") && !self.has_string(b"<<=")
 
            || self.has_string(b">") && !self.has_string(b">>=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"<=") {
 
                self.consume_string(b"<=")?;
 
                operation = BinaryOperator::LessThanEqual;
 
            } else if self.has_string(b">=") {
 
                self.consume_string(b">=")?;
 
                operation = BinaryOperator::GreaterThanEqual;
 
            } else if self.has_string(b"<") {
 
                self.consume_string(b"<")?;
 
                operation = BinaryOperator::LessThan;
 
            } else {
 
                self.consume_string(b">")?;
 
                operation = BinaryOperator::GreaterThan;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_shift_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_shift_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_add_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"<<") && !self.has_string(b"<<=")
 
            || self.has_string(b">>") && !self.has_string(b">>=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"<<") {
 
                self.consume_string(b"<<")?;
 
                operation = BinaryOperator::ShiftLeft;
 
            } else {
 
                self.consume_string(b">>")?;
 
                operation = BinaryOperator::ShiftRight;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_add_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_add_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_mul_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"+") && !self.has_string(b"+=")
 
            || self.has_string(b"-") && !self.has_string(b"-=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"+") {
 
                self.consume_string(b"+")?;
 
                operation = BinaryOperator::Add;
 
            } else {
 
                self.consume_string(b"-")?;
 
                operation = BinaryOperator::Subtract;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_mul_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_mul_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_prefix_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"*") && !self.has_string(b"*=")
 
            || self.has_string(b"/") && !self.has_string(b"/=")
 
            || self.has_string(b"%") && !self.has_string(b"%=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"*") {
 
                self.consume_string(b"*")?;
 
                operation = BinaryOperator::Multiply;
 
            } else if self.has_string(b"/") {
 
                self.consume_string(b"/")?;
 
                operation = BinaryOperator::Divide;
 
            } else {
 
                self.consume_string(b"%")?;
 
                operation = BinaryOperator::Remainder;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_prefix_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_prefix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.has_string(b"+")
 
            || self.has_string(b"-")
 
            || self.has_string(b"~")
 
            || self.has_string(b"!")
 
        {
 
            let position = self.source.pos();
 
            let operation;
 
            if self.has_string(b"+") {
 
                self.consume_string(b"+")?;
 
                if self.has_string(b"+") {
 
                    self.consume_string(b"+")?;
 
                    operation = UnaryOperation::PreIncrement;
 
                } else {
 
                    operation = UnaryOperation::Positive;
 
                }
 
            } else if self.has_string(b"-") {
 
                self.consume_string(b"-")?;
 
                if self.has_string(b"-") {
 
                    self.consume_string(b"-")?;
 
                    operation = UnaryOperation::PreDecrement;
 
                } else {
 
                    operation = UnaryOperation::Negative;
 
                }
 
            } else if self.has_string(b"~") {
 
                self.consume_string(b"~")?;
 
                operation = UnaryOperation::BitwiseNot;
 
            } else {
 
                self.consume_string(b"!")?;
 
                operation = UnaryOperation::LogicalNot;
 
            }
 
            self.consume_whitespace(false)?;
 
            if self.level >= MAX_LEVEL {
 
                return Err(self.error_at_pos("Too deeply nested expression"));
 
            }
 
            self.level += 1;
 
            let result = self.consume_prefix_expression(h);
 
            self.level -= 1;
 
            let expression = result?;
 
            return Ok(h
 
                .alloc_unary_expression(|this| UnaryExpression {
 
                    this,
 
                    position,
 
                    operation,
 
                    expression,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast());
 
        }
 
        self.consume_postfix_expression(h)
 
    }
 
    fn consume_postfix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_primary_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"++")
 
            || self.has_string(b"--")
 
            || self.has_string(b"[")
 
            || (self.has_string(b".") && !self.has_string(b".."))
 
        {
 
            let mut position = self.source.pos();
 
            if self.has_string(b"++") {
 
                self.consume_string(b"++")?;
 
                let operation = UnaryOperation::PostIncrement;
 
                let expression = result;
 
                self.consume_whitespace(false)?;
 
                result = h
 
                    .alloc_unary_expression(|this| UnaryExpression {
 
                        this,
 
                        position,
 
                        operation,
 
                        expression,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            } else if self.has_string(b"--") {
 
                self.consume_string(b"--")?;
 
                let operation = UnaryOperation::PostDecrement;
 
                let expression = result;
 
                self.consume_whitespace(false)?;
 
                result = h
 
                    .alloc_unary_expression(|this| UnaryExpression {
 
                        this,
 
                        position,
 
                        operation,
 
                        expression,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            } else if self.has_string(b"[") {
 
                self.consume_string(b"[")?;
 
                self.consume_whitespace(false)?;
 
                let subject = result;
 
                let index = self.consume_expression(h)?;
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b"..") || self.has_string(b":") {
 
                    position = self.source.pos();
 
                    if self.has_string(b"..") {
 
                        self.consume_string(b"..")?;
 
                    } else {
 
                        self.consume_string(b":")?;
 
                    }
 
                    self.consume_whitespace(false)?;
 
                    let to_index = self.consume_expression(h)?;
 
                    self.consume_whitespace(false)?;
 
                    result = h
 
                        .alloc_slicing_expression(|this| SlicingExpression {
 
                            this,
 
                            position,
 
                            subject,
 
                            from_index: index,
 
                            to_index,
 
                            parent: ExpressionParent::None,
 
                            concrete_type: ConcreteType::default(),
 
                        })
 
                        .upcast();
 
                } else {
 
                    result = h
 
                        .alloc_indexing_expression(|this| IndexingExpression {
 
                            this,
 
                            position,
 
                            subject,
 
                            index,
 
                            parent: ExpressionParent::None,
 
                            concrete_type: ConcreteType::default(),
 
                        })
 
                        .upcast();
 
                }
 
                self.consume_string(b"]")?;
 
                self.consume_whitespace(false)?;
 
            } else {
 
                assert!(self.has_string(b"."));
 
                self.consume_string(b".")?;
 
                self.consume_whitespace(false)?;
 
                let subject = result;
 
                let field;
 
                if self.has_keyword(b"length") {
 
                    self.consume_keyword(b"length")?;
 
                    field = Field::Length;
 
                } else {
 
                    let identifier = self.consume_identifier()?;
 
                    field = Field::Symbolic(FieldSymbolic{
 
                        identifier,
 
                        definition: None,
 
                        field_idx: 0,
 
                    });
 
                }
 
                result = h
 
                    .alloc_select_expression(|this| SelectExpression {
 
                        this,
 
                        position,
 
                        subject,
 
                        field,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            }
 
        }
 
        Ok(result)
 
    }
 
    fn consume_primary_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.has_string(b"(") {
 
            return self.consume_paren_expression(h);
 
        }
 
        if self.has_string(b"{") {
 
            return Ok(self.consume_array_expression(h)?.upcast());
 
        }
 
        if self.has_builtin_literal() {
 
            return Ok(self.consume_builtin_literal_expression(h)?.upcast());
 
        }
 
        if self.has_struct_literal() {
 
            return Ok(self.consume_struct_literal_expression(h)?.upcast());
 
        }
 
        if self.has_call_expression() {
 
            return Ok(self.consume_call_expression(h)?.upcast());
 
        }
 
        if self.has_enum_literal() {
 
            return Ok(self.consume_enum_literal(h)?.upcast());
 
        }
 
        Ok(self.consume_variable_expression(h)?.upcast())
 
    }
 
    fn consume_array_expression(&mut self, h: &mut Heap) -> Result<ArrayExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        let mut elements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b"}") {
 
            while self.source.next().is_some() {
 
                elements.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b"}") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 
        self.consume_string(b"}")?;
 
        Ok(h.alloc_array_expression(|this| ArrayExpression {
 
            this,
 
            position,
 
            elements,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn has_builtin_literal(&self) -> bool {
 
        is_constant(self.source.next())
 
            || self.has_keyword(b"null")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
    }
 
    fn consume_builtin_literal_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<LiteralExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        let value;
 
        if self.has_keyword(b"null") {
 
            self.consume_keyword(b"null")?;
 
            value = Literal::Null;
 
        } else if self.has_keyword(b"true") {
 
            self.consume_keyword(b"true")?;
 
            value = Literal::True;
 
        } else if self.has_keyword(b"false") {
 
            self.consume_keyword(b"false")?;
 
            value = Literal::False;
 
        } else if self.source.next() == Some(b'\'') {
 
            self.source.consume();
 
            let mut data = Vec::new();
 
            let mut next = self.source.next();
 
            while next != Some(b'\'') && (is_vchar(next) || next == Some(b' ')) {
 
                data.push(next.unwrap());
 
                self.source.consume();
 
                next = self.source.next();
 
            }
 
            if next != Some(b'\'') || data.is_empty() {
 
                return Err(self.error_at_pos("Expected character constant"));
 
            }
 
            self.source.consume();
 
            value = Literal::Character(data);
 
        } else {
 
            if !self.has_integer() {
 
                return Err(self.error_at_pos("Expected integer constant"));
 
            }
 

	
 
            value = Literal::Integer(self.consume_integer()?);
 
        }
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression {
 
            this,
 
            position,
 
            value,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    fn has_enum_literal(&mut self) -> bool {
 
        // An enum literal is always:
 
        //      maybe_a_namespace::EnumName<maybe_one_of_these>::Variant
 
        // So, for now, it may be distinguished from other literals/variables by
        // first checking for struct literals and call expressions, then for
        // enum literals, and finally for variable expressions. It differs from
        // a variable expression in that its identifier _always_ consists of
        // multiple namespace elements.
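        // (Illustrative: `Option::None` or `utils::Signal<int>::Go` would pass
        // this check, whereas a lone `value` falls through to a variable
        // expression.)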
 
        let backup_pos = self.source.pos();
 
        let result = match self.consume_namespaced_identifier_spilled() {
 
            Ok(num_namespaces) => num_namespaces > 1,
 
            Err(_) => false,
 
        };
 
        self.source.seek(backup_pos);
 
        result
 
    }
 
    fn consume_enum_literal(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError> {
 
        let identifier = self.consume_namespaced_identifier(h)?;
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression{
 
            this,
 
            position: identifier.position,
 
            value: Literal::Enum(LiteralEnum{
 
                identifier,
 
                poly_args2: Vec::new(),
 
                definition: None,
 
                variant_idx: 0,
 
            }),
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn has_struct_literal(&mut self) -> bool {
 
        // A struct literal is written as:
 
        //      namespace::StructName<maybe_one_of_these, auto>{ field: expr }
 
        // We will parse up until the opening brace to see if we're dealing with
 
        // a struct literal.
 
        let backup_pos = self.source.pos();
 
        let result = self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'{');
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 

	
 
    fn consume_struct_literal_expression(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError> {
 
        // Consume identifier and polymorphic arguments
 
        debug_log!("consume_struct_literal_expression: {}", debug_line!(self.source));
 
        let position = self.source.pos();
 
        let identifier = self.consume_namespaced_identifier(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume fields
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let identifier = lexer.consume_identifier()?;
 
                lexer.consume_whitespace(false)?;
 
                lexer.consume_string(b":")?;
 
                lexer.consume_whitespace(false)?;
 
                let value = lexer.consume_expression(heap)?;
 

	
 
                Ok(LiteralStructField{ identifier, value, field_idx: 0 })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError::new_error(
 
                self.source, self.source.pos(),
 
                "A struct literal must be followed by its field values"
 
            ))
 
        };
 

	
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression{
 
            this,
 
            position,
 
            value: Literal::Struct(LiteralStruct{
 
                identifier,
 
                fields,
 
                poly_args2: Vec::new(),
 
                definition: None,
 
            }),
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        }))
 
    }
 

	
 
    fn has_call_expression(&mut self) -> bool {
 
        // We need to prevent ambiguity with various operators (because we may
 
        // be specifying polymorphic variables) and variables.
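        // (Illustrative: `foo(x)` and `math::max<int>(a, b)` are treated as
        // call expressions because a '(' follows the identifier, whereas a
        // bare `foo` falls through to the other expression kinds.)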
 
        if self.has_builtin_keyword() {
 
            return true;
 
        }
 

	
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 

	
 
        if self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'(') {
 
            // Seems like we have a function call or an enum literal
 
            result = true;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_call_expression(&mut self, h: &mut Heap) -> Result<CallExpressionId, ParseError> {
 
        let position = self.source.pos();
 

	
 
        // Consume method identifier
 
        // TODO: @token Replace this conditional polymorphic arg parsing once we have a tokenizer.
 
        debug_log!("consume_call_expression: {}", debug_line!(self.source));
 
        let method;
 
        let mut consume_poly_args_explicitly = true;
 
        if self.has_keyword(b"get") {
 
            self.consume_keyword(b"get")?;
 
            method = Method::Get;
 
        } else if self.has_keyword(b"put") {
 
            self.consume_keyword(b"put")?;
 
            method = Method::Put;
 
        } else if self.has_keyword(b"fires") {
 
            self.consume_keyword(b"fires")?;
 
            method = Method::Fires;
 
        } else if self.has_keyword(b"create") {
 
            self.consume_keyword(b"create")?;
 
            method = Method::Create;
 
        } else {
 
            let identifier = self.consume_namespaced_identifier(h)?;
 
            method = Method::Symbolic(MethodSymbolic{
 
                identifier,
 
                definition: None
 
            });
 
            consume_poly_args_explicitly = false;
 
        };
 

	
 
        // Consume polymorphic arguments
 
        let poly_args = if consume_poly_args_explicitly {
 
            self.consume_whitespace(false)?;
 
            self.consume_polymorphic_args(h, true)?.unwrap_or_default()
 
        } else {
 
            Vec::new()
 
        };
 

	
 
        // Consume arguments to call
 
        self.consume_whitespace(false)?;
 
        let mut arguments = Vec::new();
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b")") {
 
            // TODO: allow trailing comma
 
            while self.source.next().is_some() {
 
                arguments.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b")") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?
 
            }
 
        }
 
        self.consume_string(b")")?;
 
        Ok(h.alloc_call_expression(|this| CallExpression {
 
            this,
 
            position,
 
            method,
 
            arguments,
 
            poly_args,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn consume_variable_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<VariableExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        debug_log!("consume_variable_expression: {}", debug_line!(self.source));
 

	
 
        // TODO: @token Reimplement when tokenizer is implemented, prevent ambiguities
 
        let identifier = identifier_as_namespaced(self.consume_identifier()?);
 

	
 
        Ok(h.alloc_variable_expression(|this| VariableExpression {
 
            this,
 
            position,
 
            identifier,
 
            declaration: None,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    // ====================
 
    // Statements
 
    // ====================
 

	
 
    /// Consumes any kind of statement from the source and will error if it
 
    /// did not encounter a statement. Will also return an error if the
 
    /// statement is nested too deeply.
 
    ///
 
    /// `wrap_in_block` may be set to true to ensure that the parsed statement
 
    /// will be wrapped in a block statement if it is not already a block
 
    /// statement. This is used to ensure that all `if`, `while` and `sync`
 
    /// statements have a block statement as body.
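    ///
    /// (Illustrative: with wrapping enabled, the body of `if (c) x = 1;` is
    /// parsed as though it were written `if (c) { x = 1; }`.)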
 
    fn consume_statement(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested statement"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_statement_impl(h, wrap_in_block);
 
        self.level -= 1;
 
        result
 
    }
 
    fn has_label(&mut self) -> bool {
 
        // To prevent ambiguity with expression statements consisting only of an
 
        // identifier or a namespaced identifier, we look ahead and match on the
 
        // *single* colon that signals a labeled statement.
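        // (Illustrative: `again: x = x + 1;` is recognized as a labeled
        // statement, whereas an identifier followed by '::' is not, since only
        // a single ':' counts.)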
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 
        if self.consume_identifier_spilled().is_ok() {
 
            // next character is ':', second character is NOT ':'
 
            result = Some(b':') == self.source.next() && Some(b':') != self.source.lookahead(1)
 
        }
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_statement_impl(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError> {
 
        // Parse and allocate statement
 
        let mut must_wrap = true;
 
        let mut stmt_id = if self.has_string(b"{") {
 
            must_wrap = false;
 
            self.consume_block_statement(h)?
 
        } else if self.has_keyword(b"skip") {
 
            must_wrap = false;
 
            self.consume_skip_statement(h)?.upcast()
 
        } else if self.has_keyword(b"if") {
 
            self.consume_if_statement(h)?.upcast()
 
        } else if self.has_keyword(b"while") {
 
            self.consume_while_statement(h)?.upcast()
 
        } else if self.has_keyword(b"break") {
 
            self.consume_break_statement(h)?.upcast()
 
        } else if self.has_keyword(b"continue") {
 
            self.consume_continue_statement(h)?.upcast()
 
        } else if self.has_keyword(b"synchronous") {
 
            self.consume_synchronous_statement(h)?.upcast()
 
        } else if self.has_keyword(b"return") {
 
            self.consume_return_statement(h)?.upcast()
 
        } else if self.has_keyword(b"assert") {
 
            self.consume_assert_statement(h)?.upcast()
 
        } else if self.has_keyword(b"goto") {
 
            self.consume_goto_statement(h)?.upcast()
 
        } else if self.has_keyword(b"new") {
 
            self.consume_new_statement(h)?.upcast()
 
        } else if self.has_label() {
 
            self.consume_labeled_statement(h)?.upcast()
 
        } else {
 
            self.consume_expression_statement(h)?.upcast()
 
        };
 

	
 
        // Wrap if desired and if needed
 
        if must_wrap && wrap_in_block {
 
            let position = h[stmt_id].position();
 
            let block_wrapper = h.alloc_block_statement(|this| BlockStatement{
 
                this,
 
                position,
 
                statements: vec![stmt_id],
 
                parent_scope: None,
 
                relative_pos_in_parent: 0,
 
                locals: Vec::new(),
 
                labels: Vec::new()
 
            });
 

	
 
            stmt_id = block_wrapper.upcast();
 
        }
 

	
 
        Ok(stmt_id)
 
    }
 
    fn has_local_statement(&mut self) -> bool {
 
        /* To avoid ambiguity, we look ahead to find either the
 
        channel keyword that signals a variable declaration, or
 
        a type annotation followed by another identifier.
 
        Example:
 
          my_type[] x = {5}; // memory statement
 
          my_var[5] = x; // assignment expression, expression statement
 
        Note how both the local declaration and the assignment
        start with an arbitrary identifier followed by '['. */
 
        if self.has_keyword(b"channel") {
 
            return true;
 
        }
 
        if self.has_statement_keyword() {
 
            return false;
 
        }
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 
        if self.maybe_consume_type_spilled_without_pos_recovery() {
 
            // We seem to have a valid type, do we now have an identifier?
 
            if self.consume_whitespace(true).is_ok() {
 
                result = self.has_identifier();
 
            }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_block_statement(&mut self, h: &mut Heap) -> Result<StatementId, ParseError> {
 
        let position = self.source.pos();
 
        let mut statements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        while self.has_local_statement() {
 
            let (local_id, stmt_id) = self.consume_local_statement(h)?;
 
            statements.push(local_id.upcast());
 
            if let Some(stmt_id) = stmt_id {
 
                statements.push(stmt_id.upcast());
 
            }
 
            self.consume_whitespace(false)?;
 
        }
 
        while !self.has_string(b"}") {
 
            statements.push(self.consume_statement(h, false)?);
 
            self.consume_whitespace(false)?;
 
        }
 
        self.consume_string(b"}")?;
 
        if statements.is_empty() {
 
            Ok(h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }).upcast())
 
        } else {
 
            Ok(h.alloc_block_statement(|this| BlockStatement {
 
                this,
 
                position,
 
                statements,
 
                parent_scope: None,
 
                relative_pos_in_parent: 0,
 
                locals: Vec::new(),
 
                labels: Vec::new(),
 
            })
 
            .upcast())
 
        }
 
    }
 
    fn consume_local_statement(&mut self, h: &mut Heap) -> Result<(LocalStatementId, Option<ExpressionStatementId>), ParseError> {
 
        if self.has_keyword(b"channel") {
 
            let local_id = self.consume_channel_statement(h)?.upcast();
 
            Ok((local_id, None))
 
        } else {
 
            let (memory_id, stmt_id) = self.consume_memory_statement(h)?;
 
            Ok((memory_id.upcast(), Some(stmt_id)))
 
        }
 
    }
 
    fn consume_channel_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ChannelStatementId, ParseError> {
 
        // Consume channel statement and polymorphic argument if specified.
 
        // Needs a tiny bit of special parsing to ensure the right amount of
 
        // whitespace is present.
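        // (Illustrative: this accepts both `channel<int> tx -> rx;` and
        // `channel tx -> rx;`; when no polymorphic argument is given the
        // payload type is left to be inferred.)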
 
        let position = self.source.pos();
 
        self.consume_keyword(b"channel")?;
 

	
 
        let expect_whitespace = self.source.next() != Some(b'<');
 
        self.consume_whitespace(expect_whitespace)?;
 
        let poly_args = self.consume_polymorphic_args(h, true)?.unwrap_or_default();
 
        let poly_arg_id = match poly_args.len() {
 
            0 => h.alloc_parser_type(|this| ParserType{
 
                this, pos: position.clone(), variant: ParserTypeVariant::Inferred,
 
            }),
 
            1 => poly_args[0],
 
            _ => return Err(ParseError::new_error(
 
                &self.source, self.source.pos(),
 
                "port construction using 'channel' accepts up to 1 polymorphic argument"
 
            ))
 
        };
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume the output port
 
        let out_parser_type = h.alloc_parser_type(|this| ParserType{
 
            this, pos: position.clone(), variant: ParserTypeVariant::Output(poly_arg_id)
 
        });
 
        let out_identifier = self.consume_identifier()?;
 

	
 
        // Consume the "->" syntax
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b"->")?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume the input port
 
        let in_parser_type = h.alloc_parser_type(|this| ParserType{
 
            this, pos: position.clone(), variant: ParserTypeVariant::Input(poly_arg_id)
 
        });
 
        let in_identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        let out_port = h.alloc_local(|this| Local {
 
            this,
 
            position,
 
            parser_type: out_parser_type,
 
            identifier: out_identifier,
 
            relative_pos_in_block: 0
 
        });
 
        let in_port = h.alloc_local(|this| Local {
 
            this,
 
            position,
 
            parser_type: in_parser_type,
 
            identifier: in_identifier,
 
            relative_pos_in_block: 0
 
        });
 
        Ok(h.alloc_channel_statement(|this| ChannelStatement {
 
            this,
 
            position,
 
            from: out_port,
 
            to: in_port,
 
            relative_pos_in_block: 0,
 
            next: None,
 
        }))
 
    }
 
    fn consume_memory_statement(&mut self, h: &mut Heap) -> Result<(MemoryStatementId, ExpressionStatementId), ParseError> {
 
        let position = self.source.pos();
 
        let parser_type = self.consume_type(h, true)?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let assignment_position = self.source.pos();
 
        self.consume_string(b"=")?;
 
        self.consume_whitespace(false)?;
 
        let initial = self.consume_expression(h)?;
 
        let variable = h.alloc_local(|this| Local {
 
            this,
 
            position,
 
            parser_type,
 
            identifier: identifier.clone(),
 
            relative_pos_in_block: 0
 
        });
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 

	
 
        // Transform into the variable declaration, followed by an assignment
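        // (Illustrative: `int x = 5;` yields a MemoryStatement declaring `x`
        // plus an ExpressionStatement containing the assignment `x = 5`.)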
 
        let memory_stmt_id = h.alloc_memory_statement(|this| MemoryStatement {
 
            this,
 
            position,
 
            variable,
 
            next: None,
 
        });
 
        let variable_expr_id = h.alloc_variable_expression(|this| VariableExpression{
 
            this,
 
            position: identifier.position.clone(),
 
            identifier: identifier_as_namespaced(identifier),
 
            declaration: None,
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        });
 
        let assignment_expr_id = h.alloc_assignment_expression(|this| AssignmentExpression{
 
            this,
 
            position: assignment_position,
 
            left: variable_expr_id.upcast(),
 
            operation: AssignmentOperator::Set,
 
            right: initial,
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        });
 
        let assignment_stmt_id = h.alloc_expression_statement(|this| ExpressionStatement{
 
            this,
 
            position,
 
            expression: assignment_expr_id.upcast(),
 
            next: None
 
        });
 
        Ok((memory_stmt_id, assignment_stmt_id))
 
    }
 
    fn consume_labeled_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<LabeledStatementId, ParseError> {
 
        let position = self.source.pos();
 
        let label = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b":")?;
 
        self.consume_whitespace(false)?;
 
        let body = self.consume_statement(h, false)?;
 
        Ok(h.alloc_labeled_statement(|this| LabeledStatement {
 
            this,
 
            position,
 
            label,
 
            body,
 
            relative_pos_in_block: 0,
 
            in_sync: None,
 
        }))
 
    }
 
    fn consume_skip_statement(&mut self, h: &mut Heap) -> Result<SkipStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"skip")?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }))
 
    }
 
    fn consume_if_statement(&mut self, h: &mut Heap) -> Result<IfStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"if")?;
 
        self.consume_whitespace(false)?;
 
        let test = self.consume_paren_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        let true_body = self.consume_statement(h, true)?;
 
        self.consume_whitespace(false)?;
 
        let false_body = if self.has_keyword(b"else") {
 
            self.consume_keyword(b"else")?;
 
            self.consume_whitespace(false)?;
 
            self.consume_statement(h, true)?
 
        } else {
 
            h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }).upcast()
 
        };
 
        Ok(h.alloc_if_statement(|this| IfStatement { this, position, test, true_body, false_body, end_if: None }))
 
    }
 
    fn consume_while_statement(&mut self, h: &mut Heap) -> Result<WhileStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"while")?;
 
        self.consume_whitespace(false)?;
 
        let test = self.consume_paren_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        let body = self.consume_statement(h, true)?;
 
        Ok(h.alloc_while_statement(|this| WhileStatement {
 
            this,
 
            position,
 
            test,
 
            body,
 
            end_while: None,
 
            in_sync: None,
 
        }))
 
    }
 
    fn consume_break_statement(&mut self, h: &mut Heap) -> Result<BreakStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"break")?;
 
        self.consume_whitespace(false)?;
 
        let label;
 
        if self.has_identifier() {
 
            label = Some(self.consume_identifier()?);
 
            self.consume_whitespace(false)?;
 
        } else {
 
            label = None;
 
        }
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_break_statement(|this| BreakStatement { this, position, label, target: None }))
 
    }
 
    fn consume_continue_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ContinueStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"continue")?;
 
        self.consume_whitespace(false)?;
 
        let label;
 
        if self.has_identifier() {
 
            label = Some(self.consume_identifier()?);
 
            self.consume_whitespace(false)?;
 
        } else {
 
            label = None;
 
        }
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_continue_statement(|this| ContinueStatement {
 
            this,
 
            position,
 
            label,
 
            target: None,
 
        }))
 
    }
 
    fn consume_synchronous_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<SynchronousStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"synchronous")?;
 
        self.consume_whitespace(false)?;
 
        // TODO: What was the purpose of this? Seems superfluous and confusing?
 
        // let mut parameters = Vec::new();
 
        // if self.has_string(b"(") {
 
        //     self.consume_parameters(h, &mut parameters)?;
 
        //     self.consume_whitespace(false)?;
 
        // } else if !self.has_keyword(b"skip") && !self.has_string(b"{") {
 
        //     return Err(self.error_at_pos("Expected block statement"));
 
        // }
 
        let body = self.consume_statement(h, true)?;
 
        Ok(h.alloc_synchronous_statement(|this| SynchronousStatement {
 
            this,
 
            position,
 
            body,
 
            end_sync: None,
 
            parent_scope: None,
 
        }))
 
    }
 
    fn consume_return_statement(&mut self, h: &mut Heap) -> Result<ReturnStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"return")?;
 
        self.consume_whitespace(false)?;
 
        let expression = if self.has_string(b"(") {
 
            self.consume_paren_expression(h)
 
        } else {
 
            self.consume_expression(h)
 
        }?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_return_statement(|this| ReturnStatement { this, position, expression }))
 
    }
 
    fn consume_assert_statement(&mut self, h: &mut Heap) -> Result<AssertStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"assert")?;
 
        self.consume_whitespace(false)?;
 
        let expression = if self.has_string(b"(") {
 
            self.consume_paren_expression(h)
 
        } else {
 
            self.consume_expression(h)
 
        }?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_assert_statement(|this| AssertStatement {
 
            this,
 
            position,
 
            expression,
 
            next: None,
 
        }))
 
    }
 
    fn consume_goto_statement(&mut self, h: &mut Heap) -> Result<GotoStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"goto")?;
 
        self.consume_whitespace(false)?;
 
        let label = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_goto_statement(|this| GotoStatement { this, position, label, target: None }))
 
    }
 
    fn consume_new_statement(&mut self, h: &mut Heap) -> Result<NewStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"new")?;
 
        self.consume_whitespace(false)?;
 
        let expression = self.consume_call_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_new_statement(|this| NewStatement { this, position, expression, next: None }))
 
    }
 
    fn consume_expression_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ExpressionStatementId, ParseError> {
 
        let position = self.source.pos();
 
        let expression = self.consume_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_expression_statement(|this| ExpressionStatement {
 
            this,
 
            position,
 
            expression,
 
            next: None,
 
        }))
 
    }
 

	
 
    // ====================
 
    // Symbol definitions
 
    // ====================
 

	
 
    fn has_symbol_definition(&self) -> bool {
 
        self.has_keyword(b"composite")
 
            || self.has_keyword(b"primitive")
 
            || self.has_type_keyword()
 
            || self.has_identifier()
 
    }
 
    fn consume_symbol_definition(&mut self, h: &mut Heap) -> Result<DefinitionId, ParseError> {
 
        if self.has_keyword(b"struct") {
 
            Ok(self.consume_struct_definition(h)?.upcast())
 
        } else if self.has_keyword(b"enum") {
 
            Ok(self.consume_enum_definition(h)?.upcast())
 
        } else if self.has_keyword(b"composite") || self.has_keyword(b"primitive") {
 
            Ok(self.consume_component_definition(h)?.upcast())
 
        } else {
 
            Ok(self.consume_function_definition(h)?.upcast())
 
        }
 
    }
 
    fn consume_struct_definition(&mut self, h: &mut Heap) -> Result<StructId, ParseError> {
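        // Illustrative sketch of the struct syntax this function accepts, as
        // far as can be inferred from the parsing logic below (the `<...>` form
        // of the polymorphic variables is an assumption, and `Pair`, `first`
        // and `second` are made-up names):
        //
        //     struct Pair<T1, T2> { T1 first, T2 second }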
 
        // Parse "struct" keyword, optional polyvars and its identifier
 
        let struct_pos = self.source.pos();
 
        self.consume_keyword(b"struct")?;
 
        self.consume_whitespace(true)?;
 
        let struct_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Parse struct fields
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let position = lexer.source.pos();
 
                let parser_type = lexer.consume_type(heap, false)?;
 
                lexer.consume_whitespace(true)?;
 
                let field = lexer.consume_identifier()?;
 

	
 
                Ok(StructFieldDefinition{ position, field, parser_type })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError::new_error(
 
                self.source, struct_pos,
 
                "An struct definition must be followed by its fields"
 
            )),
 
        };
 

	
 
        // Valid struct definition
 
        Ok(h.alloc_struct_definition(|this| StructDefinition{
 
            this,
 
            position: struct_pos,
 
            identifier: struct_ident,
 
            poly_vars,
 
            fields,
 
        }))
 
    }
 
    fn consume_enum_definition(&mut self, h: &mut Heap) -> Result<EnumId, ParseError> {
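        // Illustrative sketch of the enum syntax accepted below, inferred from
        // the variant parsing (the `<...>` polymorphic-variable form is an
        // assumption and the names are made up). Variants may be bare, carry an
        // integer value, or embed a type:
        //
        //     enum Flag { Off = 0, On = 1 }
        //     enum Option<T> { None, Some(T) }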
 
        // Parse "enum" keyword, optional polyvars and its identifier
 
        let enum_pos = self.source.pos();
 
        self.consume_keyword(b"enum")?;
 
        self.consume_whitespace(true)?;
 
        let enum_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        let variants = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected end of enum variant list",
 
            |lexer, heap| {
 
                // Variant identifier
 
                let position = lexer.source.pos();
 
                let identifier = lexer.consume_identifier()?;
 
                lexer.consume_whitespace(false)?;
 

	
 
                // Optional variant value/type
 
                let next = lexer.source.next();
 
                let value = match next {
 
                    Some(b',') => {
 
                        // Do not consume, let `consume_comma_separated` handle
 
                        // the next item
 
                        EnumVariantValue::None
 
                    },
 
                    Some(b'=') => {
 
                        // Integer value
 
                        lexer.source.consume();
 
                        lexer.consume_whitespace(false)?;
 
                        if !lexer.has_integer() {
 
                            return Err(lexer.error_at_pos("expected integer"))
 
                        }
 
                        let value = lexer.consume_integer()?;
 
                        EnumVariantValue::Integer(value)
 
                    },
 
                    Some(b'(') => {
 
                        // Embedded type
 
                        lexer.source.consume();
 
                        lexer.consume_whitespace(false)?;
 
                        let embedded_type = lexer.consume_type(heap, false)?;
 
                        lexer.consume_whitespace(false)?;
 
                        lexer.consume_string(b")")?;
 
                        EnumVariantValue::Type(embedded_type)
 
                    },
 
                    Some(b'}') => {
 
                        // End of enum
 
                        EnumVariantValue::None
 
                    }
 
                    _ => {
 
                        return Err(lexer.error_at_pos("Expected ',', '=', '}' or '('"));
 
                    }
 
                };
 

	
 
                Ok(EnumVariantDefinition{ position, identifier, value })
 
            }
 
        )? {
 
            Some(variants) => variants,
 
            None => return Err(ParseError::new_error(
 
                self.source, enum_pos,
 
                "An enum definition must be followed by its variants"
 
            )),
 
        };
 

	
 
        Ok(h.alloc_enum_definition(|this| EnumDefinition{
 
            this,
 
            position: enum_pos,
 
            identifier: enum_ident,
 
            poly_vars,
 
            variants,
 
        }))
 
    }
 
    fn consume_component_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // TODO: Cleanup
 
        if self.has_keyword(b"composite") {
 
            Ok(self.consume_composite_definition(h)?)
 
        } else {
 
            Ok(self.consume_primitive_definition(h)?)
 
        }
 
    }
 
    fn consume_composite_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // Parse keyword, optional polyvars and the identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"composite")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Parse body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_component(|this| Component { 
 
            this,
 
            variant: ComponentVariant::Composite,
 
            position,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body
 
        }))
 
    }
 
    fn consume_primitive_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // Consume keyword, optional polyvars and identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"primitive")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_component(|this| Component { 
 
            this,
 
            variant: ComponentVariant::Primitive,
 
            position,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body
 
        }))
 
    }
 
    fn consume_function_definition(&mut self, h: &mut Heap) -> Result<FunctionId, ParseError> {
 
        // Consume return type, optional polyvars and identifier
 
        let position = self.source.pos();
 
        let return_type = self.consume_type(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_function(|this| Function {
 
            this,
 
            position,
 
            return_type,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body,
 
        }))
 
    }
 
    fn has_pragma(&self) -> bool {
 
        if let Some(c) = self.source.next() {
 
            c == b'#'
 
        } else {
 
            false
 
        }
 
    }
 
    fn consume_pragma(&mut self, h: &mut Heap) -> Result<PragmaId, ParseError> {
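        // Illustrative sketch of the two pragmas recognized below (the module
        // name is a made-up example):
        //
        //     #version 1
        //     #module some.module.name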
 
        let position = self.source.pos();
 
        let next = self.source.next();
 
        if next != Some(b'#') {
 
            return Err(self.error_at_pos("Expected pragma"));
 
        }
 
        self.source.consume();
 
        if !is_vchar(self.source.next()) {
 
            return Err(self.error_at_pos("Expected pragma"));
 
        }
 
        if self.has_string(b"version") {
 
            self.consume_string(b"version")?;
 
            self.consume_whitespace(true)?;
 
            if !self.has_integer() {
 
                return Err(self.error_at_pos("Expected integer constant"));
 
            }
 
            let version = self.consume_integer()?;
 
            debug_assert!(version >= 0);
 
            return Ok(h.alloc_pragma(|this| Pragma::Version(PragmaVersion{
 
                this, position, version: version as u64
 
            })))
 
        } else if self.has_string(b"module") {
 
            self.consume_string(b"module")?;
 
            self.consume_whitespace(true)?;
 
            if !self.has_identifier() {
 
                return Err(self.error_at_pos("Expected identifier"));
 
            }
 
            let mut value = Vec::new();
 
            let mut ident = self.consume_ident()?;
 
            value.append(&mut ident);
 
            while self.has_string(b".") {
 
                self.consume_string(b".")?;
 
                value.push(b'.');
 
                ident = self.consume_ident()?;
 
                value.append(&mut ident);
 
            }
 
            return Ok(h.alloc_pragma(|this| Pragma::Module(PragmaModule{
 
                this, position, value
 
            })));
 
        } else {
 
            return Err(self.error_at_pos("Unknown pragma"));
 
        }
 
    }
 

	
 
    fn has_import(&self) -> bool {
 
        self.has_keyword(b"import")
 
    }
 
    fn consume_import(&mut self, h: &mut Heap) -> Result<ImportId, ParseError> {
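        // Illustrative sketch of the import forms handled below, inferred from
        // the parsing logic (module and symbol names are made up):
        //
        //     import some.module;                       // implicit alias `module`
        //     import some.module as alias;              // explicit alias
        //     import some.module::*;                    // all symbols
        //     import some.module::Symbol;               // single symbol, optional `as`
        //     import some.module::{Symbol as S, Other}; // specific symbols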
 
        // Parse the word "import" and the name of the module
 
        let position = self.source.pos();
 
        self.consume_keyword(b"import")?;
 
        self.consume_whitespace(true)?;
 
        let mut value = Vec::new();
 
        let mut last_ident_pos = self.source.pos();
 
        let mut ident = self.consume_ident()?;
 
        value.append(&mut ident);
 
        let mut last_ident_start = 0;
 

	
 
        while self.has_string(b".") {
 
            self.consume_string(b".")?;
 
            value.push(b'.');
 
            last_ident_pos = self.source.pos();
 
            ident = self.consume_ident()?;
 
            last_ident_start = value.len();
 
            value.append(&mut ident);
 
        }
 

	
 

	
 
        self.consume_whitespace(false)?;
 

	
 
        // Check for the potential aliasing or specific module imports
 
        let import = if self.has_string(b"as") {
 
            self.consume_string(b"as")?;
 
            self.consume_whitespace(true)?;
 
            let alias = self.consume_identifier()?;
 

	
 
            h.alloc_import(|this| Import::Module(ImportModule{
 
                this,
 
                position,
 
                module_name: value,
 
                alias,
 
                module_id: None,
 
            }))
 
        } else if self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 

	
 
            let next = self.source.next();
 
            if Some(b'{') == next {
 
                let symbols = match self.consume_comma_separated(
 
                    h, b'{', b'}', "Expected end of import list",
 
                    |lexer, _heap| {
 
                        // Symbol name
 
                        let position = lexer.source.pos();
 
                        let name = lexer.consume_identifier()?;
 
                        lexer.consume_whitespace(false)?;
 

	
 
                        // Symbol alias
 
                        if lexer.has_string(b"as") {
 
                            // With alias
 
                            lexer.consume_string(b"as")?;
 
                            lexer.consume_whitespace(true)?;
 
                            let alias = lexer.consume_identifier()?;
 

	
 
                            Ok(AliasedSymbol{
 
                                position,
 
                                name,
 
                                alias,
 
                                definition_id: None
 
                            })
 
                        } else {
 
                            // Without alias
 
                            Ok(AliasedSymbol{
 
                                position,
 
                                name: name.clone(),
 
                                alias: name,
 
                                definition_id: None
 
                            })
 
                        }
 
                    }
 
                )? {
 
                    Some(symbols) => symbols,
 
                    None => unreachable!(), // because we checked for opening '{'
 
                };
 

	
 
                h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    position,
 
                    module_name: value,
 
                    module_id: None,
 
                    symbols,
 
                }))
 
            } else if Some(b'*') == next {
 
                self.source.consume();
 
                h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    position,
 
                    module_name: value,
 
                    module_id: None,
 
                    symbols: Vec::new()
 
                }))
 
            } else if self.has_identifier() {
 
                let position = self.source.pos();
 
                let name = self.consume_identifier()?;
 
                self.consume_whitespace(false)?;
 
                let alias = if self.has_string(b"as") {
 
                    self.consume_string(b"as")?;
 
                    self.consume_whitespace(true)?;
 
                    self.consume_identifier()?
 
                } else {
 
                    name.clone()
 
                };
 

	
 
                h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    position,
 
                    module_name: value,
 
                    module_id: None,
 
                    symbols: vec![AliasedSymbol{
 
                        position,
 
                        name,
 
                        alias,
 
                        definition_id: None
 
                    }]
 
                }))
 
            } else {
 
                return Err(self.error_at_pos("Expected '*', '{' or a symbol name"));
 
            }
 
        } else {
 
            // No explicit alias or subimports, so implicit alias
 
            let alias_value = Vec::from(&value[last_ident_start..]);
 
            h.alloc_import(|this| Import::Module(ImportModule{
 
                this,
 
                position,
 
                module_name: value,
 
                alias: Identifier{
 
                    position: last_ident_pos,
 
                    value: Vec::from(alias_value),
 
                },
 
                module_id: None,
 
            }))
 
        };
 

	
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(import)
 
    }
 
    pub fn consume_protocol_description(&mut self, h: &mut Heap) -> Result<RootId, ParseError> {
 
        let position = self.source.pos();
 
        let mut pragmas = Vec::new();
 
        let mut imports = Vec::new();
 
        let mut definitions = Vec::new();
 
        self.consume_whitespace(false)?;
 
        while self.has_pragma() {
 
            let pragma = self.consume_pragma(h)?;
 
            pragmas.push(pragma);
 
            self.consume_whitespace(false)?;
 
        }
 
        while self.has_import() {
 
            let import = self.consume_import(h)?;
 
            imports.push(import);
 
            self.consume_whitespace(false)?;
 
        }
 
        while self.has_symbol_definition() {
 
            let def = self.consume_symbol_definition(h)?;
 
            definitions.push(def);
 
            self.consume_whitespace(false)?;
 
        }
 
        // end of file
 
        if !self.source.is_eof() {
 
            return Err(self.error_at_pos("Expected end of file"));
 
        }
 
        Ok(h.alloc_protocol_description(|this| Root {
 
            this,
 
            position,
 
            pragmas,
 
            imports,
 
            definitions,
 
        }))
 
    }
 
}
src/protocol/parser/mod.rs
Show inline comments
 
mod depth_visitor;
 
pub(crate) mod symbol_table;
 
pub(crate) mod type_table;
 
mod type_resolver;
 
mod visitor;
 
mod visitor_linker;
 
mod utils;
 

	
 
use depth_visitor::*;
 
use symbol_table::SymbolTable;
 
use visitor::Visitor2;
 
use visitor_linker::ValidityAndLinkerVisitor;
 
use type_resolver::{TypeResolvingVisitor, ResolveQueue};
 
use type_table::{TypeTable, TypeCtx};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::lexer::*;
 

	
 
use std::collections::HashMap;
 
use crate::protocol::ast_printer::ASTWriter;
 

	
 
// TODO: @fixme, pub qualifier
 
pub(crate) struct LexedModule {
 
    pub(crate) source: InputSource,
 
    module_name: Vec<u8>,
 
    version: Option<u64>,
 
    pub(crate) root_id: RootId,
 
}
 

	
 
pub struct Parser {
 
    pub(crate) heap: Heap,
 
    pub(crate) modules: Vec<LexedModule>,
 
    pub(crate) module_lookup: HashMap<Vec<u8>, usize>, // from (optional) module name to `modules` idx
 
    pub(crate) symbol_table: SymbolTable,
 
    pub(crate) type_table: TypeTable,
 
}
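
// Illustrative usage sketch of the parser above (not part of this changeset);
// `input_source` stands for an `InputSource` constructed elsewhere:
//
//     let mut parser = Parser::new();
//     let root_id = parser.feed(input_source)?; // may be called once per module
//     parser.parse()?;                          // resolve symbols/types and check all modules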
 

	
 
impl Parser {
 
    pub fn new() -> Self {
 
        Parser{
 
            heap: Heap::new(),
 
            modules: Vec::new(),
 
            module_lookup: HashMap::new(),
 
            symbol_table: SymbolTable::new(),
 
            type_table: TypeTable::new(),
 
        }
 
    }
 

	
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<RootId, ParseError> {
 
        // Lex the input source
 
        let mut lex = Lexer::new(&mut source);
 
        let pd = lex.consume_protocol_description(&mut self.heap)?;
 

	
 
        // Seek the module name and version
 
        let root = &self.heap[pd];
 
        let mut module_name_pos = InputPosition::default();
 
        let mut module_name = Vec::new();
 
        let mut module_version_pos = InputPosition::default();
 
        let mut module_version = None;
 

	
 
        for pragma in &root.pragmas {
 
            match &self.heap[*pragma] {
 
                Pragma::Module(module) => {
 
                    if !module_name.is_empty() {
 
                        return Err(
 
                            ParseError::new_error(&source, module.position, "Double definition of module name in the same file")
 
                                .with_postfixed_info(&source, module_name_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_name_pos = module.position.clone();
 
                    module_name = module.value.clone();
 
                },
 
                Pragma::Version(version) => {
 
                    if module_version.is_some() {
 
                        return Err(
 
                            ParseError::new_error(&source, version.position, "Double definition of module version")
 
                                .with_postfixed_info(&source, module_version_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_version_pos = version.position.clone();
 
                    module_version = Some(version.version);
 
                },
 
            }
 
        }
 

	
 
        // Add module to list of modules and prevent naming conflicts
 
        let cur_module_idx = self.modules.len();
 
        if let Some(prev_module_idx) = self.module_lookup.get(&module_name) {
 
            // Find `#module` statement in other module again
 
            let prev_module = &self.modules[*prev_module_idx];
 
            let prev_module_pos = self.heap[prev_module.root_id].pragmas
 
                .iter()
 
                .find_map(|p| {
 
                    match &self.heap[*p] {
 
                        Pragma::Module(module) => Some(module.position.clone()),
 
                        _ => None
 
                    }
 
                })
 
                .unwrap_or(InputPosition::default());
 

	
 
            let module_name_msg = if module_name.is_empty() {
 
                format!("a nameless module")
 
            } else {
 
                format!("module '{}'", String::from_utf8_lossy(&module_name))
 
            };
 

	
 
            return Err(
 
                ParseError::new_error(&source, module_name_pos, &format!("Double definition of {} across files", module_name_msg))
 
                    .with_postfixed_info(&prev_module.source, prev_module_pos, "Other definition was here")
 
            );
 
        }
 

	
 
        self.modules.push(LexedModule{
 
            source,
 
            module_name: module_name.clone(),
 
            version: module_version,
 
            root_id: pd
 
        });
 
        self.module_lookup.insert(module_name, cur_module_idx);
 
        Ok(pd)
 
    }
 

	
 
    fn resolve_symbols_and_types(&mut self) -> Result<(), ParseError> {
 
        // Construct the symbol table to resolve any imports and/or definitions,
 
        // then use the symbol table to actually annotate all of the imports.
 
        // If the symbol table is constructed correctly then all imports MUST be
 
        // resolvable.
 
        self.symbol_table.build(&self.heap, &self.modules)?;
 

	
 
        // Not pretty, but we need to work around Rust's borrowing rules; it is
 
        // totally safe to mutate the contents of an AST element that we are
 
        // not borrowing anywhere else.
 
        let mut module_index = 0;
 
        let mut import_index = 0;
 
        loop {
 
            if module_index >= self.modules.len() {
 
                break;
 
            }
 

	
 
            let module_root_id = self.modules[module_index].root_id;
 
            let import_id = {
 
                let root = &self.heap[module_root_id];
 
                if import_index >= root.imports.len() {
 
                    module_index += 1;
 
                    import_index = 0;
 
                    continue
 
                }
 
                root.imports[import_index]
 
            };
 

	
 
            let import = &mut self.heap[import_id];
 
            match import {
 
                Import::Module(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module import already resolved");
 
                    let target_module_id = self.symbol_table.resolve_module(&import.module_name)
 
                        .expect("module import is resolved by symbol table");
 
                    import.module_id = Some(target_module_id)
 
                },
 
                Import::Symbols(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module of symbol import already resolved");
 
                    let target_module_id = self.symbol_table.resolve_module(&import.module_name)
 
                        .expect("symbol import's module is resolved by symbol table");
 
                    import.module_id = Some(target_module_id);
 

	
 
                    for symbol in &mut import.symbols {
 
                        debug_assert!(symbol.definition_id.is_none(), "symbol import already resolved");
 
                        let (_, target_definition_id) = self.symbol_table.resolve_identifier(module_root_id, &symbol.alias)
 
                            .expect("symbol import is resolved by symbol table")
 
                            .as_definition()
 
                            .expect("symbol import does not resolve to namespace symbol");
 
                        symbol.definition_id = Some(target_definition_id);
 
                    }
 
                }
 
            }
 

	
 
            import_index += 1;
 
        }
 

	
 
        // All imports in the AST are now annotated. We now use the symbol table
 
        // to construct the type table.
 
        let mut type_ctx = TypeCtx::new(&self.symbol_table, &mut self.heap, &self.modules);
 
        self.type_table.build_base_types(&mut type_ctx)?;
 

	
 
        Ok(())
 
    }
 

	
 
    pub fn parse(&mut self) -> Result<(), ParseError> {
 
        self.resolve_symbols_and_types()?;
 

	
 
        // Validate and link all modules
 
        let mut visit = ValidityAndLinkerVisitor::new();
 
        for module in &self.modules {
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.visit_module(&mut ctx)?;
 
        }
 

	
 
        // Perform typechecking on all modules
 
        let mut visit = TypeResolvingVisitor::new();
 
        let mut queue = ResolveQueue::new();
 
        for module in &self.modules {
 
            let ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            TypeResolvingVisitor::queue_module_definitions(&ctx, &mut queue);   
 
        };
 
        while !queue.is_empty() {
 
            let top = queue.pop().unwrap();
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module: &self.modules[top.root_id.index as usize],
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.handle_module_definition(&mut ctx, &mut queue, top)?;
 
        }
 

	
 
        // Perform remaining steps
 
        // TODO: Phase out at some point
 
        for module in &self.modules {
 
            let root_id = module.root_id;
 
            if let Err((position, message)) = Self::parse_inner(&mut self.heap, root_id) {
 
                return Err(ParseError::new_error(&module.source, position, &message))
 
            }
 
        }
 

	
 
        // let mut writer = ASTWriter::new();
 
        // let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap();
 
        // writer.write_ast(&mut file, &self.heap);
 
        let mut writer = ASTWriter::new();
 
        let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap();
 
        writer.write_ast(&mut file, &self.heap);
 

	
 
        Ok(())
 
    }
 

	
 
    pub fn parse_inner(h: &mut Heap, pd: RootId) -> VisitorResult {
 
        // TODO: @cleanup, slowly phasing out old compiler
 
        // NestedSynchronousStatements::new().visit_protocol_description(h, pd)?;
 
        // ChannelStatementOccurrences::new().visit_protocol_description(h, pd)?;
 
        // FunctionStatementReturns::new().visit_protocol_description(h, pd)?;
 
        // ComponentStatementReturnNew::new().visit_protocol_description(h, pd)?;
 
        // CheckBuiltinOccurrences::new().visit_protocol_description(h, pd)?;
 
        // BuildSymbolDeclarations::new().visit_protocol_description(h, pd)?;
 
        // LinkCallExpressions::new().visit_protocol_description(h, pd)?;
 
        // BuildScope::new().visit_protocol_description(h, pd)?;
 
        // ResolveVariables::new().visit_protocol_description(h, pd)?;
 
        LinkStatements::new().visit_protocol_description(h, pd)?;
 
        // BuildLabels::new().visit_protocol_description(h, pd)?;
 
        // ResolveLabels::new().visit_protocol_description(h, pd)?;
 
        AssignableExpressions::new().visit_protocol_description(h, pd)?;
 
        IndexableExpressions::new().visit_protocol_description(h, pd)?;
 
        SelectableExpressions::new().visit_protocol_description(h, pd)?;
 

	
 
        Ok(())
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/type_resolver.rs
Show inline comments
 
/// type_resolver.rs
 
///
 
/// Performs type inference and type checking. Type inference is implemented by
 
/// applying constraints on (sub)trees of types. During this process the
 
/// resolver takes the `ParserType` structs (the representation of the types
 
/// written by the programmer), converts them to `InferenceType` structs (the
 
/// temporary data structure used during type inference) and attempts to arrive
 
/// at `ConcreteType` structs (the representation of a fully checked and
 
/// validated type).
 
///
 
/// The resolver will visit every statement and expression relevant to the
 
/// procedure, inserting an initial type for each based on its context (e.g. a
 
/// return statement's expression must match the function's return type, an
 
/// if statement's test expression must evaluate to a boolean). When all are
 
/// visited we attempt to make progress in evaluating the types. Whenever a type
 
/// is progressed we queue the related expressions for further type progression.
 
/// Once no more expressions are in the queue the algorithm is finished. At this
 
/// point either all types are inferred (or can be trivially implicitly
 
/// determined), or we have incomplete types. In the latter case we return an
 
/// error.
 
///
 
/// Inference may be applied on non-polymorphic procedures and on polymorphic
 
/// procedures. When dealing with a non-polymorphic procedure we apply the type
 
/// resolver and annotate the AST with the `ConcreteType`s. When dealing with
 
/// polymorphic procedures we will only annotate the AST once, preserving
 
/// references to polymorphic variables. Any later pass will perform just the
 
/// type checking.
 
///
 
/// TODO: Needs a thorough rewrite:
 
///  0. polymorph_progress is intentionally broken at the moment.
 
///  1. For polymorphic type inference we need to have an extra datastructure
 
///     for progressing the polymorphic variables and mapping them back to each
 
///     signature type that uses that polymorphic type. The two types of markers
 
///     became somewhat of a mess.
 
///  2. We're doing a lot of extra work. It seems better to apply the initial
 
///     type based on expression parents, then to apply forced constraints (arg
 
///     to a fires() call must be port-like), only then to start progressing the
 
///     types.
 
///     Furthermore, queueing of expressions can be more intelligent, currently
 
///     every child/parent of an expression is inferred again when queued. Hence
 
///     we need to queue only specific children/parents of expressions.
 
///  3. Remove the `msg` type?
 
///  4. Disallow certain types in certain operations (e.g. `Void`).
 
///  5. Implement implicit and explicit casting.
 
///  6. Investigate different ways of performing the type-on-type inference,
 
///     maybe there is a better way than flattened trees + markers?
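///
/// As a rough illustration of the flattened-tree representation used below
/// (a sketch, not part of this changeset): the type `Array<Int>` is stored as
/// the part sequence `[Array, Int]`, and a user-defined two-argument type such
/// as a hypothetical `Pair<Byte, Bool>` as `[Instance(pair_id, 2), Byte, Bool]`,
/// where `pair_id` stands for that definition's `DefinitionId`.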
 

	
 
macro_rules! debug_log {
 
    ($format:literal) => {
 
        enabled_debug_print!(true, "types", $format);
 
    };
 
    ($format:literal, $($args:expr),*) => {
 
        enabled_debug_print!(true, "types", $format, $($args),*);
 
    };
 
}
 

	
 
use std::collections::{HashMap, HashSet, VecDeque};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::parser::type_table::*;
 
use super::visitor::{
 
    STMT_BUFFER_INIT_CAPACITY,
 
    EXPR_BUFFER_INIT_CAPACITY,
 
    Ctx,
 
    Visitor2,
 
    VisitorResult
 
};
 
use std::collections::hash_map::Entry;
 

	
 
const MESSAGE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Message, InferenceTypePart::Byte ];
 
const BOOL_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Bool ];
 
const NUMBERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::NumberLike ];
 
const INTEGERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::IntegerLike ];
 
const ARRAY_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Array, InferenceTypePart::Unknown ];
 
const ARRAYLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::ArrayLike, InferenceTypePart::Unknown ];
 

	
 
/// TODO: @performance Turn into PartialOrd+Ord to simplify checks
 
/// TODO: @types Remove the Message -> Byte hack at some point...
 
#[derive(Debug, Clone, Eq, PartialEq)]
 
pub(crate) enum InferenceTypePart {
 
    // A marker with an identifier which we can use to retrieve the type subtree
 
    // that follows the marker. This is used to perform type inference on
 
    // polymorphs: an expression may determine the polymorph's type, after which we
 
    // need to apply that information to all other places where the polymorph is
 
    // used.
 
    MarkerDefinition(usize), // marker for polymorph types on a procedure's definition
 
    MarkerBody(usize), // marker for polymorph types within a procedure body
 
    // Completely unknown type, needs to be inferred
 
    Unknown,
 
    // Partially known type, may be inferred to be the appropriate related 
 
    // type.
 
    // IndexLike,      // index into array/slice
 
    NumberLike,     // any kind of integer/float
 
    IntegerLike,    // any kind of integer
 
    ArrayLike,      // array or slice. Note that this must have a subtype
 
    PortLike,       // input or output port
 
    // Special types that cannot be instantiated by the user
 
    Void, // For builtin functions that do not return anything
 
    // Concrete types without subtypes
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // One subtype
 
    Message,
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // A user-defined type with any number of subtypes
 
    Instance(DefinitionId, usize)
 
}
 

	
 
impl InferenceTypePart {
 
    fn is_marker(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        match self {
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if the type is concrete; markers are interpreted as concrete
 
    /// types.
 
    fn is_concrete(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | 
 
            ITP::ArrayLike | ITP::PortLike => false,
 
            _ => true
 
        }
 
    }
 

	
 
    fn is_concrete_number(&self) -> bool {
 
        // TODO: @float
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_integer(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_msg_array_or_slice(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Array | ITP::Slice | ITP::Message => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_port(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Input | ITP::Output => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if a part is less specific than the argument. Only checks for 
 
    /// single-part inference (i.e. not the replacement of an `Unknown` variant 
 
    /// with the argument)
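    /// For example (per the rules below): `IntegerLike` may be inferred from
    /// `Int`, and `PortLike` from `Input`, but a fully concrete part such as
    /// `Bool` can never be inferred from anything through this check.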
 
    fn may_be_inferred_from(&self, arg: &InferenceTypePart) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        (*self == ITP::IntegerLike && arg.is_concrete_integer()) ||
 
        (*self == ITP::NumberLike && (arg.is_concrete_number() || *arg == ITP::IntegerLike)) ||
 
        (*self == ITP::ArrayLike && arg.is_concrete_msg_array_or_slice()) ||
 
        (*self == ITP::PortLike && arg.is_concrete_port())
 
    }
 

	
 
    /// Returns the change in "iteration depth" when traversing this particular
 
    /// part. The iteration depth is used to traverse the tree in a linear 
 
    /// fashion. It is basically `number_of_subtypes - 1`
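    /// For example: `Int` has no subtypes and yields `-1`, `Array` has one
    /// subtype and yields `0`, and an `Instance(id, 3)` with three subtypes
    /// yields `2`.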
 
    fn depth_change(&self) -> i32 {
 
        use InferenceTypePart as ITP;
 
        match &self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike |
 
            ITP::Void | ITP::Bool |
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long | 
 
            ITP::String => {
 
                -1
 
            },
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) |
 
            ITP::ArrayLike | ITP::Message | ITP::Array | ITP::Slice |
 
            ITP::PortLike | ITP::Input | ITP::Output => {
 
                // One subtype, so do not modify depth
 
                0
 
            },
 
            ITP::Instance(_, num_args) => {
 
                (*num_args as i32) - 1
 
            }
 
        }
 
    }
 
}
 

	
 
impl From<ConcreteTypePart> for InferenceTypePart {
 
    fn from(v: ConcreteTypePart) -> InferenceTypePart {
 
        use ConcreteTypePart as CTP;
 
        use InferenceTypePart as ITP;
 

	
 
        match v {
 
            CTP::Marker(_) => {
 
                unreachable!("encountered marker while converting concrete type to inferred type");
 
            }
 
            CTP::Void => ITP::Void,
 
            CTP::Message => ITP::Message,
 
            CTP::Bool => ITP::Bool,
 
            CTP::Byte => ITP::Byte,
 
            CTP::Short => ITP::Short,
 
            CTP::Int => ITP::Int,
 
            CTP::Long => ITP::Long,
 
            CTP::String => ITP::String,
 
            CTP::Array => ITP::Array,
 
            CTP::Slice => ITP::Slice,
 
            CTP::Input => ITP::Input,
 
            CTP::Output => ITP::Output,
 
            CTP::Instance(id, num) => ITP::Instance(id, num),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug)]
 
struct InferenceType {
 
    has_body_marker: bool,
 
    is_done: bool,
 
    parts: Vec<InferenceTypePart>,
 
}
 

	
 
impl InferenceType {
 
    /// Generates a new InferenceType. The two boolean flags will be checked in
 
    /// debug mode.
 
    fn new(has_body_marker: bool, is_done: bool, parts: Vec<InferenceTypePart>) -> Self {
 
        if cfg!(debug_assertions) {
 
            debug_assert!(!parts.is_empty());
 
            if !has_body_marker {
 
                debug_assert!(!parts.iter().any(|v| {
                    if let InferenceTypePart::MarkerBody(_) = v { true } else { false }
                }));
 
            }
 
            if is_done {
 
                debug_assert!(parts.iter().all(|v| v.is_concrete()));
 
            }
 
        }
 
        Self{ has_body_marker: has_body_marker, is_done, parts }
 
    }
 

	
 
    /// Replaces a type subtree with the provided subtree. The caller must make
 
    /// sure that the replacement is a well-formed type subtree.
 
    fn replace_subtree(&mut self, start_idx: usize, with: &[InferenceTypePart]) {
 
        let end_idx = Self::find_subtree_end_idx(&self.parts, start_idx);
 
        debug_assert_eq!(with.len(), Self::find_subtree_end_idx(with, 0));
 
        self.parts.splice(start_idx..end_idx, with.iter().cloned());
 
        self.recompute_is_done();
 
    }
 

	
 
    // TODO: @performance, might all be done inline in the type inference methods
 
    fn recompute_is_done(&mut self) {
 
        self.is_done = self.parts.iter().all(|v| v.is_concrete());
 
    }
 

	
 
    /// Seeks a body marker starting at the specified position. If a marker is
 
    /// found then its value and the index of the type subtree that follows it
 
    /// are returned.
 
    fn find_body_marker(&self, mut start_idx: usize) -> Option<(usize, usize)> {
 
        while start_idx < self.parts.len() {
 
            if let InferenceTypePart::MarkerBody(marker) = &self.parts[start_idx] {
 
                return Some((*marker, start_idx + 1))
 
            }
 

	
 
            start_idx += 1;
 
        }
 

	
 
        None
 
    }
 

	
 
    /// Returns an iterator over all body markers and the partial type tree that
 
    /// follows those markers. If it is a problem that `InferenceType` is 
 
    /// borrowed by the iterator, then use `find_body_marker`.
 
    fn body_marker_iter(&self) -> InferenceTypeMarkerIter {
 
        InferenceTypeMarkerIter::new(&self.parts)
 
    }
 

	
 
    /// Given that the `parts` are a depth-first serialized tree of types, this
 
    /// function finds the subtree anchored at a specific node. The returned 
 
    /// index is exclusive.
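    /// For example, for the parts `[Array, Instance(id, 2), Int, Bool]` and
    /// `start_idx = 1`, the returned index is `4`: the subtree anchored at the
    /// `Instance` part consists of the instance itself and its two subtypes.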
 
    fn find_subtree_end_idx(parts: &[InferenceTypePart], start_idx: usize) -> usize {
 
        let mut depth = 1;
 
        let mut idx = start_idx;
 

	
 
        while idx < parts.len() {
 
            depth += parts[idx].depth_change();
 
            if depth == 0 {
 
                return idx + 1;
 
            }
 
            idx += 1;
 
        }
 

	
 
        // If here, then the inference type is malformed
 
        unreachable!();
 
    }
 

	
 
    /// Call that attempts to infer the part at `to_infer.parts[to_infer_idx]` 
 
    /// using the subtree at `template.parts[template_idx]`. Will return 
 
    /// `Some(depth_change_due_to_traversal)` if type inference has been 
 
    /// applied. In this case the indices will also be modified to point to the 
 
    /// next part in both templates. If type inference has not (or: could not) 
 
    /// be applied then `None` will be returned. Note that this might mean that 
 
    /// the types are incompatible.
 
    ///
 
    /// As this is a helper function, some assumptions: the parts are not 
 
    /// exactly equal, and neither of them contains a marker. Also: only the
 
    /// `to_infer` parts are checked for inference. It might be that this 
 
    /// function returns `None`, but that `template` is still compatible
 
    /// with `to_infer`, e.g. when `template` has an `Unknown` part.
 
    fn infer_part_for_single_type(
 
        to_infer: &mut InferenceType, to_infer_idx: &mut usize,
 
        template_parts: &[InferenceTypePart], template_idx: &mut usize,
 
    ) -> Option<i32> {
 
        use InferenceTypePart as ITP;
 

	
 
        let to_infer_part = &to_infer.parts[*to_infer_idx];
 
        let template_part = &template_parts[*template_idx];
 

	
 
        // TODO: Maybe do this differently?
 
        let mut template_definition_marker = None;
 
        if *template_idx > 0 {
 
            if let ITP::MarkerDefinition(marker) = &template_parts[*template_idx - 1] {
 
                template_definition_marker = Some(*marker)
 
            }
 
        }
 

	
 
        // Check for programmer mistakes
 
        debug_assert_ne!(to_infer_part, template_part);
 
        debug_assert!(!to_infer_part.is_marker(), "marker encountered in 'infer part'");
 
        debug_assert!(!template_part.is_marker(), "marker encountered in 'template part'");
 

	
 
        // Inference of a somewhat-specified type
 
        if to_infer_part.may_be_inferred_from(template_part) {
 
            let depth_change = to_infer_part.depth_change();
 
            debug_assert_eq!(depth_change, template_part.depth_change());
 

	
 
            if let Some(marker) = template_definition_marker {
 
                to_infer.parts.insert(*to_infer_idx, ITP::MarkerDefinition(marker));
 
                *to_infer_idx += 1;
 
            }
 

	
 
            to_infer.parts[*to_infer_idx] = template_part.clone();
 

	
 
            *to_infer_idx += 1;
 
            *template_idx += 1;
 
            return Some(depth_change);
 
        }
 

	
 
        // Inference of a completely unknown type
 
        if *to_infer_part == ITP::Unknown {
 
            // template part is different, so cannot be unknown, hence copy the
 
            // entire subtree. Make sure to copy definition markers, but not to
 
            // transfer polymorph markers.
 
            let template_end_idx = Self::find_subtree_end_idx(template_parts, *template_idx);
 
            let template_start_idx = if let Some(marker) = template_definition_marker {
 
                to_infer.parts[*to_infer_idx] = ITP::MarkerDefinition(marker);
 
                *template_idx
 
            } else {
 
                to_infer.parts[*to_infer_idx] = template_parts[*template_idx].clone();
 
                *template_idx + 1
 
            };
 
            *to_infer_idx += 1;
 

	
 
            for template_idx in template_start_idx..template_end_idx {
 
                let template_part = &template_parts[template_idx];
 
                if let ITP::MarkerBody(_) = template_part {
 
                    // Do not copy this one
 
                } else {
 
                    to_infer.parts.insert(*to_infer_idx, template_part.clone());
 
                    *to_infer_idx += 1;
 
                }
 
            }
 
            *template_idx = template_end_idx;
 

	
 
            // Note: by definition the LHS was Unknown and the RHS traversed a 
 
            // full subtree.
 
            return Some(-1);
 
        }
 

	
 
        None
 
    }
 

	
 
    /// Call that checks if the `to_check` part is compatible with the `template`
 
    /// part. This is essentially a copy of `infer_part_for_single_type`, but
 
    /// without actually copying the type parts.
 
    fn check_part_for_single_type(
 
        to_check_parts: &[InferenceTypePart], to_check_idx: &mut usize,
 
        template_parts: &[InferenceTypePart], template_idx: &mut usize
 
    ) -> Option<i32> {
 
        use InferenceTypePart as ITP;
 

	
 
        let to_check_part = &to_check_parts[*to_check_idx];
 
        let template_part = &template_parts[*template_idx];
 

	
 
        // Checking programmer errors
 
        debug_assert_ne!(to_check_part, template_part);
 
        debug_assert!(!to_check_part.is_marker(), "marker encountered in 'to_check part'");
 
        debug_assert!(!template_part.is_marker(), "marker encountered in 'template part'");
 

	
 
        if to_check_part.may_be_inferred_from(template_part) {
 
            let depth_change = to_check_part.depth_change();
 
            debug_assert_eq!(depth_change, template_part.depth_change());
 
            *to_check_idx += 1;
 
            *template_idx += 1;
 
            return Some(depth_change);
 
        }
 

	
 
        if *to_check_part == ITP::Unknown {
 
            *to_check_idx += 1;
 
            *template_idx = Self::find_subtree_end_idx(template_parts, *template_idx);
 

	
 
            // By definition LHS and RHS had depth change of -1
 
            return Some(-1);
 
        }
 

	
 
        None
 
    }
 

	
 
    /// Attempts to infer types between two `InferenceType` instances. This 
 
    /// function is unsafe as it accepts pointers to work around Rust's 
 
    /// borrowing rules. The caller must ensure that the pointers are distinct.
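    /// As a sketch of the expected behaviour: inferring `[Array, Unknown]`
    /// against `[ArrayLike, Int]` progresses both sides to `[Array, Int]` and
    /// returns `DualInferenceResult::Both`.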
 
    unsafe fn infer_subtrees_for_both_types(
 
        type_a: *mut InferenceType, start_idx_a: usize,
 
        type_b: *mut InferenceType, start_idx_b: usize
 
    ) -> DualInferenceResult {
 
        debug_assert!(!std::ptr::eq(type_a, type_b), "encountered pointers to the same inference type");
 
        let type_a = &mut *type_a;
 
        let type_b = &mut *type_b;
 

	
 
        let mut modified_a = false;
 
        let mut modified_b = false;
 
        let mut idx_a = start_idx_a;
 
        let mut idx_b = start_idx_b;
 
        let mut depth = 1;
 

	
 
        while depth > 0 {
 
            // Advance indices if we encounter markers or equal parts
 
            let part_a = &type_a.parts[idx_a];
 
            let part_b = &type_b.parts[idx_b];
 
            
 
            if part_a == part_b {
 
                let depth_change = part_a.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, part_b.depth_change());
 
                idx_a += 1;
 
                idx_b += 1;
 
                continue;
 
            }
 
            if part_a.is_marker() { idx_a += 1; continue; }
 
            if part_b.is_marker() { idx_b += 1; continue; }
 

	
 
            // Types are not equal and are both not markers
 
            if let Some(depth_change) = Self::infer_part_for_single_type(type_a, &mut idx_a, &type_b.parts, &mut idx_b) {
 
                depth += depth_change;
 
                modified_a = true;
 
                continue;
 
            }
 
            if let Some(depth_change) = Self::infer_part_for_single_type(type_b, &mut idx_b, &type_a.parts, &mut idx_a) {
 
                depth += depth_change;
 
                modified_b = true;
 
                continue;
 
            }
 

	
 
            // And can also not be inferred in any way: types must be incompatible
 
            return DualInferenceResult::Incompatible;
 
        }
 

	
 
        if modified_a { type_a.recompute_is_done(); }
 
        if modified_b { type_b.recompute_is_done(); }
 

	
 
        // If here then we completely inferred the subtrees.
 
        match (modified_a, modified_b) {
 
            (false, false) => DualInferenceResult::Neither,
 
            (false, true) => DualInferenceResult::Second,
 
            (true, false) => DualInferenceResult::First,
 
            (true, true) => DualInferenceResult::Both
 
        }
 
    }
 

	
 
    /// Attempts to infer the first subtree based on the template. Like
 
    /// `infer_subtrees_for_both_types`, but now only applying inference to
 
    /// `to_infer` based on the type information in `template`.
 
    /// Secondary use is to make sure that a type follows a certain template.
 
    fn infer_subtree_for_single_type(
 
        to_infer: &mut InferenceType, mut to_infer_idx: usize,
 
        template: &[InferenceTypePart], mut template_idx: usize,
 
    ) -> SingleInferenceResult {
 
        let mut modified = false;
 
        let mut depth = 1;
 

	
 
        while depth > 0 {
 
            let to_infer_part = &to_infer.parts[to_infer_idx];
 
            let template_part = &template[template_idx];
 

	
 
            if to_infer_part == template_part {
 
                let depth_change = to_infer_part.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, template_part.depth_change());
 
                to_infer_idx += 1;
 
                template_idx += 1;
 
                continue;
 
            }
 
            if to_infer_part.is_marker() { to_infer_idx += 1; continue; }
 
            if template_part.is_marker() { template_idx += 1; continue; }
 

	
 
            // Types are not equal and not markers. So check if we can infer 
 
            // anything
 
            if let Some(depth_change) = Self::infer_part_for_single_type(
 
                to_infer, &mut to_infer_idx, template, &mut template_idx
 
            ) {
 
                depth += depth_change;
 
                modified = true;
 
                continue;
 
            }
 

	
 
            // We cannot infer anything, but the template may still be 
 
            // compatible with the type we're inferring
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                template, &mut template_idx, &to_infer.parts, &mut to_infer_idx
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 

	
 
            return SingleInferenceResult::Incompatible
 
        }
 

	
 
        return if modified {
 
            to_infer.recompute_is_done();
 
            SingleInferenceResult::Modified
 
        } else {
 
            SingleInferenceResult::Unmodified
 
        }
 
    }
 

	
 
    /// Checks if both types are compatible, doesn't perform any inference
 
    fn check_subtrees(
 
        type_parts_a: &[InferenceTypePart], start_idx_a: usize,
 
        type_parts_b: &[InferenceTypePart], start_idx_b: usize
 
    ) -> bool {
 
        let mut depth = 1;
 
        let mut idx_a = start_idx_a;
 
        let mut idx_b = start_idx_b;
 

	
 
        while depth > 0 {
 
            let part_a = &type_parts_a[idx_a];
 
            let part_b = &type_parts_b[idx_b];
 

	
 
            if part_a == part_b {
 
                let depth_change = part_a.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, part_b.depth_change());
 
                idx_a += 1;
 
                idx_b += 1;
 
                continue;
 
            }
 
            
 
            if part_a.is_marker() { idx_a += 1; continue; }
 
            if part_b.is_marker() { idx_b += 1; continue; }
 

	
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                type_parts_a, &mut idx_a, type_parts_b, &mut idx_b
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                type_parts_b, &mut idx_b, type_parts_a, &mut idx_a
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 

	
 
            return false;
 
        }
 

	
 
        true
 
    }
 

	
 
    /// Performs the conversion of the inference type into a concrete type.
 
    /// Before calling this function you must make sure that no unspecified types
 
    /// (e.g. Unknown or IntegerLike) exist in the type.
 
    fn write_concrete_type(&self, concrete_type: &mut ConcreteType) {
 
        use InferenceTypePart as ITP;
 
        use ConcreteTypePart as CTP;
 

	
 
        // Make sure inference type is specified but concrete type is not yet specified
 
        debug_assert!(!self.parts.is_empty());
 
        debug_assert!(concrete_type.parts.is_empty());
 
        concrete_type.parts.reserve(self.parts.len());
 

	
 
        let mut idx = 0;
 
        while idx < self.parts.len() {
 
            let part = &self.parts[idx];
 
            let converted_part = match part {
 
                ITP::MarkerDefinition(marker) => {
 
                    // Outer markers are converted to regular markers; we
                    // completely remove the type subtree that follows them.
 
                    idx = InferenceType::find_subtree_end_idx(&self.parts, idx + 1);
 
                    concrete_type.parts.push(CTP::Marker(*marker));
 
                    continue;
 
                },
 
                ITP::MarkerBody(_) => {
 
                    // Inner markers are removed when writing to the concrete
 
                    // type.
 
                    idx += 1;
 
                    continue;
 
                },
 
                ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | ITP::ArrayLike | ITP::PortLike => {
 
                    unreachable!("Attempted to convert inference type part {:?} into concrete type", part);
 
                },
 
                ITP::Void => CTP::Void,
 
                ITP::Message => CTP::Message,
 
                ITP::Bool => CTP::Bool,
 
                ITP::Byte => CTP::Byte,
 
                ITP::Short => CTP::Short,
 
@@ -843,2369 +843,2455 @@ pub(crate) struct TypeResolvingVisitor {
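/// Extra inference state for expressions that carry their own polymorphic
 
/// variables (e.g. calls, struct/enum literals and field selections).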
 
struct ExtraData {
 
    /// Progression of polymorphic variables (if any)
 
    poly_vars: Vec<InferenceType>,
 
    /// Progression of types of call arguments or struct members
 
    embedded: Vec<InferenceType>,
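    /// Progression of the returned/produced type (e.g. a call's result, or the
 
    /// type of the literal or selected field itself)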
 
    returned: InferenceType,
 
}
 

	
 
struct VarData {
 
    /// Type of the variable
 
    var_type: InferenceType,
 
    /// VariableExpressions that use the variable
 
    used_at: Vec<ExpressionId>,
 
    /// For channel statements we link to the other variable such that when one
 
    /// channel's interior type is resolved, we can also resolve the other one.
 
    linked_var: Option<VariableId>,
 
}
 

	
 
impl VarData {
 
    fn new_channel(var_type: InferenceType, other_port: VariableId) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: Some(other_port) }
 
    }
 
    fn new_local(var_type: InferenceType) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: None }
 
    }
 
}
 

	
 
impl TypeResolvingVisitor {
 
    pub(crate) fn new() -> Self {
 
        TypeResolvingVisitor{
 
            definition_type: DefinitionType::None,
 
            poly_vars: Vec::new(),
 
            stmt_buffer: Vec::with_capacity(STMT_BUFFER_INIT_CAPACITY),
 
            expr_buffer: Vec::with_capacity(EXPR_BUFFER_INIT_CAPACITY),
 
            var_types: HashMap::new(),
 
            expr_types: HashMap::new(),
 
            extra_data: HashMap::new(),
 
            expr_queued: HashSet::new(),
 
        }
 
    }
 

	
 
    // TODO: @cleanup Unsure about this, maybe a pattern will arise after
 
    //  a while.
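    //
 
    // Seeds the resolve queue with every non-polymorphic function and component
 
    // in the module; polymorphic definitions are queued later, once a call site
 
    // determines their monomorph types.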
 
    pub(crate) fn queue_module_definitions(ctx: &Ctx, queue: &mut ResolveQueue) {
 
        let root_id = ctx.module.root_id;
 
        let root = &ctx.heap.protocol_descriptions[root_id];
 
        for definition_id in &root.definitions {
 
            let definition = &ctx.heap[*definition_id];
 
            match definition {
 
                Definition::Function(definition) => {
 
                    if definition.poly_vars.is_empty() {
 
                        queue.push(ResolveQueueElement{
 
                            root_id,
 
                            definition_id: *definition_id,
 
                            monomorph_types: Vec::new(),
 
                        })
 
                    }
 
                },
 
                Definition::Component(definition) => {
 
                    if definition.poly_vars.is_empty() {
 
                        queue.push(ResolveQueueElement{
 
                            root_id,
 
                            definition_id: *definition_id,
 
                            monomorph_types: Vec::new(),
 
                        })
 
                    }
 
                },
 
                Definition::Enum(_) | Definition::Struct(_) => {},
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn handle_module_definition(
 
        &mut self, ctx: &mut Ctx, queue: &mut ResolveQueue, element: ResolveQueueElement
 
    ) -> VisitorResult {
 
        // Visit the definition
 
        debug_assert_eq!(ctx.module.root_id, element.root_id);
 
        self.reset();
 
        self.poly_vars.clear();
 
        self.poly_vars.extend(element.monomorph_types.iter().cloned());
 
        self.visit_definition(ctx, element.definition_id)?;
 

	
 
        // Keep resolving types
 
        self.resolve_types(ctx, queue)?;
 
        Ok(())
 
    }
 

	
 
    fn reset(&mut self) {
 
        self.definition_type = DefinitionType::None;
 
        self.poly_vars.clear();
 
        self.stmt_buffer.clear();
 
        self.expr_buffer.clear();
 
        self.var_types.clear();
 
        self.expr_types.clear();
 
        self.extra_data.clear();
 
        self.expr_queued.clear();
 
    }
 
}
 

	
 
impl Visitor2 for TypeResolvingVisitor {
 
    // Definitions
 

	
 
    fn visit_component_definition(&mut self, ctx: &mut Ctx, id: ComponentId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Component(id);
 

	
 
        let comp_def = &ctx.heap[id];
 
        debug_assert_eq!(comp_def.poly_vars.len(), self.poly_vars.len(), "component polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting component '{}': {}", &String::from_utf8_lossy(&comp_def.identifier.value), id.0.index);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        for param_id in comp_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, param.parser_type, true);
 
            debug_assert!(var_type.is_done, "expected component arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData::new_local(var_type));
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Function(id);
 

	
 
        let func_def = &ctx.heap[id];
 
        debug_assert_eq!(func_def.poly_vars.len(), self.poly_vars.len(), "function polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting function '{}': {}", &String::from_utf8_lossy(&func_def.identifier.value), id.0.index);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        for param_id in func_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, param.parser_type, true);
 
            debug_assert!(var_type.is_done, "expected function arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData::new_local(var_type));
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    // Statements
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        // Transfer statements for traversal
 
        let block = &ctx.heap[id];
 

	
 
        for stmt_id in block.statements.clone() {
 
            self.visit_stmt(ctx, stmt_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, ctx: &mut Ctx, id: MemoryStatementId) -> VisitorResult {
 
        let memory_stmt = &ctx.heap[id];
 

	
 
        let local = &ctx.heap[memory_stmt.variable];
 
        let var_type = self.determine_inference_type_from_parser_type(ctx, local.parser_type, true);
 
        self.var_types.insert(memory_stmt.variable.upcast(), VarData::new_local(var_type));
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, ctx: &mut Ctx, id: ChannelStatementId) -> VisitorResult {
 
        let channel_stmt = &ctx.heap[id];
 

	
 
        let from_local = &ctx.heap[channel_stmt.from];
 
        let from_var_type = self.determine_inference_type_from_parser_type(ctx, from_local.parser_type, true);
 
        self.var_types.insert(from_local.this.upcast(), VarData::new_channel(from_var_type, channel_stmt.to.upcast()));
 

	
 
        let to_local = &ctx.heap[channel_stmt.to];
 
        let to_var_type = self.determine_inference_type_from_parser_type(ctx, to_local.parser_type, true);
 
        self.var_types.insert(to_local.this.upcast(), VarData::new_channel(to_var_type, channel_stmt.from.upcast()));
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        let labeled_stmt = &ctx.heap[id];
 
        let substmt_id = labeled_stmt.body;
 
        self.visit_stmt(ctx, substmt_id)
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        let if_stmt = &ctx.heap[id];
 

	
 
        let true_body_id = if_stmt.true_body;
 
        let false_body_id = if_stmt.false_body;
 
        let test_expr_id = if_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_stmt(ctx, true_body_id)?;
 
        self.visit_stmt(ctx, false_body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_while_stmt(&mut self, ctx: &mut Ctx, id: WhileStatementId) -> VisitorResult {
 
        let while_stmt = &ctx.heap[id];
 

	
 
        let body_id = while_stmt.body;
 
        let test_expr_id = while_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        let sync_stmt = &ctx.heap[id];
 
        let body_id = sync_stmt.body;
 

	
 
        self.visit_stmt(ctx, body_id)
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        let return_stmt = &ctx.heap[id];
 
        let expr_id = return_stmt.expression;
 

	
 
        self.visit_expr(ctx, expr_id)
 
    }
 

	
 
    fn visit_assert_stmt(&mut self, ctx: &mut Ctx, id: AssertStatementId) -> VisitorResult {
 
        let assert_stmt = &ctx.heap[id];
 
        let test_expr_id = assert_stmt.expression;
 

	
 
        self.visit_expr(ctx, test_expr_id)
 
    }
 

	
 
    fn visit_new_stmt(&mut self, ctx: &mut Ctx, id: NewStatementId) -> VisitorResult {
 
        let new_stmt = &ctx.heap[id];
 
        let call_expr_id = new_stmt.expression;
 

	
 
        self.visit_call_expr(ctx, call_expr_id)
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        let expr_stmt = &ctx.heap[id];
 
        let subexpr_id = expr_stmt.expression;
 

	
 
        self.visit_expr(ctx, subexpr_id)
 
    }
 

	
 
    // Expressions
 

	
 
    fn visit_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let assign_expr = &ctx.heap[id];
 
        let left_expr_id = assign_expr.left;
 
        let right_expr_id = assign_expr.right;
 

	
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.visit_expr(ctx, right_expr_id)?;
 

	
 
        self.progress_assignment_expr(ctx, id)
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let conditional_expr = &ctx.heap[id];
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        self.expr_types.insert(test_expr_id, InferenceType::new(false, true, vec![InferenceTypePart::Bool]));
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_expr(ctx, true_expr_id)?;
 
        self.visit_expr(ctx, false_expr_id)?;
 

	
 
        self.progress_conditional_expr(ctx, id)
 
    }
 

	
 
    fn visit_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let binary_expr = &ctx.heap[id];
 
        let lhs_expr_id = binary_expr.left;
 
        let rhs_expr_id = binary_expr.right;
 

	
 
        self.visit_expr(ctx, lhs_expr_id)?;
 
        self.visit_expr(ctx, rhs_expr_id)?;
 

	
 
        self.progress_binary_expr(ctx, id)
 
    }
 

	
 
    fn visit_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let unary_expr = &ctx.heap[id];
 
        let arg_expr_id = unary_expr.expression;
 

	
 
        self.visit_expr(ctx, arg_expr_id)?;
 

	
 
        self.progress_unary_expr(ctx, id)
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let indexing_expr = &ctx.heap[id];
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, index_expr_id)?;
 

	
 
        self.progress_indexing_expr(ctx, id)
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let slicing_expr = &ctx.heap[id];
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, from_expr_id)?;
 
        self.visit_expr(ctx, to_expr_id)?;
 

	
 
        self.progress_slicing_expr(ctx, id)
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let select_expr = &ctx.heap[id];
 
        let subject_expr_id = select_expr.subject;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        self.progress_select_expr(ctx, id)
 
    }
 

	
 
    fn visit_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let array_expr = &ctx.heap[id];
 
        // TODO: @performance
 
        for element_id in array_expr.elements.clone().into_iter() {
 
            self.visit_expr(ctx, element_id)?;
 
        }
 

	
 
        self.progress_array_expr(ctx, id)
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let literal_expr = &ctx.heap[id];
 
        match &literal_expr.value {
 
            Literal::Null | Literal::False | Literal::True |
 
            Literal::Integer(_) | Literal::Character(_) => {
 
                // No subexpressions
 
            },
 
            Literal::Struct(literal) => {
 
                // TODO: @performance
 
                let expr_ids: Vec<_> = literal.fields
 
                    .iter()
 
                    .map(|f| f.value)
 
                    .collect();
 

	
 
                self.insert_initial_struct_polymorph_data(ctx, id);
 

	
 
                for expr_id in expr_ids {
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 
            },
 
            Literal::Enum(_) => {
 
                // Enumerations do not carry any subexpressions, but may still
 
                // have a user-defined polymorphic marker variable. For this 
 
                // reason we may still have to apply inference to this 
 
                // polymorphic variable
 
                self.insert_initial_enum_polymorph_data(ctx, id);
 
            }
 
        }
 

	
 
        self.progress_literal_expr(ctx, id)
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        self.insert_initial_call_polymorph_data(ctx, id);
 

	
 
        // TODO: @performance
 
        let call_expr = &ctx.heap[id];
 
        for arg_expr_id in call_expr.arguments.clone() {
 
            self.visit_expr(ctx, arg_expr_id)?;
 
        }
 

	
 
        self.progress_call_expr(ctx, id)
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let var_expr = &ctx.heap[id];
 
        debug_assert!(var_expr.declaration.is_some());
 
        let var_data = self.var_types.get_mut(var_expr.declaration.as_ref().unwrap()).unwrap();
 
        var_data.used_at.push(upcast_id);
 

	
 
        self.progress_variable_expr(ctx, id)
 
    }
 
}
 

	
 
macro_rules! debug_assert_expr_ids_unique_and_known {
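    // In debug builds: asserts that the given expression IDs are pairwise
 
    // distinct and that each one is present in the resolver's `expr_types` map.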
 
    // Base case for a single expression ID
 
    ($resolver:ident, $id:ident) => {
 
        if cfg!(debug_assertions) {
 
            debug_assert!($resolver.expr_types.contains_key(&$id));
 
        }
 
    };
 
    // Base case for two expression IDs
 
    ($resolver:ident, $id1:ident, $id2:ident) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2);
 
    };
 
    // Generic case
 
    ($resolver:ident, $id1:ident, $id2:ident, $($tail:ident),+) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2, $($tail),+);
 
    };
 
}
 

	
 
macro_rules! debug_assert_ptrs_distinct {
 
    // Base case
 
    ($ptr1:ident, $ptr2:ident) => {
 
        debug_assert!(!std::ptr::eq($ptr1, $ptr2));
 
    };
 
    // Generic case
 
    ($ptr1:ident, $ptr2:ident, $($tail:ident),+) => {
 
        debug_assert_ptrs_distinct!($ptr1, $ptr2);
 
        debug_assert_ptrs_distinct!($ptr2, $($tail),+);
 
    };
 
}
 

	
 
impl TypeResolvingVisitor {
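    /// Drains the queue of expressions until no further inference progress can
 
    /// be made. Afterwards every expression type must be fully inferred
 
    /// (freestanding integerlike types default to a regular int); the concrete
 
    /// types are written to (or checked against) the AST, the current monomorph
 
    /// is registered, and fully inferred polymorphic calls and literals are
 
    /// queued or registered for monomorphization.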
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError> {
 
        // Keep inferring until we can no longer make any progress
 
        while let Some(next_expr_id) = self.expr_queued.iter().next() {
 
            let next_expr_id = *next_expr_id;
 
            self.expr_queued.remove(&next_expr_id);
 
            self.progress_expr(ctx, next_expr_id)?;
 
        }
 

	
 
        // Check that all the types we need have been inferred. If we're
 
        // typechecking a polymorphic procedure for a second (or later)
 
        // monomorph, the AST was already annotated during the first pass; in
 
        // that case we only verify the inferred types and do not annotate the
 
        // AST again.
 
        let definition_id = match &self.definition_type {
 
            DefinitionType::Component(id) => id.upcast(),
 
            DefinitionType::Function(id) => id.upcast(),
 
            _ => unreachable!(),
 
        };
 

	
 
        let already_checked = ctx.types.get_base_definition(&definition_id).unwrap().has_any_monomorph();
 
        for (expr_id, expr_type) in self.expr_types.iter_mut() {
 
            if !expr_type.is_done {
 
                // Auto-infer a freestanding integerlike type to a regular int
 
                if expr_type.parts.len() == 1 && expr_type.parts[0] == InferenceTypePart::IntegerLike {
 
                    expr_type.parts[0] = InferenceTypePart::Int;
 
                } else {
 
                    let expr = &ctx.heap[*expr_id];
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Could not fully infer the type of this expression (got '{}')",
 
                            expr_type.display_name(&ctx.heap)
 
                        )
 
                    ))
 
                }
 
            }
 

	
 
            if !already_checked {
 
                let concrete_type = ctx.heap[*expr_id].get_type_mut();
 
                expr_type.write_concrete_type(concrete_type);
 
            } else {
 
                if cfg!(debug_assertions) {
 
                    let mut concrete_type = ConcreteType::default();
 
                    expr_type.write_concrete_type(&mut concrete_type);
 
                    debug_assert_eq!(*ctx.heap[*expr_id].get_type(), concrete_type);
 
                }
 
            }
 
        }
 

	
 
        // All types are fine
 
        ctx.types.add_monomorph(&definition_id, self.poly_vars.clone());
 

	
 
        // Check all things we need to monomorphize
 
        // TODO: Struct/enum/union monomorphization
 
        for (expr_id, extra_data) in self.extra_data.iter() {
 
            if extra_data.poly_vars.is_empty() { continue; }
 

	
 
            // Retrieve polymorph variable specification. Those of struct 
 
            // literals and those of procedure calls need to be fully inferred.
 
            // The remaining ones (e.g. select expressions) allow partial 
 
            // inference of types, as long as the accessed field's type is
 
            // fully inferred.
 
            let needs_full_inference = match &ctx.heap[*expr_id] {
 
                Expression::Call(_) => true,
 
                Expression::Literal(_) => true,
 
                _ => false
 
            };
 

	
 
            if needs_full_inference {
 
                let mut monomorph_types = Vec::with_capacity(extra_data.poly_vars.len());
 
                for (poly_idx, poly_type) in extra_data.poly_vars.iter().enumerate() {
 
                    if !poly_type.is_done {
 
                        // TODO: Single clean function for function signatures and polyvars.
 
                        // TODO: Better error message
 
                        let expr = &ctx.heap[*expr_id];
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, expr.position(),
 
                            &format!(
 
                                "Could not fully infer the type of polymorphic variable {} of this expression (got '{}')",
 
                                poly_idx, poly_type.display_name(&ctx.heap)
 
                            )
 
                        ))
 
                    }
 

	
 
                    let mut concrete_type = ConcreteType::default();
 
                    poly_type.write_concrete_type(&mut concrete_type);
 
                    monomorph_types.push(concrete_type);
 
                }
 

	
 
                // Resolve to the appropriate expression and instantiate 
 
                // monomorphs.
 
                match &ctx.heap[*expr_id] {
 
                    Expression::Call(call_expr) => {
 
                        // Add to type table if not yet typechecked
 
                        if let Method::Symbolic(symbolic) = &call_expr.method {
 
                            let definition_id = symbolic.definition.unwrap();
 
                            if !ctx.types.has_monomorph(&definition_id, &monomorph_types) {
 
                                let root_id = ctx.types
 
                                    .get_base_definition(&definition_id)
 
                                    .unwrap()
 
                                    .ast_root;
 

	
 
                                // Queue the monomorph for typechecking; it is
 
                                // added to the type table once it has been resolved
 
                                // TODO: Unsure about this, performance wise
 
                                let queue_element = ResolveQueueElement{
 
                                    root_id,
 
                                    definition_id,
 
                                    monomorph_types,
 
                                };
 
                                if !queue.contains(&queue_element) {
 
                                    queue.push(queue_element);
 
                                }
 
                            }
 
                        }
 
                    },
 
                    Expression::Literal(lit_expr) => {
 
                        let definition_id = match &lit_expr.value {
 
                            Literal::Struct(literal) => literal.definition.as_ref().unwrap(),
 
                            Literal::Enum(literal) => literal.definition.as_ref().unwrap(),
 
                            _ => unreachable!("post-inference monomorph for non-struct, non-enum literal")
 
                        };
 
                        if !ctx.types.has_monomorph(definition_id, &monomorph_types) {
 
                            ctx.types.add_monomorph(definition_id, monomorph_types);
 
                        }
 
                    },
 
                    _ => unreachable!("needs fully inference, but not a struct literal or call expression")
 
                }
 
            } // else: was just a helper structure...
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError> {
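        // Dispatch to the progression routine for this specific expression kind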
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let id = expr.this;
 
                self.progress_assignment_expr(ctx, id)
 
            },
 
            Expression::Conditional(expr) => {
 
                let id = expr.this;
 
                self.progress_conditional_expr(ctx, id)
 
            },
 
            Expression::Binary(expr) => {
 
                let id = expr.this;
 
                self.progress_binary_expr(ctx, id)
 
            },
 
            Expression::Unary(expr) => {
 
                let id = expr.this;
 
                self.progress_unary_expr(ctx, id)
 
            },
 
            Expression::Indexing(expr) => {
 
                let id = expr.this;
 
                self.progress_indexing_expr(ctx, id)
 
            },
 
            Expression::Slicing(expr) => {
 
                let id = expr.this;
 
                self.progress_slicing_expr(ctx, id)
 
            },
 
            Expression::Select(expr) => {
 
                let id = expr.this;
 
                self.progress_select_expr(ctx, id)
 
            },
 
            Expression::Array(expr) => {
 
                let id = expr.this;
 
                self.progress_array_expr(ctx, id)
 
            },
 
            Expression::Literal(expr) => {
 
                let id = expr.this;
 
                self.progress_literal_expr(ctx, id)
 
            },
 
            Expression::Call(expr) => {
 
                let id = expr.this;
 
                self.progress_call_expr(ctx, id)
 
            },
 
            Expression::Variable(expr) => {
 
                let id = expr.this;
 
                self.progress_variable_expr(ctx, id)
 
            }
 
        }
 
    }
 

	
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> Result<(), ParseError> {
 
        use AssignmentOperator as AO;
 

	
 
        // TODO: Assignable check
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.left;
 
        let arg2_expr_id = expr.right;
 

	
 
        debug_log!("Assignment expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let progress_base = match expr.operation {
 
            AO::Set =>
 
                false,
 
            AO::Multiplied | AO::Divided | AO::Added | AO::Subtracted =>
 
                self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?,
 
            AO::Remained | AO::ShiftedLeft | AO::ShiftedRight |
 
            AO::BitwiseAnded | AO::BitwiseXored | AO::BitwiseOred =>
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?,
 
        };
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_base || progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 

	
 
        if progress_base || progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> Result<(), ParseError> {
 
        // Note: test expression type is already enforced
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.true_expression;
 
        let arg2_expr_id = expr.false_expression;
 

	
 
        debug_log!("Conditional expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> Result<(), ParseError> {
 
        // Note: our expression type might be fixed by our parent, but we still
 
        // need to make sure it matches the type associated with our operation.
 
        use BinaryOperator as BO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_id = expr.left;
 
        let arg2_id = expr.right;
 

	
 
        debug_log!("Binary expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = match expr.operation {
 
            BO::Concatenate => {
 
                // Arguments may be arrays/slices, output is always an array
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &ARRAYLIKE_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &ARRAYLIKE_TEMPLATE)?;
 

	
 
                // If they're all arraylike, then we want the subtype to match
 
                let (subtype_expr, subtype_arg1, subtype_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 1)?;
 

	
 
                (progress_expr || subtype_expr, progress_arg1 || subtype_arg1, progress_arg2 || subtype_arg2)
 
            },
 
            BO::LogicalOr | BO::LogicalAnd => {
 
                // Forced boolean on all
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &BOOL_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &BOOL_TEMPLATE)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::BitwiseOr | BO::BitwiseXor | BO::BitwiseAnd | BO::Remainder | BO::ShiftLeft | BO::ShiftRight => {
 
                // All equal of integer type
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
            BO::Equality | BO::Inequality => {
 
                // Equal2 on args, forced boolean output
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let (progress_arg1, progress_arg2) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, arg1_id, 0, arg2_id, 0)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::LessThan | BO::GreaterThan | BO::LessThanEqual | BO::GreaterThanEqual => {
 
                // Equal2 on args with numberlike type, forced boolean output
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg_base = self.apply_forced_constraint(ctx, arg1_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_arg1, progress_arg2) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, arg1_id, 0, arg2_id, 0)?;
 

	
 
                (progress_expr, progress_arg_base || progress_arg1, progress_arg_base || progress_arg2)
 
            },
 
            BO::Add | BO::Subtract | BO::Multiply | BO::Divide => {
 
                // All equal of number type
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_id); }
 
        if progress_arg2 { self.queue_expr(arg2_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> Result<(), ParseError> {
 
        use UnaryOperation as UO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg_id = expr.expression;
 

	
 
        debug_log!("Unary expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg  type: {}", self.expr_types.get(&arg_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg) = match expr.operation {
 
            UO::Positive | UO::Negative => {
 
                // Equal types of numeric class
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::BitwiseNot | UO::PreIncrement | UO::PreDecrement | UO::PostIncrement | UO::PostDecrement => {
 
                // Equal types of integer class
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::LogicalNot => {
 
                // Both booleans
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg = self.apply_forced_constraint(ctx, arg_id, &BOOL_TEMPLATE)?;
 
                (progress_expr, progress_arg)
 
            }
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg  type [{}]: {}", progress_arg, self.expr_types.get(&arg_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg { self.queue_expr(arg_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let subject_id = expr.subject;
 
        let index_id = expr.index;
 

	
 
        debug_log!("Indexing expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Subject type: {}", self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Index   type: {}", self.expr_types.get(&index_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // Make sure subject is arraylike and index is integerlike
 
        let progress_subject_base = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
        let progress_index = self.apply_forced_constraint(ctx, index_id, &INTEGERLIKE_TEMPLATE)?;
 

	
 
        // Make sure if output is of T then subject is Array<T>
 
        let (progress_expr, progress_subject) =
 
            self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, subject_id, 1)?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject_base || progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Index   type [{}]: {}", progress_index, self.expr_types.get(&index_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_subject_base || progress_subject { self.queue_expr(subject_id); }
 
        if progress_index { self.queue_expr(index_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let subject_id = expr.subject;
 
        let from_id = expr.from_index;
 
        let to_id = expr.to_index;
 

	
 
        debug_log!("Slicing expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Subject type: {}", self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - FromIdx type: {}", self.expr_types.get(&from_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - ToIdx   type: {}", self.expr_types.get(&to_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // Make sure subject is arraylike and indices are of equal integerlike
 
        let progress_subject_base = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
        let progress_idx_base = self.apply_forced_constraint(ctx, from_id, &INTEGERLIKE_TEMPLATE)?;
 
        let (progress_from, progress_to) = self.apply_equal2_constraint(ctx, upcast_id, from_id, 0, to_id, 0)?;
 

	
 
        // Make sure if output is of T then subject is Array<T>
 
        let (progress_expr, progress_subject) =
 
            self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, subject_id, 1)?;
 

	
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject_base || progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - FromIdx type [{}]: {}", progress_idx_base || progress_from, self.expr_types.get(&from_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - ToIdx   type [{}]: {}", progress_idx_base || progress_to, self.expr_types.get(&to_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_subject_base || progress_subject { self.queue_expr(subject_id); }
 
        if progress_idx_base || progress_from { self.queue_expr(from_id); }
 
        if progress_idx_base || progress_to { self.queue_expr(to_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        
 
        debug_log!("Select expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Subject type: {}", self.expr_types.get(&ctx.heap[id].subject).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let expr = &mut ctx.heap[id];
 
        let subject_id = expr.subject;
 

	
 
        fn determine_inference_type_instance<'a>(types: &'a TypeTable, infer_type: &InferenceType) -> Result<Option<&'a DefinedType>, ()> {
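            // Looks for the first concrete part of the inference type: returns
 
            // Ok(Some(definition)) if it is a user-defined type instance,
 
            // Err(()) if it is concrete but not an instance, and Ok(None) if
 
            // nothing is concrete yet.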
 
            for part in &infer_type.parts {
 
                if part.is_marker() || !part.is_concrete() {
 
                    continue;
 
                }
 

	
 
                // Part is concrete, check if it is an instance of something
 
                if let InferenceTypePart::Instance(definition_id, _num_sub) = part {
 
                    // Lookup type definition and ensure the specified field 
 
                    // name exists on the struct
 
                    let definition = types.get_base_definition(definition_id);
 
                    debug_assert!(definition.is_some());
 
                    let definition = definition.unwrap();
 

	
 
                    return Ok(Some(definition))
 
                } else {
 
                    // Expected an instance of something
 
                    return Err(())
 
                }
 
            }
 

	
 
            // Nothing is concrete yet
 
            Ok(None)
 
        }
 

	
 
        let (progress_subject, progress_expr) = match &mut expr.field {
 
            Field::Length => {
 
                let progress_subject = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 

	
 
                (progress_subject, progress_expr)
 
            },
 
            Field::Symbolic(field) => {
 
                // Retrieve the struct definition id and field index if possible 
 
                // and not previously determined
 
                if field.definition.is_none() {
 
                    // Not yet known, check if we can determine it
 
                    let subject_type = self.expr_types.get(&subject_id).unwrap();
 
                    let type_def = determine_inference_type_instance(&ctx.types, subject_type);
 

	
 
                    match type_def {
 
                        Ok(Some(type_def)) => {
 
                            // Subject type is known, check if it is a 
 
                            // struct and the field exists on the struct
 
                            let struct_def = if let DefinedTypeVariant::Struct(struct_def) = &type_def.definition {
 
                                struct_def
 
                            } else {
 
                                return Err(ParseError::new_error(
 
                                    &ctx.module.source, field.identifier.position,
 
                                    &format!(
 
                                        "Can only apply field access to structs, got a subject of type '{}'",
 
                                        subject_type.display_name(&ctx.heap)
 
                                    )
 
                                ));
 
                            };
 

	
 
                            for (field_def_idx, field_def) in struct_def.fields.iter().enumerate() {
 
                                if field_def.identifier == field.identifier {
 
                                    // Set field definition and index
 
                                    field.definition = Some(type_def.ast_definition);
 
                                    field.field_idx = field_def_idx;
 
                                    break;
 
                                }
 
                            }
 

	
 
                            if field.definition.is_none() {
 
                                let field_position = field.identifier.position;
 
                                let ast_struct_def = ctx.heap[type_def.ast_definition].as_struct();
 
                                return Err(ParseError::new_error(
 
                                    &ctx.module.source, field_position,
 
                                    &format!(
 
                                        "This field does not exist on the struct '{}'",
 
                                        &String::from_utf8_lossy(&ast_struct_def.identifier.value)
 
                                    )
 
                                ))
 
                            }
 

	
 
                            // Encountered definition and field index for the
 
                            // first time
 
                            self.insert_initial_select_polymorph_data(ctx, id);
 
                        },
 
                        Ok(None) => {
 
                            // Type of subject is not yet known, so we 
 
                            // cannot make any progress yet
 
                            return Ok(())
 
                        },
 
                        Err(()) => {
 
                            return Err(ParseError::new_error(
 
                                &ctx.module.source, field.identifier.position,
 
                                &format!(
 
                                    "Can only apply field access to structs, got a subject of type '{}'",
 
                                    subject_type.display_name(&ctx.heap)
 
                                )
 
                            ));
 
                        }
 
                    }
 
                }
 

	
 
                // If here then field definition and index are known, and the
 
                // initial type (based on the struct's definition) has been
 
                // applied.
 
                // Check to see if we can infer anything about the subject's and
 
                // the field's polymorphic variables
 
                let poly_data = self.extra_data.get_mut(&upcast_id).unwrap();
 
                let mut poly_progress = HashSet::new();
 
                
 
                // Apply to struct's type
 
                let signature_type: *mut _ = &mut poly_data.embedded[0];
 
                let subject_type: *mut _ = self.expr_types.get_mut(&subject_id).unwrap();
 

	
 
                let (_, progress_subject) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, Some(subject_id), poly_data, &mut poly_progress,
 
                    signature_type, 0, subject_type, 0
 
                )?;
 

	
 
                if progress_subject {
 
                    self.expr_queued.insert(subject_id);
 
                }
 
                
 
                // Apply to field's type
 
                let signature_type: *mut _ = &mut poly_data.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, None, poly_data, &mut poly_progress, 
 
                    signature_type, 0, expr_type, 0
 
                )?;
 

	
 
                if progress_expr {
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 

	
 
                // Reapply progress in polymorphic variables to struct's type
 
                let signature_type: *mut _ = &mut poly_data.embedded[0];
 
                let subject_type: *mut _ = self.expr_types.get_mut(&subject_id).unwrap();
 
                
 
                let progress_subject = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    poly_data, &poly_progress, signature_type, subject_type
 
                );
 

	
 
                let signature_type: *mut _ = &mut poly_data.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    poly_data, &poly_progress, signature_type, expr_type
 
                );
 

	
 
                (progress_subject, progress_expr)
 
            }
 
        };
 

	
 
        if progress_subject { self.queue_expr(subject_id); }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let expr_elements = expr.elements.clone(); // TODO: @performance
 

	
 
        debug_log!("Array expr ({} elements): {}", expr_elements.len(), upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // All elements should have an equal type
 
        let progress = self.apply_equal_n_constraint(ctx, upcast_id, &expr_elements)?;
 
        for (progress_arg, arg_id) in progress.iter().zip(expr_elements.iter()) {
 
            if *progress_arg {
 
                self.queue_expr(*arg_id);
 
            }
 
        }
 

	
 
        // And the output should be an array of the element types
 
        let mut expr_progress = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
        if !expr_elements.is_empty() {
 
            let first_arg_id = expr_elements[0];
 
            let (inner_expr_progress, arg_progress) = self.apply_equal2_constraint(
 
                ctx, upcast_id, upcast_id, 1, first_arg_id, 0
 
            )?;
 

	
 
            expr_progress = expr_progress || inner_expr_progress;
 

	
 
            // Note that if the array type progressed the type of the arguments,
 
            // then we should enqueue this progression function again
 
            // TODO: @fix Make apply_equal_n accept a start idx as well
 
            if arg_progress { self.queue_expr(upcast_id); }
 
        }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type [{}]: {}", expr_progress, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if expr_progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 

	
 
        debug_log!("Literal expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let progress_expr = match &expr.value {
 
            Literal::Null => {
 
                self.apply_forced_constraint(ctx, upcast_id, &MESSAGE_TEMPLATE)?
 
            },
 
            Literal::Integer(_) => {
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?
 
            },
 
            Literal::True | Literal::False => {
 
                self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?
 
            },
 
            Literal::Character(_) => todo!("character literals"),
 
            Literal::Struct(data) => {
 
                let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 
                for poly in &extra.poly_vars {
 
                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
 
                }
 
                let mut poly_progress = HashSet::new();
 
                debug_assert_eq!(extra.embedded.len(), data.fields.len());
 

	
 
                debug_log!(" * During (inferring types from fields and struct type):");
 

	
 
                // Mutually infer field signature/expression types
 
                for (field_idx, field) in data.fields.iter().enumerate() {
 
                    let field_expr_id = field.value;
 
                    let signature_type: *mut _ = &mut extra.embedded[field_idx];
 
                    let field_type: *mut _ = self.expr_types.get_mut(&field_expr_id).unwrap();
 
                    let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
                        ctx, upcast_id, Some(field_expr_id), extra, &mut poly_progress,
 
                        signature_type, 0, field_type, 0
 
                    )?;
 

	
 
                    debug_log!(
 
                        "   - Field {} type | sig: {}, field: {}", field_idx,
 
                        unsafe{&*signature_type}.display_name(&ctx.heap),
 
                        unsafe{&*field_type}.display_name(&ctx.heap)
 
                    );
 

	
 
                    if progress_arg {
 
                        self.expr_queued.insert(field_expr_id);
 
                    }
 
                }
 

	
 
                debug_log!("   - Field poly progress | {:?}", poly_progress);
 

	
 
                // Same for the type of the struct itself
 
                let signature_type: *mut _ = &mut extra.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
                let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, None, extra, &mut poly_progress,
 
                    signature_type, 0, expr_type, 0
 
                )?;
 

	
 
                debug_log!(
 
                    "   - Ret type | sig: {}, expr: {}",
 
                    unsafe{&*signature_type}.display_name(&ctx.heap),
 
                    unsafe{&*expr_type}.display_name(&ctx.heap)
 
                );
 
                debug_log!("   - Ret poly progress | {:?}", poly_progress);
 

	
 
                if progress_expr {
 
                    // TODO: @cleanup, cannot call utility self.queue_parent thingo
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 

	
 
                // Check which expressions use the polymorphic arguments. If the
 
                // polymorphic variables have been progressed then we try to 
 
                // progress them inside the expression as well.
 
                debug_log!(" * During (reinferring from progressed polyvars):");
 

	
 
                // For all field expressions
 
                for field_idx in 0..extra.embedded.len() {
 
                    debug_assert_eq!(field_idx, data.fields[field_idx].field_idx, "confusing, innit?");
 
                    let signature_type: *mut _ = &mut extra.embedded[field_idx];
 
                    let field_expr_id = data.fields[field_idx].value;
 
                    let field_type: *mut _ = self.expr_types.get_mut(&field_expr_id).unwrap();
 

	
 
                    let progress_arg = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                        extra, &poly_progress, signature_type, field_type
 
                    );
 

	
 
                    debug_log!(
 
                        "   - Field {} type | sig: {}, field: {}", field_idx,
 
                        unsafe{&*signature_type}.display_name(&ctx.heap),
 
                        unsafe{&*field_type}.display_name(&ctx.heap)
 
                    );
 
                    if progress_arg {
 
                        self.expr_queued.insert(field_expr_id);
 
                    }
 
                }
 
                
 
                // For the return type
 
                let signature_type: *mut _ = &mut extra.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(
                    &ctx.heap, extra, &poly_progress, signature_type, expr_type
                );
 

	
 
                progress_expr
 
            },
 
            Literal::Enum(_) => {
 
                let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 
                for poly in &extra.poly_vars {
 
                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
 
                }
 
                let mut poly_progress = HashSet::new();
 
                
 
                debug_log!(" * During (inferring types from return type)");
 

	
 
                let signature_type: *mut _ = &mut extra.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
                let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, None, extra, &mut poly_progress,
 
                    signature_type, 0, expr_type, 0
 
                )?;
 

	
 
                debug_log!(
 
                    "   - Ret type | sig: {}, expr: {}",
 
                    unsafe{&*signature_type}.display_name(&ctx.heap),
 
                    unsafe{&*expr_type}.display_name(&ctx.heap)
 
                );
 

	
 
                if progress_expr {
 
                    // TODO: @cleanup
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 

	
 
                debug_log!(" * During (reinferring from progress polyvars):");
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(
 
                    &ctx.heap, extra, &poly_progress, signature_type, expr_type
 
                );
 

	
 
                progress_expr
 
            }
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // TODO: FIX!!!!
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    // TODO: @cleanup, see how this can be cleaned up once I implement
 
    //  polymorphic struct/enum/union literals. These likely follow the same
 
    //  pattern as here.
 
    fn progress_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 

	
 
        debug_log!("Call expr '{}': {}", match &expr.method {
 
            Method::Create => String::from("create"),
 
            Method::Fires => String::from("fires"),
 
            Method::Get => String::from("get"),
 
            Method::Put => String::from("put"),
 
            Method::Symbolic(method) => String::from_utf8_lossy(&method.identifier.value).to_string()
 
        }, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 
        debug_log!(" * During (inferring types from arguments and return type):");
 

	
 
        // Check if we can make progress using the arguments and/or return types
 
        // while keeping track of the polyvars we've extended
 
        let mut poly_progress = HashSet::new();
 
        debug_assert_eq!(extra.embedded.len(), expr.arguments.len());
 

	
 
        for (arg_idx, arg_id) in expr.arguments.clone().into_iter().enumerate() {
 
            let signature_type: *mut _ = &mut extra.embedded[arg_idx];
 
            let argument_type: *mut _ = self.expr_types.get_mut(&arg_id).unwrap();
 
            let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
                ctx, upcast_id, Some(arg_id), extra, &mut poly_progress,
 
                signature_type, 0, argument_type, 0
 
            )?;
 

	
 
            debug_log!(
 
                "   - Arg {} type | sig: {}, arg: {}", arg_idx,
 
                unsafe{&*signature_type}.display_name(&ctx.heap), 
 
                unsafe{&*argument_type}.display_name(&ctx.heap));
 

	
 
            if progress_arg {
 
                // Progressed argument expression
 
                self.expr_queued.insert(arg_id);
 
            }
 
        }
 

	
 
        // Do the same for the return type
 
        let signature_type: *mut _ = &mut extra.returned;
 
        let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
        let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
            ctx, upcast_id, None, extra, &mut poly_progress,
 
            signature_type, 0, expr_type, 0
 
        )?;
 

	
 
        debug_log!(
 
            "   - Ret type | sig: {}, expr: {}", 
 
            unsafe{&*signature_type}.display_name(&ctx.heap), 
 
            unsafe{&*expr_type}.display_name(&ctx.heap)
 
        );
 

	
 
        if progress_expr {
 
            // TODO: @cleanup, cannot call utility self.queue_parent thingo
 
            if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                self.expr_queued.insert(parent_id);
 
            }
 
        }
 

	
 
        // If we did not have an error in the polymorph inference above, then
 
        // reapplying the polymorph type to each argument type and the return
 
        // type should always succeed.
 
        debug_log!(" * During (reinferring from progressed polyvars):");
 
        for (poly_idx, poly_var) in extra.poly_vars.iter().enumerate() {
 
            debug_log!("   - Poly {} | sig: {}", poly_idx, poly_var.display_name(&ctx.heap));
 
        }
 
        // TODO: @performance If the algorithm is changed to be more "on demand
 
        //  argument re-evaluation", instead of "all-argument re-evaluation",
 
        //  then this is no longer true
 
        for arg_idx in 0..extra.embedded.len() {
 
            let signature_type: *mut _ = &mut extra.embedded[arg_idx];
 
            let arg_expr_id = expr.arguments[arg_idx];
 
            let arg_type: *mut _ = self.expr_types.get_mut(&arg_expr_id).unwrap();
 
            
 
            let progress_arg = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                extra, &poly_progress,
 
                signature_type, arg_type
 
            );
 
            
 
            debug_log!(
 
                "   - Arg {} type | sig: {}, arg: {}", arg_idx, 
 
                unsafe{&*signature_type}.display_name(&ctx.heap), 
 
                unsafe{&*arg_type}.display_name(&ctx.heap)
 
            );
 
            if progress_arg {
 
                self.expr_queued.insert(arg_expr_id);
 
            }
 
        }
 

	
 
        // Once more for the return type
 
        let signature_type: *mut _ = &mut extra.returned;
 
        let ret_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
        let progress_ret = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
            extra, &poly_progress, signature_type, ret_type
 
        );
 
        debug_log!(
 
            "   - Ret type | sig: {}, arg: {}", 
 
            unsafe{&*signature_type}.display_name(&ctx.heap), 
 
            unsafe{&*ret_type}.display_name(&ctx.heap)
 
        );
 
        if progress_ret {
 
            self.queue_expr_parent(ctx, upcast_id);
 
        }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let var_expr = &ctx.heap[id];
 
        let var_id = var_expr.declaration.unwrap();
 

	
 
        debug_log!("Variable expr '{}': {}", &String::from_utf8_lossy(&ctx.heap[var_id].identifier().value), upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Var  type: {}", self.var_types.get(&var_id).unwrap().var_type.display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // Retrieve shared variable type and expression type and apply inference
 
        let var_data = self.var_types.get_mut(&var_id).unwrap();
 
        let expr_type = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            &mut var_data.var_type as *mut _, 0, expr_type, 0
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            let var_decl = &ctx.heap[var_id];
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, var_decl.position(),
 
                &format!(
 
                    "Conflicting types for this variable, previously assigned the type '{}'",
 
                    var_data.var_type.display_name(&ctx.heap)
 
                )
 
            ).with_postfixed_info(
 
                &ctx.module.source, var_expr.position,
 
                &format!(
 
                    "But inferred to have incompatible type '{}' here",
 
                    expr_type.display_name(&ctx.heap)
 
                )
 
            ))
 
        }
 

	
 
        let progress_var = infer_res.modified_lhs();
 
        let progress_expr = infer_res.modified_rhs();
 

	
 
        if progress_var {
 
            // Let other variable expressions using this type progress as well
 
            for other_expr in var_data.used_at.iter() {
 
                if *other_expr != upcast_id {
 
                    self.expr_queued.insert(*other_expr);
 
                }
 
            }
 

	
 
            // Let a linked port know that our type has updated
 
            if let Some(linked_id) = var_data.linked_var {
 
                // Only perform one-way inference to prevent updating our own
                // type; updating it here would lead to an inconsistency.
 
                let var_type: *mut _ = &mut var_data.var_type;
 
                let link_data = self.var_types.get_mut(&linked_id).unwrap();
 

	
 
                debug_assert!(
 
                    unsafe{&*var_type}.parts[0] == InferenceTypePart::Input ||
 
                    unsafe{&*var_type}.parts[0] == InferenceTypePart::Output
 
                );
 
                debug_assert!(
 
                    link_data.var_type.parts[0] == InferenceTypePart::Input ||
 
                    link_data.var_type.parts[0] == InferenceTypePart::Output
 
                );
 
                match InferenceType::infer_subtree_for_single_type(&mut link_data.var_type, 1, &unsafe{&*var_type}.parts, 1) {
 
                    SingleInferenceResult::Modified => {
 
                        for other_expr in &link_data.used_at {
 
                            self.expr_queued.insert(*other_expr);
 
                        }
 
                    },
 
                    SingleInferenceResult::Unmodified => {},
 
                    SingleInferenceResult::Incompatible => {
 
                        let var_data = self.var_types.get(&var_id).unwrap();
 
                        let link_data = self.var_types.get(&linked_id).unwrap();
 
                        let var_decl = &ctx.heap[var_id];
 
                        let link_decl = &ctx.heap[linked_id];
 

	
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, var_decl.position(),
 
                            &format!(
 
                                "Conflicting types for this variable, assigned the type '{}'",
 
                                var_data.var_type.display_name(&ctx.heap)
 
                            )
 
                        ).with_postfixed_info(
 
                            &ctx.module.source, link_decl.position(),
 
                            &format!(
 
                                "Because it is incompatible with this variable, assigned the type '{}'",
 
                                link_data.var_type.display_name(&ctx.heap)
 
                            )
 
                        ));
 
                    }
 
                }
 
            }
 
        }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Var  type [{}]: {}", progress_var, self.var_types.get(&var_id).unwrap().var_type.display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 

	
 
        Ok(())
 
    }
 

	
 
    fn queue_expr_parent(&mut self, ctx: &Ctx, expr_id: ExpressionId) {
 
        if let ExpressionParent::Expression(parent_expr_id, _) = &ctx.heap[expr_id].parent() {
 
            self.expr_queued.insert(*parent_expr_id);
 
        }
 
    }
 

	
 
    fn queue_expr(&mut self, expr_id: ExpressionId) {
 
        self.expr_queued.insert(expr_id);
 
    }
 

	
 
    /// Applies a forced type constraint: the type associated with the supplied
 
    /// expression will be molded into the provided "template". The template may
 
    /// be fully specified (e.g. a bool) or contain "inference" variables (e.g.
 
    /// an array of T)
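    ///
    /// Illustrative example: applying the boolean template to an expression
    /// whose type is still unknown progresses that expression to a boolean,
    /// whereas applying it to an expression already inferred to be an array
    /// yields an `Incompatible` result and hence a type error.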
 
    fn apply_forced_constraint(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> Result<bool, ParseError> {
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id);
 
        let expr_type = self.expr_types.get_mut(&expr_id).unwrap();
 
        match InferenceType::infer_subtree_for_single_type(expr_type, 0, template, 0) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(
 
                self.construct_template_type_error(ctx, expr_id, template)
 
            )
 
        }
 
    }
 

	
 
    fn apply_forced_constraint_types(
 
        to_infer: *mut InferenceType, to_infer_start_idx: usize,
 
        template: &[InferenceTypePart], template_start_idx: usize
 
    ) -> Result<bool, ()> {
 
        match InferenceType::infer_subtree_for_single_type(
 
            unsafe{ &mut *to_infer }, to_infer_start_idx,
 
            template, template_start_idx
 
        ) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(()),
 
        }
 
    }
 

	
 
    /// Applies a type constraint that expects the two provided types to be
 
    /// equal. We attempt to make progress in inferring the types. If the call
 
    /// is successful then the composition of all types is made equal.
 
    /// The "parent" `expr_id` is provided to construct errors.
 
    fn apply_equal2_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg1_start_idx: usize,
 
        arg2_id: ExpressionId, arg2_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError> {
 
        debug_assert_expr_ids_unique_and_known!(self, arg1_id, arg2_id);
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            arg1_type, arg1_start_idx,
 
            arg2_type, arg2_start_idx
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        Ok((infer_res.modified_lhs(), infer_res.modified_rhs()))
 
    }
 

	
 
    /// Applies an equal2 constraint between a signature type (e.g. a function
 
    /// argument or struct field) and an expression whose type should match that
    /// signature. If we make progress on the signature, then we try to see if
 
    /// any of the embedded polymorphic types can be progressed.
 
    ///
 
    /// `outer_expr_id` is the main expression we're progressing (e.g. a 
 
    /// function call), while `expr_id` is the embedded expression we're 
 
    /// matching against the signature. `expression_type` and 
 
    /// `expression_start_idx` belong to `expr_id`.
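    ///
    /// Illustrative example: while progressing a call `f(a, b)`, the call is
    /// `outer_expr_id`, `expr_id` is `Some(a)` or `Some(b)` when matching an
    /// argument against its parameter's signature type, and `None` when
    /// matching the call's own type against the signature's return type.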
 
    fn apply_equal2_signature_constraint(
 
        ctx: &Ctx, outer_expr_id: ExpressionId, expr_id: Option<ExpressionId>,
 
        polymorph_data: &mut ExtraData, polymorph_progress: &mut HashSet<usize>,
 
        signature_type: *mut InferenceType, signature_start_idx: usize,
 
        expression_type: *mut InferenceType, expression_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError> {
 
        // Safety: cannot mutually infer the same types
 
        //         polymorph_data containers may not be modified
 
        debug_assert_ptrs_distinct!(signature_type, expression_type);
 

	
 
        // Infer the signature and expression type
 
        let infer_res = unsafe { 
 
            InferenceType::infer_subtrees_for_both_types(
 
                signature_type, signature_start_idx,
 
                expression_type, expression_start_idx
 
            ) 
 
        };
 

	
 
        if infer_res == DualInferenceResult::Incompatible {
 
            // TODO: Check if I still need to use this
 
            let outer_position = ctx.heap[outer_expr_id].position();
 
            let (position_name, position) = match expr_id {
 
                Some(expr_id) => ("argument's", ctx.heap[expr_id].position()),
 
                None => ("type's", outer_position)
 
            };
 
            let (signature_display_type, expression_display_type) = unsafe { (
 
                (&*signature_type).display_name(&ctx.heap),
 
                (&*expression_type).display_name(&ctx.heap)
 
            ) };
 

	
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, outer_position,
 
                "Failed to fully resolve the types of this expression"
 
            ).with_postfixed_info(
 
                &ctx.module.source, position,
 
                &format!(
 
                    "Because the {} signature has been resolved to '{}', but the expression has been resolved to '{}'",
 
                    position_name, signature_display_type, expression_display_type
 
                )
 
            ));
 
        }
 

	
 
        // Try to see if we can progress any of the polymorphic variables
 
        let progress_sig = infer_res.modified_lhs();
 
        let progress_expr = infer_res.modified_rhs();
 

	
 
        if progress_sig {
 
            let signature_type = unsafe{&mut *signature_type};
 
            debug_assert!(
 
                signature_type.has_body_marker, 
 
                "made progress on signature type, but it doesn't have a marker"
 
            );
 
            for (poly_idx, poly_section) in signature_type.body_marker_iter() {
 
                let polymorph_type = &mut polymorph_data.poly_vars[poly_idx];
 
                match Self::apply_forced_constraint_types(
 
                    polymorph_type, 0, poly_section, 0
 
                ) {
 
                    Ok(true) => { polymorph_progress.insert(poly_idx); },
 
                    Ok(false) => {},
 
                    Err(()) => { return Err(Self::construct_poly_arg_error(ctx, polymorph_data, outer_expr_id))}
 
                }
 
            }
 
        }
 
        Ok((progress_sig, progress_expr))
 
    }
 

	
 
    /// Applies equal2 constraints on the signature type for each of the 
 
    /// polymorphic variables. If the signature type is progressed then we 
 
    /// progress the expression type as well.
 
    ///
 
    /// This function assumes that the polymorphic variables have already been
 
    /// progressed as far as possible by calling 
 
    /// `apply_equal2_signature_constraint`. As such, we expect to not encounter
 
    /// any errors.
 
    ///
 
    /// This function returns true if the expression's type has been progressed
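    ///
    /// Illustrative example: if the signature contains a body marker for
    /// polymorphic variable 0 followed by a still-unknown subtree, and that
    /// variable has meanwhile been inferred as `int`, then the marked section
    /// is forced to `int` and the progressed signature is reapplied to the
    /// matching expression type.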
 
    fn apply_equal2_polyvar_constraint(
 
        heap: &Heap,
 
        polymorph_data: &ExtraData, _polymorph_progress: &HashSet<usize>,
 
        signature_type: *mut InferenceType, expr_type: *mut InferenceType
 
    ) -> bool {
 
        // Safety: all pointers should be distinct
 
        //         polymorph_data containers may not be modified
 
        debug_assert_ptrs_distinct!(signature_type, expr_type);
 
        let signature_type = unsafe{&mut *signature_type};
 
        let expr_type = unsafe{&mut *expr_type};
 

	
 
        // Iterate through markers in signature type to try and make progress
 
        // on the polymorphic variable        
 
        let mut seek_idx = 0;
 
        let mut modified_sig = false;
 
        
 
        while let Some((poly_idx, start_idx)) = signature_type.find_body_marker(seek_idx) {
 
            let end_idx = InferenceType::find_subtree_end_idx(&signature_type.parts, start_idx);
 
            // if polymorph_progress.contains(&poly_idx) {
 
                // Need to match subtrees
 
                let polymorph_type = &polymorph_data.poly_vars[poly_idx];
 
                debug_log!("   - DEBUG: Applying {} to '{}' from '{}'", polymorph_type.display_name(heap), InferenceType::partial_display_name(heap, &signature_type.parts[start_idx..]), signature_type.display_name(heap));
 
                let modified_at_marker = Self::apply_forced_constraint_types(
 
                    signature_type, start_idx, 
 
                    &polymorph_type.parts, 0
 
                ).expect("no failure when applying polyvar constraints");
 

	
 
                modified_sig = modified_sig || modified_at_marker;
 
            // }
 

	
 
            seek_idx = end_idx;
 
        }
 

	
 
        // If we made any progress on the signature's type, then we also need to
 
        // apply it to the expression that is supposed to match the signature.
 
        if modified_sig {
 
            match InferenceType::infer_subtree_for_single_type(
 
                expr_type, 0, &signature_type.parts, 0
 
            ) {
 
                SingleInferenceResult::Modified => true,
 
                SingleInferenceResult::Unmodified => false,
 
                SingleInferenceResult::Incompatible =>
 
                    unreachable!("encountered failure while reapplying modified signature to expression after polyvar inference")
 
            }
 
        } else {
 
            false
 
        }
 
    }
 

	
 
    /// Applies a type constraint that expects all three provided types to be
 
    /// equal. In case we can make progress in inferring the types then we
 
    /// attempt to do so. If the call is successful then the composition of all
 
    /// types is made equal.
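    ///
    /// Illustrative example: for an expression whose result and two operands
    /// must all share one type, progress made while unifying the two operands
    /// is copied back onto the expression's type so that all three types
    /// remain identical.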
 
    fn apply_equal3_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId,
 
        start_idx: usize
 
    ) -> Result<(bool, bool, bool), ParseError> {
 
        // Safety: all expression IDs are always distinct, and we do not modify
 
        //  the container
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id, arg1_id, arg2_id);
 
        let expr_type: *mut _ = self.expr_types.get_mut(&expr_id).unwrap();
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 

	
 
        let expr_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(expr_type, start_idx, arg1_type, start_idx)
 
        };
 
        if expr_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_expr_type_error(ctx, expr_id, arg1_id));
 
        }
 

	
 
        let args_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(arg1_type, start_idx, arg2_type, start_idx) };
 
        if args_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        // If all types are compatible, but the second call caused the arg1_type
 
        // to be expanded, then we must also assign this to expr_type.
 
        let mut progress_expr = expr_res.modified_lhs();
 
        let mut progress_arg1 = expr_res.modified_rhs();
 
        let progress_arg2 = args_res.modified_rhs();
 

	
 
        if args_res.modified_lhs() { 
 
            unsafe {
 
                let end_idx = InferenceType::find_subtree_end_idx(&(*arg2_type).parts, start_idx);
 
                let subtree = &((*arg2_type).parts[start_idx..end_idx]);
 
                (*expr_type).replace_subtree(start_idx, subtree);
 
            }
 
            progress_expr = true;
 
            progress_arg1 = true;
 
        }
 

	
 
        Ok((progress_expr, progress_arg1, progress_arg2))
 
    }
 

	
 
    // TODO: @optimize Since we only deal with a single type this might be done
 
    //  a lot more efficiently, methinks (disregarding the allocations here)
 
    fn apply_equal_n_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId, args: &[ExpressionId],
 
    ) -> Result<Vec<bool>, ParseError> {
 
        // Early exit
 
        match args.len() {
 
            0 => return Ok(vec!()),         // nothing to progress
 
            1 => return Ok(vec![false]),    // only one type, so nothing to infer
 
            _ => {}
 
        }
 

	
 
        let mut progress = Vec::new();
 
        progress.resize(args.len(), false);
 

	
 
        // Do pairwise inference, keep track of the last entry we made progress
 
        // on. Once done we need to update everything to the most-inferred type.
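        // Illustrative example: for arguments [a, b, c, d] we infer a~b, b~c
        // and c~d in order. If the c~d step progressed c, then a and b may
        // still be stale, so afterwards everything before the last progressed
        // left-hand side is overwritten with the final, most-inferred type.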
 
        let mut arg_iter = args.iter();
 
        let mut last_arg_id = *arg_iter.next().unwrap();
 
        let mut last_lhs_progressed = 0;
 
        let mut lhs_arg_idx = 0;
 

	
 
        while let Some(next_arg_id) = arg_iter.next() {
 
            let arg1_type: *mut _ = self.expr_types.get_mut(&last_arg_id).unwrap();
 
            let arg2_type: *mut _ = self.expr_types.get_mut(next_arg_id).unwrap();
 

	
 
            let res = unsafe {
 
                InferenceType::infer_subtrees_for_both_types(arg1_type, 0, arg2_type, 0)
 
            };
 

	
 
            if res == DualInferenceResult::Incompatible {
 
                return Err(self.construct_arg_type_error(ctx, expr_id, last_arg_id, *next_arg_id));
 
            }
 

	
 
            if res.modified_lhs() {
 
                // We re-inferred something on the left hand side, so everything
 
                // up until now should be re-inferred.
 
                progress[lhs_arg_idx] = true;
 
                last_lhs_progressed = lhs_arg_idx;
 
            }
 
            progress[lhs_arg_idx + 1] = res.modified_rhs();
 

	
 
            last_arg_id = *next_arg_id;
 
            lhs_arg_idx += 1;
 
        }
 

	
 
        // Re-infer everything. Note that we do not need to re-infer the type
 
        // exactly at `last_lhs_progressed`, but only everything up to it.
 
        let last_type: *mut _ = self.expr_types.get_mut(args.last().unwrap()).unwrap();
 
        for arg_idx in 0..last_lhs_progressed {
 
            let arg_type: *mut _ = self.expr_types.get_mut(&args[arg_idx]).unwrap();
 
            unsafe{
 
                (*arg_type).replace_subtree(0, &(*last_type).parts);
 
            }
 
            progress[arg_idx] = true;
 
        }
 

	
 
        Ok(progress)
 
    }
 

	
 
    /// Determines the `InferenceType` for the expression based on the
 
    /// expression parent. Note that if the parent is another expression, we do
 
    /// not take special action; instead, we let parent expressions fix the type
 
    /// of subexpressions before they have a chance to call this function.
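    ///
    /// Illustrative example: an expression used as the test of an `if` or
    /// `while` statement starts out forced to a boolean, an expression nested
    /// inside another expression starts out fully unknown, and an expression
    /// in a `return` statement starts out as the function's return type.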
 
    fn insert_initial_expr_inference_type(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId
 
    ) -> Result<(), ParseError> {
 
        use ExpressionParent as EP;
 
        use InferenceTypePart as ITP;
 

	
 
        let expr = &ctx.heap[expr_id];
 
        let inference_type = match expr.parent() {
 
            EP::None =>
 
                // Should have been set by linker
 
                unreachable!(),
 
            EP::ExpressionStmt(_) | EP::Expression(_, _) =>
 
                // Determined during type inference
 
                InferenceType::new(false, false, vec![ITP::Unknown]),
 
            EP::If(_) | EP::While(_) | EP::Assert(_) =>
 
                // Must be a boolean
 
                InferenceType::new(false, true, vec![ITP::Bool]),
 
            EP::Return(_) =>
 
                // Must match the return type of the function
 
                if let DefinitionType::Function(func_id) = self.definition_type {
 
                    let return_parser_type_id = ctx.heap[func_id].return_type;
 
                    self.determine_inference_type_from_parser_type(ctx, return_parser_type_id, true)
 
                } else {
 
                    // Cannot happen: definition always set upon body traversal
 
                    // and "return" calls in components are illegal.
 
                    unreachable!();
 
                },
 
            EP::New(_) =>
 
                // Must be a component call, which we assign a "Void" return
 
                // type
 
                InferenceType::new(false, true, vec![ITP::Void]),
 
        };
 

	
 
        match self.expr_types.entry(expr_id) {
 
            Entry::Vacant(vacant) => {
 
                vacant.insert(inference_type);
 
            },
 
            Entry::Occupied(mut preexisting) => {
 
                // We already have an entry, this happens if our parent fixed
 
                // our type (e.g. we're used in a conditional expression's test)
 
                // but we have a different type.
 
                // TODO: Is this ever called? Seems like it can't
 
                debug_assert!(false, "I am actually called, my ID is {}", expr_id.index);
 
                let old_type = preexisting.get_mut();
 
                if let SingleInferenceResult::Incompatible = InferenceType::infer_subtree_for_single_type(
 
                    old_type, 0, &inference_type.parts, 0
 
                ) {
 
                    return Err(self.construct_expr_type_error(ctx, expr_id, expr_id))
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn insert_initial_call_polymorph_data(
 
        &mut self, ctx: &mut Ctx, call_id: CallExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Note: the polymorph variables may be partially specified and may
 
        // contain references to the wrapping definition's (i.e. the proctype
 
        // we are currently visiting) polymorphic arguments.
 
        //
 
        // The arguments of the call may refer to polymorphic variables in the
 
        // definition of the function we're calling, not of the wrapping
 
        // definition. We insert markers in these inferred types to be able to
 
        // map them back and forth to the polymorphic arguments of the function
 
        // we are calling.
 
        let call = &ctx.heap[call_id];
 

	
 
        // Handle the polymorphic variables themselves
 
        let mut poly_vars = Vec::with_capacity(call.poly_args.len());
 
        for poly_arg_type_id in call.poly_args.clone() { // TODO: @performance
 
            poly_vars.push(self.determine_inference_type_from_parser_type(ctx, poly_arg_type_id, true));
 
        }
 

	
 
        // Handle the arguments
 
        // TODO: @cleanup: Maybe factor this out for reuse in the validator/linker, should also
 
        //  make the code slightly more robust.
 
        let (embedded_types, return_type) = match &call.method {
 
            Method::Create => {
 
                // Not polymorphic
 
                (
 
                    vec![InferenceType::new(false, true, vec![ITP::Int])],
 
                    InferenceType::new(false, true, vec![ITP::Message, ITP::Byte])
 
                )
 
            },
 
            Method::Fires => {
 
                // bool fires<T>(PortLike<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::PortLike, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(false, true, vec![ITP::Bool])
 
                )
 
            },
 
            Method::Get => {
 
                // T get<T>(input<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::Input, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                )
 
            },
 
            Method::Put => {
 
                // void Put<T>(output<T> port, T msg)
 
                (
 
                    vec![
 
                        InferenceType::new(true, false, vec![ITP::Output, ITP::MarkerBody(0), ITP::Unknown]),
 
                        InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                    ],
 
                    InferenceType::new(false, true, vec![ITP::Void])
 
                )
 
            }
 
            Method::Symbolic(symbolic) => {
 
                let definition = &ctx.heap[symbolic.definition.unwrap()];
 

	
 
                match definition {
 
                    Definition::Component(definition) => {
 
                        debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            let param_parser_type_id = param.parser_type;
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, param_parser_type_id, false));
 
                        }
 

	
 
                        (parameter_types, InferenceType::new(false, true, vec![InferenceTypePart::Void]))
 
                    },
 
                    Definition::Function(definition) => {
 
                        debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            let param_parser_type_id = param.parser_type;
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, param_parser_type_id, false));
 
                        }
 

	
 
                        let return_type = self.determine_inference_type_from_parser_type(ctx, definition.return_type, false);
 
                        (parameter_types, return_type)
 
                    },
 
                    Definition::Struct(_) | Definition::Enum(_) => {
 
                        unreachable!("insert initial polymorph data for struct/enum");
 
                    }
 
                }
 
            }
 
        };
 

	
 
        self.extra_data.insert(call_id.upcast(), ExtraData {
 
            poly_vars,
 
            embedded: embedded_types,
 
            returned: return_type
 
        });
 
    }
 

	
 
    fn insert_initial_struct_polymorph_data(
 
        &mut self, ctx: &mut Ctx, lit_id: LiteralExpressionId,
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_struct();
 

	
 
        // Handle polymorphic arguments
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args2.len());
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args2.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            ); 
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        }
 

	
 
        // Handle parser types on struct definition
 
        let definition = &ctx.heap[literal.definition.unwrap()];
 
        let definition = match definition {
 
            Definition::Struct(definition) => {
 
                debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                definition
 
            },
 
            _ => unreachable!("definition for struct literal does not point to struct definition")
 
        };
 

	
 
        // Note: programmer is capable of specifying fields in a struct literal
 
        // in a different order than on the definition. We take the literal-
        // specified order to be leading.
 
        let mut embedded_types = Vec::with_capacity(definition.fields.len());
 
        for lit_field in literal.fields.iter() {
 
            let def_field = &definition.fields[lit_field.field_idx];
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, def_field.parser_type, false
 
            );
 
            embedded_types.push(inference_type);
 
        }
 

	
 
        // Return type is the struct type itself, with the appropriate 
 
        // polymorphic variables. So:
 
        // - 1 part for definition
 
        // - one marker part per polymorphic argument (N_poly_arg in total)
 
        // - all the parts for the currently known polymorphic arguments 
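        //
        // Illustrative example: for a hypothetical struct `Pair<B, T>` with
        // the first polymorphic argument specified as `byte` and the second
        // left to be inferred, the parts become roughly:
        //   Instance(Pair, 2), MarkerBody(0), Byte, MarkerBody(1), Unknown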
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(definition.this.upcast(), poly_vars.len()));
 
        let mut return_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
            if !poly_var.is_done { return_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let return_type = InferenceType::new(true, return_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars, 
 
            embedded: embedded_types,
 
            returned: return_type,
 
        });
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct for enum expressions. These
    /// carry only the polymorphic variables and the returned enum type; there
    /// are no embedded (field/argument) types.
 
    fn insert_initial_enum_polymorph_data(
 
        &mut self, ctx: &Ctx, lit_id: LiteralExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_enum();
 

	
 
        // Handle polymorphic arguments to the enum
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args2.len());
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args2.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            );
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        }
 

	
 
        // Handle enum type itself
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(literal.definition.unwrap(), poly_vars.len()));
 
        let mut enum_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
            if !poly_var.is_done { enum_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let enum_type = InferenceType::new(true, enum_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars,
 
            embedded: Vec::new(),
 
            returned: enum_type,
 
        });
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct. Assumes that the select
 
    /// expression's referenced (definition_id, field_idx) has been resolved.
 
    fn insert_initial_select_polymorph_data(
 
        &mut self, ctx: &Ctx, select_id: SelectExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Retrieve relevant data
 
        let expr = &ctx.heap[select_id];
 
        let field = expr.field.as_symbolic();
 

	
 
        let definition_id = field.definition.unwrap();
 
        let definition = ctx.heap[definition_id].as_struct();
 
        let field_idx = field.field_idx;
 

	
 
        // Generate initial polyvar types and struct type
 
        let num_poly_vars = definition.poly_vars.len();
 
        let mut poly_vars = Vec::with_capacity(num_poly_vars);
 
        let struct_parts_reserved = 1 + 2 * num_poly_vars;
 
        let mut struct_parts = Vec::with_capacity(struct_parts_reserved);
 
        struct_parts.push(ITP::Instance(definition_id, num_poly_vars));        
 

	
 
        for poly_idx in 0..num_poly_vars {
 
            poly_vars.push(InferenceType::new(true, false, vec![
 
                ITP::MarkerBody(poly_idx), ITP::Unknown,
 
            ]));
 
            struct_parts.push(ITP::MarkerBody(poly_idx));
 
            struct_parts.push(ITP::Unknown);
 
        }
 
        debug_assert_eq!(struct_parts.len(), struct_parts_reserved);
 

	
 
        // Generate initial field type
 
        let field_type = self.determine_inference_type_from_parser_type(
 
            ctx, definition.fields[field_idx].parser_type, false
 
        );
 

	
 
        self.extra_data.insert(select_id.upcast(), ExtraData{
 
            poly_vars,
 
            embedded: vec![InferenceType::new(true, false, struct_parts)],
 
            returned: field_type
 
        });
 
    }
 

	
 
    /// Determines the initial InferenceType from the provided ParserType. This
 
    /// may be called with two kinds of intentions:
 
    /// 1. To resolve a ParserType within the body of a function, or on
 
    ///     polymorphic arguments to calls/instantiations within that body. This
 
    ///     means that the polymorphic variables are known and can be replaced
 
    ///     with the monomorph we're instantiating.
 
    /// 2. To resolve a ParserType on a called function's definition or on
 
    ///     an instantiated datatype's members. This means that the polymorphic
 
    ///     arguments inside those ParserTypes refer to the polymorphic
 
    ///     variables in the called/instantiated type's definition.
 
    /// In the second case we place InferenceTypePart::Marker instances such
 
    /// that we can perform type inference on the polymorphic variables.
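    ///
    /// Illustrative example: a polymorphic variable `T` that is already bound
    /// in the current body (say, to `int`) becomes a definition marker
    /// followed by the concrete `int` parts, whereas the same `T` appearing
    /// in a called function's signature becomes a body marker followed by an
    /// unknown part, to be inferred later.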
 
    fn determine_inference_type_from_parser_type(
 
        &mut self, ctx: &Ctx, parser_type_id: ParserTypeId,
 
        parser_type_in_body: bool
 
    ) -> InferenceType {
 
        use ParserTypeVariant as PTV;
 
        use InferenceTypePart as ITP;
 

	
 
        let mut to_consider = VecDeque::with_capacity(16);
 
        to_consider.push_back(parser_type_id);
 

	
 
        let mut infer_type = Vec::new();
 
        let mut has_inferred = false;
 
        let mut has_markers = false;
 

	
 
        while !to_consider.is_empty() {
 
            let parser_type_id = to_consider.pop_front().unwrap();
 
            let parser_type = &ctx.heap[parser_type_id];
 
            match &parser_type.variant {
 
                PTV::Message => {
 
                    // TODO: @types Remove the Message -> Byte hack at some point...
 
                    infer_type.push(ITP::Message);
 
                    infer_type.push(ITP::Byte);
 
                },
 
                PTV::Bool => { infer_type.push(ITP::Bool); },
 
                PTV::Byte => { infer_type.push(ITP::Byte); },
 
                PTV::Short => { infer_type.push(ITP::Short); },
 
                PTV::Int => { infer_type.push(ITP::Int); },
 
                PTV::Long => { infer_type.push(ITP::Long); },
 
                PTV::String => { infer_type.push(ITP::String); },
 
                PTV::IntegerLiteral => { unreachable!("integer literal type on variable type"); },
 
                PTV::Inferred => {
 
                    infer_type.push(ITP::Unknown);
 
                    has_inferred = true;
 
                },
 
                PTV::Array(subtype_id) => {
 
                    infer_type.push(ITP::Array);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Input(subtype_id) => {
 
                    infer_type.push(ITP::Input);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Output(subtype_id) => {
 
                    infer_type.push(ITP::Output);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Symbolic(symbolic) => {
 
                    debug_assert!(symbolic.variant.is_some(), "symbolic variant not yet determined");
 
                    match symbolic.variant.as_ref().unwrap() {
 
                        SymbolicParserTypeVariant::PolyArg(_, arg_idx) => {
 
                            let arg_idx = *arg_idx;
 
                            debug_assert!(symbolic.poly_args2.is_empty()); // TODO: @hkt
 

	
 
                            if parser_type_in_body {
 
                                // Polymorphic argument refers to definition's
 
                                // polymorphic variables
 
                                debug_assert!(arg_idx < self.poly_vars.len());
 
                                debug_assert!(!self.poly_vars[arg_idx].has_marker());
 
                                infer_type.push(ITP::MarkerDefinition(arg_idx));
 
                                for concrete_part in &self.poly_vars[arg_idx].parts {
 
                                    infer_type.push(ITP::from(*concrete_part));
 
                                }
 
                            } else {
 
                                // Polymorphic argument has to be inferred
 
                                has_markers = true;
 
                                has_inferred = true;
 
                                infer_type.push(ITP::MarkerBody(arg_idx));
 
                                infer_type.push(ITP::Unknown);
 
                            }
 
                        },
 
                        SymbolicParserTypeVariant::Definition(definition_id) => {
 
                            // TODO: @cleanup
 
                            if cfg!(debug_assertions) {
 
                                let definition = &ctx.heap[*definition_id];
 
                                debug_assert!(definition.is_struct() || definition.is_enum()); // TODO: @function_ptrs
 
                                let num_poly = match definition {
 
                                    Definition::Struct(v) => v.poly_vars.len(),
 
                                    Definition::Enum(v) => v.poly_vars.len(),
 
                                    _ => unreachable!(),
 
                                };
 
                                debug_assert_eq!(symbolic.poly_args2.len(), num_poly);
 
                            }
 

	
 
                            infer_type.push(ITP::Instance(*definition_id, symbolic.poly_args2.len()));
 
                            let mut poly_arg_idx = symbolic.poly_args2.len();
 
                            while poly_arg_idx > 0 {
 
                                poly_arg_idx -= 1;
 
                                to_consider.push_front(symbolic.poly_args2[poly_arg_idx]);
 
                            }
 
                        }
 
                    }
 
                }
 
            }
 
        }
 

	
 
        InferenceType::new(has_markers, !has_inferred, infer_type)
 
    }
 

	
 
    /// Construct an error when an expression's type does not match. This
 
    /// happens if we infer the expression type from its arguments (e.g. the
 
    /// expression type of an addition operator is the type of the arguments)
 
    /// But the expression type was already set due to our parent (e.g. an
 
    /// "if statement" or a "logical not" always expecting a boolean)
 
    fn construct_expr_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, arg_id: ExpressionId
 
    ) -> ParseError {
 
        // TODO: Expand and provide more meaningful information for humans
 
        let expr = &ctx.heap[expr_id];
 
        let arg_expr = &ctx.heap[arg_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 
        let arg_type = self.expr_types.get(&arg_id).unwrap();
 

	
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: this expression expected a '{}'", 
 
                expr_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg_expr.position(),
 
            &format!(
 
                "But this expression yields a '{}'",
 
                arg_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_arg_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let arg1 = &ctx.heap[arg1_id];
 
        let arg2 = &ctx.heap[arg2_id];
 

	
 
        let arg1_type = self.expr_types.get(&arg1_id).unwrap();
 
        let arg2_type = self.expr_types.get(&arg2_id).unwrap();
 

	
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            "Incompatible types: cannot apply this expression"
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg1.position(),
 
            &format!(
 
                "Because this expression has type '{}'",
 
                arg1_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg2.position(),
 
            &format!(
 
                "But this expression has type '{}'",
 
                arg2_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_template_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 

	
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: got a '{}' but expected a '{}'",
 
                expr_type.display_name(&ctx.heap), 
 
                InferenceType::partial_display_name(&ctx.heap, template)
 
            )
 
        )
 
    }
 

	
 
    /// Constructs a human interpretable error in the case that type inference
 
    /// on a polymorphic variable to a function call or literal construction 
 
    /// failed. This may only be caused by a pair of inference types (which may 
 
    /// come from arguments or the return type) having two different inferred 
 
    /// values for that polymorphic variable.
 
    ///
 
    /// So we find this pair and construct the error using it.
 
    ///
 
    /// We assume that the expression is a function call or a struct literal,
 
    /// and that an actual error has occurred.
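    ///
    /// Illustrative example: a call to the builtin `put`, with signature
    /// `void put<T>(output<T> port, T msg)`, where the port argument was
    /// inferred as `output<int>` but the message argument as `bool`, produces
    /// two incompatible sections for `T`; the error points at both.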
 
    fn construct_poly_arg_error(
 
        ctx: &Ctx, poly_data: &ExtraData, expr_id: ExpressionId
 
    ) -> ParseError {
 
        // Helper function to check for polymorph mismatch between two inference
 
        // types.
 
        fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(usize, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            if !type_a.has_body_marker || !type_b.has_body_marker {
 
                return None
 
            }
 

	
 
            for (marker_a, section_a) in type_a.body_marker_iter() {
 
                for (marker_b, section_b) in type_b.body_marker_iter() {
 
                    if marker_a != marker_b {
 
                        // Not the same polymorphic variable
 
                        continue;
 
                    }
 

	
 
                    if !InferenceType::check_subtrees(section_a, 0, section_b, 0) {
 
                        // Not compatible
 
                        return Some((marker_a, section_a, section_b))
 
                    }
 
                }
 
            }
 

	
 
            None
 
        }
 

	
 
        // Helper function to retrieve the polyvar name and definition name
 
        fn get_poly_var_and_func_name(ctx: &Ctx, poly_var_idx: usize, expr: &CallExpression) -> (String, String) {
 
            match &expr.method {
 
                Method::Create => unreachable!(),
 
                Method::Fires => (String::from('T'), String::from("fires")),
 
                Method::Get => (String::from('T'), String::from("get")),
 
                Method::Put => (String::from('T'), String::from("put")),
 
                Method::Symbolic(symbolic) => {
 
                    let definition = &ctx.heap[symbolic.definition.unwrap()];
 
                    let poly_var = match definition {
 
                        Definition::Struct(_) | Definition::Enum(_) => unreachable!(),
 
                        Definition::Function(definition) => {
 
                            String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string()
 
                        },
 
                        Definition::Component(definition) => {
 
                            String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string()
 
                        }
 
                    };
 
                    let func_name = String::from_utf8_lossy(&symbolic.identifier.value).to_string();
 
                    (poly_var, func_name)
 
                }
 
            }
 
        }
 

	
 
        fn get_poly_var_and_type_name(ctx: &Ctx, poly_var_idx: usize, definition_id: DefinitionId) -> (String, String) {
 
            let definition = &ctx.heap[definition_id];
 
            match definition {
 
                Definition::Enum(_) | Definition::Function(_) | Definition::Component(_) =>
 
                    unreachable!("get_poly_var_and_type_name called on non-struct value"),
 
                Definition::Struct(definition) => (
 
                    String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string(),
 
                    String::from_utf8_lossy(&definition.identifier.value).to_string()
 
                ),
 
            }
 
        }
 

	
 
        // Helper function to construct initial error
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &Expression) -> ParseError {
 
            match expr {
 
                Expression::Call(expr) => {
 
                    let (poly_var, func_name) = get_poly_var_and_func_name(ctx, poly_var_idx, expr);
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' of '{}'",
 
                            poly_var, func_name
 
                        )
 
                    )
 
                },
 
                Expression::Literal(expr) => {
 
                    let lit_struct = expr.value.as_struct();
 
                    let (poly_var, struct_name) = get_poly_var_and_type_name(ctx, poly_var_idx, lit_struct.definition.unwrap());
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' of instantiation of '{}'",
 
                            poly_var, struct_name
 
                        )
 
                    )
 
                },
 
                Expression::Select(expr) => {
 
                    let field = expr.field.as_symbolic();
 
                    let (poly_var, struct_name) = get_poly_var_and_type_name(ctx, poly_var_idx, field.definition.unwrap());
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' while accessing field '{}' of '{}'",
 
                            poly_var, &String::from_utf8_lossy(&field.identifier.value), struct_name
 
                        )
 
                    )
 
                }
 
                _ => unreachable!("called construct_poly_arg_error without a call/literal expression")
 
            }
 
        }
 

	
 
        // Actual checking
 
        let expr = &ctx.heap[expr_id];
 
        let (expr_args, expr_return_name) = match expr {
 
            Expression::Call(expr) => 
 
                (
 
                    expr.arguments.clone(),
 
                    "return type"
 
                ),
 
            Expression::Literal(expr) => 
 
                (
 
                    expr.value.as_struct().fields
 
                        .iter()
 
                        .map(|f| f.value)
 
                        .collect(),
 
                    "literal"
 
                ),
 
            Expression::Select(expr) =>
 
                // Select expression uses the polymorphic variables of the 
 
                // struct it is accessing, so get the subject expression.
 
                (
 
                    vec![expr.subject],
 
                    "selected field"
 
                ),
 
            _ => unreachable!(),
 
        };
 

	
 
        // - check return type with itself
 
        if let Some((poly_idx, section_a, section_b)) = has_poly_mismatch(
 
            &poly_data.returned, &poly_data.returned
 
        ) {
 
            return construct_main_error(ctx, poly_idx, expr)
 
                .with_postfixed_info(
 
                    &ctx.module.source, expr.position(),
 
                    &format!(
 
                        "The {} inferred the conflicting types '{}' and '{}'",
 
                        expr_return_name,
 
                        InferenceType::partial_display_name(&ctx.heap, section_a),
 
                        InferenceType::partial_display_name(&ctx.heap, section_b)
 
                    )
 
                )
 
        }
 

	
 
        // - check arguments with each other argument and with return type
 
        for (arg_a_idx, arg_a) in poly_data.embedded.iter().enumerate() {
 
            for (arg_b_idx, arg_b) in poly_data.embedded.iter().enumerate() {
src/protocol/parser/type_table.rs
 
/**
 
TypeTable
 

	
 
Contains the type table: a data structure that, when compilation succeeds,
 
contains a concrete type definition for each AST type definition. In general
 
terms the type table will go through the following phases during the compilation
 
process:
 

	
 
1. The base type definitions are resolved after the parser phase has
 
    finished. This implies that the AST is fully constructed, but not yet
 
    annotated.
 
2. With the base type definitions resolved, the validation/linker phase will
 
    use the type table (together with the symbol table) to disambiguate
 
    terms (e.g. does an expression refer to a variable, an enum, a constant,
 
    etc.)
 
3. During the type checking/inference phase the type table is used to ensure
 
    that the AST contains valid use of types in expressions and statements.
 
    At the same time type inference will find concrete instantiations of
 
    polymorphic types; these will be stored in the type table as monomorphed
 
    instantiations of a generic type.
 
4. After type checking and inference (and possibly when constructing byte
 
    code) the type table will construct a type graph and solidify each
 
    non-polymorphic type and monomorphed instantiations of polymorphic types
 
    into concrete types.
 

	
 
So a base type is defined by its (optionally polymorphic) representation in the
 
AST. A concrete type has concrete types for each of the polymorphic arguments. A
 
struct, enum or union may have polymorphic arguments but not actually be a
 
polymorphic type. This happens when the polymorphic arguments are not used in
 
the type definition itself. The same holds for functions/components, except there we just
 
check the arguments/return type of the signature.
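
As a purely illustrative example: a struct definition with one polymorphic
variable `T` is a base type. Instantiating it with, say, `T = u32` during type
inference produces a concrete type, which is stored as a monomorph of that
base type.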
 

	
 
Apart from base types and concrete types, we also use the term "embedded type"
 
for types that are embedded within another type, such as the type of a
 
struct field or of a union variant. Embedded types may themselves have
 
polymorphic arguments and therefore form an embedded type tree.
 

	
 
NOTE: for now a polymorphic definition of a function/component is illegal if the
 
    polymorphic arguments are not used in the arguments/return type. It should
 
    be legal, but we disallow it for now.
 

	
 
TODO: Allow potentially cyclic datatypes and reject truly cyclic datatypes.
 
TODO: Allow for the full potential of polymorphism
 
TODO: Detect "true" polymorphism: for datatypes like structs/enum/unions this
 
    is simple. For functions we need to check the entire body. Do it here? Or
 
    do it somewhere else?
 
TODO: Do we want to check fn argument collision here, or in validation phase?
 
TODO: Make type table an on-demand thing instead of constructing all base types.
 
TODO: Cleanup everything, feels like a lot can be written cleaner and with less
 
    assumptions on each function call.
 
// TODO: Review all comments
 
*/
 

	
 
use std::fmt::{Formatter, Result as FmtResult};
 
use std::collections::{HashMap, VecDeque};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::parser::symbol_table::{SymbolTable, Symbol};
 
use crate::protocol::inputsource::*;
 
use crate::protocol::parser::*;
 

	
 
//------------------------------------------------------------------------------
 
// Defined Types
 
//------------------------------------------------------------------------------
 

	
 
#[derive(Copy, Clone, PartialEq, Eq)]
 
pub enum TypeClass {
 
    Enum,
 
    Union,
 
    Struct,
 
    Function,
 
    Component
 
}
 

	
 
impl TypeClass {
 
    pub(crate) fn display_name(&self) -> &'static str {
 
        match self {
 
            TypeClass::Enum => "enum",
 
            TypeClass::Union => "union",
 
            TypeClass::Struct => "struct",
 
            TypeClass::Function => "function",
 
            TypeClass::Component => "component",
 
        }
 
    }
 

	
 
    pub(crate) fn is_data_type(&self) -> bool {
 
        *self == TypeClass::Enum || *self == TypeClass::Union || *self == TypeClass::Struct
 
    }
 

	
 
    pub(crate) fn is_proc_type(&self) -> bool {
 
        *self == TypeClass::Function || *self == TypeClass::Component
 
    }
 
}
 

	
 
impl std::fmt::Display for TypeClass {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
 
        write!(f, "{}", self.display_name())
 
    }
 
}
 

	
 
/// Struct wrapping around a potentially polymorphic type. If the type does not
 
/// have any polymorphic arguments then it will not have any monomorphs and
 
/// `is_polymorph` will be set to `false`. A type with polymorphic arguments
 
/// only has `is_polymorph` set to `true` if the polymorphic arguments actually
 
/// appear in the type's associated types (function return argument, struct
 
/// field, enum variant, etc.). Otherwise the polymorphic argument is just a
 
/// marker and does not influence the bytesize of the type.
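///
/// For example (illustrative): a struct declared with a polymorphic variable
/// `T` whose only field is a plain `u32` still has an entry in `poly_vars`,
/// but `is_polymorph` is `false` because `T` does not appear in any field
/// type.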
 
pub struct DefinedType {
 
    pub(crate) ast_root: RootId,
 
    pub(crate) ast_definition: DefinitionId,
 
    pub(crate) definition: DefinedTypeVariant,
 
    pub(crate) poly_vars: Vec<PolyVar>,
 
    pub(crate) is_polymorph: bool,
 
    pub(crate) is_pointerlike: bool,
 
    // TODO: @optimize
 
    pub(crate) monomorphs: Vec<Vec<ConcreteType>>,
 
}
 

	
 
impl DefinedType {
 
    fn add_monomorph(&mut self, types: Vec<ConcreteType>) {
 
        debug_assert!(!self.has_monomorph(&types), "monomorph already exists");
 
        self.monomorphs.push(types);
 
    }
 

	
 
    pub(crate) fn has_any_monomorph(&self) -> bool {
 
        !self.monomorphs.is_empty()
 
    }
 

	
 
    pub(crate) fn has_monomorph(&self, types: &Vec<ConcreteType>) -> bool {
 
        debug_assert_eq!(self.poly_vars.len(), types.len(), "mismatch in number of polymorphic types");
 
        for monomorph in &self.monomorphs {
 
            if monomorph == types { return true; }
 
        }
 

	
 
        return false;
 
    }
 
}
 

	
 
pub enum DefinedTypeVariant {
 
    Enum(EnumType),
 
    Union(UnionType),
 
    Struct(StructType),
 
    Function(FunctionType),
 
    Component(ComponentType)
 
}
 

	
 
pub struct PolyVar {
 
    identifier: Identifier,
 
    /// Whether the polymorphic variable is used directly in the definition of
 
    /// the type (not including bodies of function/component types)
 
    is_in_use: bool,
 
}
 

	
 
impl DefinedTypeVariant {
 
    pub(crate) fn type_class(&self) -> TypeClass {
 
        match self {
 
            DefinedTypeVariant::Enum(_) => TypeClass::Enum,
 
            DefinedTypeVariant::Union(_) => TypeClass::Union,
 
            DefinedTypeVariant::Struct(_) => TypeClass::Struct,
 
            DefinedTypeVariant::Function(_) => TypeClass::Function,
 
            DefinedTypeVariant::Component(_) => TypeClass::Component
 
        }
 
    }
 

	
 
    pub(crate) fn as_struct(&self) -> &StructType {
 
        match self {
 
            DefinedTypeVariant::Struct(v) => v,
 
            _ => unreachable!("Cannot convert {} to struct variant", self.type_class())
 
        }
 
    }
 

	
 
    pub(crate) fn as_enum(&self) -> &EnumType {
 
        match self {
 
            DefinedTypeVariant::Enum(v) => v,
 
            _ => unreachable!("Cannot convert {} to enum variant", self.type_class())
 
        }
 
    }
 
}
 

	
 
/// `EnumType` is the classical C/C++ enum type. It has various variants with
 
/// an assigned integer value. The integer values may be user-defined,
 
/// compiler-defined, or a mix of the two. If a user assigns the same enum
 
/// value multiple times, we assume the user is an expert and we consider both
 
/// variants to be equal to one another.
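/// For instance (illustrative): two variants that are both explicitly assigned
/// the value `1` are accepted, and the two variants are treated as equal.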
 
pub struct EnumType {
 
    pub(crate) variants: Vec<EnumVariant>,
 
    pub(crate) representation: PrimitiveType,
 
}
 

	
 
// TODO: Also support maximum u64 value
 
pub struct EnumVariant {
 
    pub(crate) identifier: Identifier,
 
    pub(crate) value: i64,
 
}
 

	
 
/// `UnionType` is the algebraic datatype (or sum type, or discriminated union).
 
/// A value is an element of the union, identified by its tag, and may contain
 
/// a single subtype.
 
pub struct UnionType {
 
    variants: Vec<UnionVariant>,
 
    tag_representation: PrimitiveType
 
}
 

	
 
pub struct UnionVariant {
 
    identifier: Identifier,
 
    parser_type: Option<ParserTypeId>,
 
    tag_value: i64,
 
}
 

	
 
pub struct StructType {
 
    pub(crate) fields: Vec<StructField>,
 
}
 

	
 
pub struct StructField {
 
    pub(crate) identifier: Identifier,
 
    pub(crate) parser_type: ParserTypeId,
 
}
 

	
 
pub struct FunctionType {
 
    pub return_type: ParserTypeId,
 
    pub arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct ComponentType {
 
    pub variant: ComponentVariant,
 
    pub arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct FunctionArgument {
 
    identifier: Identifier,
 
    parser_type: ParserTypeId,
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Type table
 
//------------------------------------------------------------------------------
 

	
 
// TODO: @cleanup Do I really need this, doesn't make the code that much cleaner
 
struct TypeIterator {
 
    breadcrumbs: Vec<(RootId, DefinitionId)>
 
}
 

	
 
impl TypeIterator {
 
    fn new() -> Self {
 
        Self{ breadcrumbs: Vec::with_capacity(32) }
 
    }
 

	
 
    fn reset(&mut self, root_id: RootId, definition_id: DefinitionId) {
 
        self.breadcrumbs.clear();
 
        self.breadcrumbs.push((root_id, definition_id))
 
    }
 

	
 
    fn push(&mut self, root_id: RootId, definition_id: DefinitionId) {
 
        self.breadcrumbs.push((root_id, definition_id));
 
    }
 

	
 
    fn contains(&self, root_id: RootId, definition_id: DefinitionId) -> bool {
 
        for (stored_root_id, stored_definition_id) in self.breadcrumbs.iter() {
 
            if *stored_root_id == root_id && *stored_definition_id == definition_id { return true; }
 
        }
 

	
 
        return false
 
    }
 

	
 
    fn top(&self) -> Option<(RootId, DefinitionId)> {
 
        self.breadcrumbs.last().map(|(r, d)| (*r, *d))
 
    }
 

	
 
    fn pop(&mut self) {
 
        debug_assert!(!self.breadcrumbs.is_empty());
 
        self.breadcrumbs.pop();
 
    }
 
}
 

	
 
/// Result from attempting to resolve a `ParserType` using the symbol table and
 
/// the type table.
 
enum ResolveResult {
 
    /// ParserType is a builtin type
 
    BuiltIn,
 
    /// ParserType points to a polymorphic argument, contains the index of the
 
    /// polymorphic argument in the outermost definition (e.g. we may have 
 
    /// structs nested three levels deep, but in the innermost struct we can 
 
    /// only use the polyargs that are specified in the type definition of the
 
    /// outermost struct).
 
    PolyArg(usize),
 
    /// ParserType points to a user-defined type that is already resolved in the
 
    /// type table.
 
    Resolved((RootId, DefinitionId)),
 
    /// ParserType points to a user-defined type that is not yet resolved into
 
    /// the type table.
 
    Unresolved((RootId, DefinitionId))
 
}
 

	
 
pub(crate) struct TypeTable {
 
    /// Lookup from AST DefinitionId to a defined type. Considering possible
 
    /// polymorphs is done inside the `DefinedType` struct.
 
    lookup: HashMap<DefinitionId, DefinedType>,
 
    /// Iterator over `(module, definition)` tuples used as workspace to make sure
 
    /// that the base definitions of all of a type's subtypes are resolved.
 
    iter: TypeIterator,
 
    /// Iterator over `parser type`s during the process where `parser types` are
 
    /// resolved into a `(module, definition)` tuple.
 
    parser_type_iter: VecDeque<ParserTypeId>,
 
}
 

	
 
pub(crate) struct TypeCtx<'a> {
 
    symbols: &'a SymbolTable,
 
    heap: &'a mut Heap,
 
    modules: &'a [LexedModule]
 
}
 

	
 
impl<'a> TypeCtx<'a> {
 
    pub(crate) fn new(symbols: &'a SymbolTable, heap: &'a mut Heap, modules: &'a [LexedModule]) -> Self {
 
        Self{ symbols, heap, modules }
 
    }
 
}
 

	
 
impl TypeTable {
 
    /// Construct a new type table without any resolved types.
 
    pub(crate) fn new() -> Self {
 
        Self{ 
 
            lookup: HashMap::new(), 
 
            iter: TypeIterator::new(), 
 
            parser_type_iter: VecDeque::with_capacity(64), 
 
        }
 
    }
 

	
 
    pub(crate) fn build_base_types(&mut self, ctx: &mut TypeCtx) -> Result<(), ParseError> {
 
        // Make sure we're allowed to cast root_id to index into ctx.modules
 
        debug_assert!(self.lookup.is_empty());
 
        debug_assert!(self.iter.top().is_none());
 
        debug_assert!(self.parser_type_iter.is_empty());
 

	
 
        if cfg!(debug_assertions) {
 
            for (index, module) in ctx.modules.iter().enumerate() {
 
                debug_assert_eq!(index, module.root_id.index as usize);
 
            }
 
        }
 

	
 
        // Use context to guess hashmap size
 
        let reserve_size = ctx.heap.definitions.len();
 
        self.lookup.reserve(reserve_size);
 

	
 
        // TODO: @cleanup Rework this hack
 
        for root_idx in 0..ctx.modules.len() {
 
            let last_definition_idx = ctx.heap[ctx.modules[root_idx].root_id].definitions.len();
 
            for definition_idx in 0..last_definition_idx {
 
                let definition_id = ctx.heap[ctx.modules[root_idx].root_id].definitions[definition_idx];
 
                self.resolve_base_definition(ctx, definition_id)?;
 
            }
 
        }
 

	
 
        debug_assert_eq!(self.lookup.len(), reserve_size, "mismatch in reserved size of type table");
 

	
 
        Ok(())
 
    }
 

	
 
    /// Retrieves base definition from type table. We must be able to retrieve
 
    /// it as we resolve all base types upon type table construction (for now).
 
    /// However, in the future we might do on-demand type resolving, so return
 
    /// an option anyway
 
    pub(crate) fn get_base_definition(&self, definition_id: &DefinitionId) -> Option<&DefinedType> {
 
        self.lookup.get(&definition_id)
 
    }
 

	
 
    /// Instantiates a monomorph for a given base definition.
 
    pub(crate) fn add_monomorph(&mut self, definition_id: &DefinitionId, types: Vec<ConcreteType>) {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to instantiate monomorph of definition unknown to type table"
 
        );
 

	
 
        let definition = self.lookup.get_mut(definition_id).unwrap();
 
        definition.add_monomorph(types);
 
    }
 

	
 
    /// Checks if a given definition already has a specific monomorph
 
    pub(crate) fn has_monomorph(&mut self, definition_id: &DefinitionId, types: &Vec<ConcreteType>) -> bool {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to check monomorph existence of definition unknown to type table"
 
        );
 

	
 
        let definition = self.lookup.get(definition_id).unwrap();
 
        definition.has_monomorph(types)
 
    }
 

	
 
    /// This function will resolve just the basic definition of the type, it
 
    /// will not handle any of the monomorphized instances of the type.
 
    fn resolve_base_definition<'a>(&'a mut self, ctx: &mut TypeCtx, definition_id: DefinitionId) -> Result<(), ParseError> {
 
        // Check if we have already resolved the base definition
 
        if self.lookup.contains_key(&definition_id) { return Ok(()); }
 

	
 
        let root_id = Self::find_root_id(ctx, definition_id);
 
        self.iter.reset(root_id, definition_id);
 

	
 
        while let Some((root_id, definition_id)) = self.iter.top() {
 
            // We have a type to resolve
 
            let definition = &ctx.heap[definition_id];
 

	
 
            let can_pop_breadcrumb = match definition {
 
                // TODO: @cleanup Borrow rules hax
 
                Definition::Enum(_) => self.resolve_base_enum_definition(ctx, root_id, definition_id),
 
                Definition::Struct(_) => self.resolve_base_struct_definition(ctx, root_id, definition_id),
 
                Definition::Component(_) => self.resolve_base_component_definition(ctx, root_id, definition_id),
 
                Definition::Function(_) => self.resolve_base_function_definition(ctx, root_id, definition_id),
 
            }?;
 

	
 
            // Otherwise: `ingest_resolve_result` has pushed a new breadcrumb
 
            // that we must follow before we can resolve the current type
 
            if can_pop_breadcrumb {
 
                self.iter.pop();
 
            }
 
        }
 

	
 
        // We must have resolved the type
 
        debug_assert!(self.lookup.contains_key(&definition_id), "base type not resolved");
 
        Ok(())
 
    }
 

	
 
    /// Resolve the basic enum definition to an entry in the type table. It will
 
    /// not instantiate any monomorphized instances of polymorphic enum
 
    /// definitions. If a subtype has to be resolved first then this function
 
    /// will return `false` after calling `ingest_resolve_result`.
 
    fn resolve_base_enum_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_enum());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base enum already resolved");
 
        
 
        let definition = ctx.heap[definition_id].as_enum();
 

	
 
        // Check if the enum should be implemented as a classic enumeration or
 
        // a tagged union. Keep track of variant index for error messages. Make
 
        // sure all embedded types are resolved.
 
        let mut first_tag_value = None;
 
        let mut first_int_value = None;
 
        for variant in &definition.variants {
 
            match &variant.value {
 
                EnumVariantValue::None => {},
 
                EnumVariantValue::Integer(_) => if first_int_value.is_none() {
 
                    first_int_value = Some(variant.position);
 
                },
 
                EnumVariantValue::Type(variant_type_id) => {
 
                    if first_tag_value.is_none() {
 
                        first_tag_value = Some(variant.position);
 
                    }
 

	
 
                    // Check if the embedded type needs to be resolved
 
                    let resolve_result = self.resolve_base_parser_type(ctx, &definition.poly_vars, root_id, *variant_type_id)?;
 
                    if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                        return Ok(false)
 
                    }
 
                }
 
            }
 
        }
 

	
 
        if first_tag_value.is_some() && first_int_value.is_some() {
 
            // Could be supported, but it is almost certainly a programmer mistake, so reject it
 
            let module_source = &ctx.modules[root_id.index as usize].source;
 
            let tag_pos = first_tag_value.unwrap();
 
            let int_pos = first_int_value.unwrap();
 
            return Err(
 
                ParseError::new_error(
 
                    module_source, definition.position,
 
                    "Illegal combination of enum integer variant(s) and enum union variant(s)"
 
                )
 
                    .with_postfixed_info(module_source, int_pos, "Assigning an integer value here")
 
                    .with_postfixed_info(module_source, tag_pos, "Embedding a type in a union variant here")
 
            );
 
        }
 

	
 
        // Enumeration is legal
 
        if first_tag_value.is_some() {
 
            // Implement as a tagged union
 

	
 
            // Determine the union variants
 
            let mut tag_value = -1;
 
            let mut variants = Vec::with_capacity(definition.variants.len());
 
            for variant in &definition.variants {
 
                tag_value += 1;
 
                let parser_type = match &variant.value {
 
                    EnumVariantValue::None => {
 
                        None
 
                    },
 
                    EnumVariantValue::Type(parser_type_id) => {
 
                        // Type should be resolvable, we checked this above
 
                        Some(*parser_type_id)
 
                    },
 
                    EnumVariantValue::Integer(_) => {
 
                        debug_assert!(false, "Encountered `Integer` variant after asserting enum is a discriminated union");
 
                        unreachable!();
 
                    }
 
                };
 

	
 
                variants.push(UnionVariant{
 
                    identifier: variant.identifier.clone(),
 
                    parser_type,
 
                    tag_value,
 
                })
 
            }
 

	
 
            // Ensure union names and polymorphic args do not conflict
 
            self.check_identifier_collision(
 
                ctx, root_id, &variants, |variant| &variant.identifier, "enum variant"
 
            )?;
 
            self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
            let mut poly_args = self.create_initial_poly_vars(&definition.poly_vars);
 
            for variant in &variants {
 
                if let Some(embedded) = variant.parser_type {
 
                    self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, embedded)?;
 
                }
 
            }
 
            let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 

	
 
            // Insert base definition in type table
 
            self.lookup.insert(definition_id, DefinedType {
 
                ast_root: root_id,
 
                ast_definition: definition_id,
 
                definition: DefinedTypeVariant::Union(UnionType{
 
                    variants,
 
                    tag_representation: Self::enum_tag_type(-1, tag_value),
 
                }),
 
                poly_vars: poly_args,
 
                is_polymorph,
 
                is_pointerlike: false, // TODO: @cyclic_types
 
                monomorphs: Vec::new()
 
            });
 
        } else {
 
            // Implement as a regular enum
 
            let mut enum_value = -1;
 
            let mut min_enum_value = 0;
 
            let mut max_enum_value = 0;
 
            let mut variants = Vec::with_capacity(definition.variants.len());
 
            for variant in &definition.variants {
 
                enum_value += 1;
 
                match &variant.value {
 
                    EnumVariantValue::None => {
 
                        variants.push(EnumVariant{
 
                            identifier: variant.identifier.clone(),
 
                            value: enum_value,
 
                        });
 
                    },
 
                    EnumVariantValue::Integer(override_value) => {
 
                        enum_value = *override_value;
 
                        variants.push(EnumVariant{
 
                            identifier: variant.identifier.clone(),
 
                            value: enum_value,
 
                        });
 
                    },
 
                    EnumVariantValue::Type(_) => {
 
                        debug_assert!(false, "Encountered `Type` variant after asserting enum is not a discriminated union");
 
                        unreachable!();
 
                    }
 
                }
 
                if enum_value < min_enum_value { min_enum_value = enum_value; }
 
                else if enum_value > max_enum_value { max_enum_value = enum_value; }
 
            }
 

	
 
            // Ensure enum names and polymorphic args do not conflict
 
            self.check_identifier_collision(
 
                ctx, root_id, &variants, |variant| &variant.identifier, "enum variant"
 
            )?;
 
            self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
            // Note: although we cannot have embedded types dependent on the
 
            // polymorphic variables, they might still be present as tokens
 
            let definition_id = definition.this.upcast();
 
            self.lookup.insert(definition_id, DefinedType {
 
                ast_root: root_id,
 
                ast_definition: definition_id,
 
                definition: DefinedTypeVariant::Enum(EnumType{
 
                    variants,
 
                    representation: Self::enum_tag_type(min_enum_value, max_enum_value)
 
                }),
 
                poly_vars: self.create_initial_poly_vars(&definition.poly_vars),
 
                is_polymorph: false,
 
                is_pointerlike: false,
src/protocol/parser/utils.rs
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use super::symbol_table::*;
 
use super::type_table::*;
 

	
 
/// Utility result type.
 
pub(crate) enum FindTypeResult<'t, 'i> {
 
    // Found the type exactly
 
    Found((&'t DefinedType, NamespacedIdentifierIter<'i>)),
 
    // Could not match symbol
 
    SymbolNotFound{ident_pos: InputPosition},
 
    // Matched part of the namespaced identifier, but not completely
 
    SymbolPartial{ident_pos: InputPosition, ident_iter: NamespacedIdentifierIter<'i>},
 
    // Symbol matched, but points to a namespace/module instead of a type
 
    SymbolNamespace{ident_pos: InputPosition, symbol_pos: InputPosition},
 
}
 

	
 
// TODO: @cleanup Find other uses of this pattern
 
// TODO: Hindsight is 20/20: this belongs in the visitor_linker, not in a 
 
//  separate file.
 
impl<'t, 'i> FindTypeResult<'t, 'i> {
 
    /// Utility function to transform the `FindTypeResult` into a `Result` where
 
    /// `Ok` contains the resolved type, and `Err` contains a `ParseError` which
 
    /// can be readily returned. This is the most common use.
 
    pub(crate) fn as_parse_error(self, module_source: &InputSource) -> Result<(&'t DefinedType, NamespacedIdentifierIter<'i>), ParseError> {
 
        match self {
 
            FindTypeResult::Found(defined_type) => Ok(defined_type),
 
            FindTypeResult::SymbolNotFound{ident_pos} => {
 
                Err(ParseError::new_error(
 
                    module_source, ident_pos,
 
                    "Could not resolve this identifier to a symbol"
 
                ))
 
            },
 
            FindTypeResult::SymbolPartial{ident_pos, ident_iter} => {
 
                Err(ParseError::new_error(
 
                    module_source, ident_pos, 
 
                    &format!(
 
                        "Could not fully resolve this identifier to a symbol, was only able to match '{}'",
 
                        &String::from_utf8_lossy(ident_iter.returned_section())
 
                    )
 
                ))
 
            },
 
            FindTypeResult::SymbolNamespace{ident_pos, symbol_pos} => {
 
                Err(ParseError::new_error(
 
                    module_source, ident_pos,
 
                    "This identifier was resolved to a namespace instead of a type"
 
                ).with_postfixed_info(
 
                    module_source, symbol_pos,
 
                    "This is the referenced namespace"
 
                ))
 
            }
 
        }
 
    }
 
}
 

	
 
/// Attempt to find the type pointed to by a (root, identifier) combination. The
 
/// type must match exactly (no parts in the namespace iterator remaining) and
 
/// must be a type, not a namespace. 
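///
/// A typical call site might look as follows (illustrative sketch; `symbols`,
/// `types`, `root_id`, `identifier` and `module_source` are assumed to be in
/// scope at the caller):
///
/// ```ignore
/// let (defined_type, _ident_iter) =
///     find_type_definition(symbols, types, root_id, &identifier)
///         .as_parse_error(module_source)?;
/// ```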
 
pub(crate) fn find_type_definition<'t, 'i>(
 
    symbols: &SymbolTable, types: &'t TypeTable, 
 
    root_id: RootId, identifier: &'i NamespacedIdentifier
 
) -> FindTypeResult<'t, 'i> {
 
    // Lookup symbol
 
    let (symbol, ident_iter) = symbols.resolve_namespaced_identifier(root_id, identifier);
 
    if symbol.is_none() { 
 
        return FindTypeResult::SymbolNotFound{ident_pos: identifier.position};
 
    }
 
    
 
    // Make sure we resolved it exactly
 
    let symbol = symbol.unwrap();
 
    if ident_iter.num_remaining() != 0 { 
 
        return FindTypeResult::SymbolPartial{
 
            ident_pos: identifier.position,
 
            ident_iter
 
        };
 
    }
 

	
 
    match symbol.symbol {
 
        Symbol::Namespace(_) => {
 
            FindTypeResult::SymbolNamespace{
 
                ident_pos: identifier.position, 
 
                symbol_pos: symbol.position
 
            }
 
        },
 
        Symbol::Definition((_, definition_id)) => {
 
            // If this function is called correctly, then we should always be
 
            // able to match the definition's ID to an entry in the type table.
 
            let definition = types.get_base_definition(&definition_id);
 
            debug_assert!(definition.is_some());
 
            FindTypeResult::Found((definition.unwrap(), ident_iter))
 
        }
 
    }
 
}
 

	
 
pub(crate) enum MatchPolymorphResult<'t> {
 
    Matching,
 
    InferAll(usize),
 
    Mismatch{defined_type: &'t DefinedType, ident_position: InputPosition, num_specified: usize},
 
    NoneExpected{defined_type: &'t DefinedType, ident_position: InputPosition},
 
}
 

	
 
impl<'t> MatchPolymorphResult<'t> {
 
    pub(crate) fn as_parse_error(self, heap: &Heap, module_source: &InputSource) -> Result<usize, ParseError> {
 
        match self {
 
            MatchPolymorphResult::Matching => Ok(0),
 
            MatchPolymorphResult::InferAll(count) => {
 
                debug_assert!(count > 0);
 
                Ok(count)
 
            },
 
            MatchPolymorphResult::Mismatch{defined_type, ident_position, num_specified} => {
 
                let type_identifier = heap[defined_type.ast_definition].identifier();
 
                let args_name = if defined_type.poly_vars.len() == 1 {
 
                    "argument"
 
                } else {
 
                    "arguments"
 
                };
 

	
 
                return Err(ParseError::new_error(
 
                    module_source, ident_position,
 
                    &format!(
 
                        "expected {} polymorphic {} (or none, to infer them) for the type {}, but {} were specified",
 
                        defined_type.poly_vars.len(), args_name, 
 
                        &String::from_utf8_lossy(&type_identifier.value),
 
                        num_specified
 
                    )
 
                ))
 
            },
 
            MatchPolymorphResult::NoneExpected{defined_type, ident_position, ..} => {
 
                let type_identifier = heap[defined_type.ast_definition].identifier();
 
                return Err(ParseError::new_error(
 
                    module_source, ident_position,
 
                    &format!(
 
                        "the type {} is not polymorphic",
 
                        &String::from_utf8_lossy(&type_identifier.value)
 
                    )
 
                ))
 
            }
 
        }
 
    }
 
}
 

	
 
/// Attempt to match the polymorphic arguments to the number of polymorphic
 
/// variables in the definition.
 
pub(crate) fn match_polymorphic_args_to_vars<'t>(
 
    defined_type: &'t DefinedType, poly_args: Option<&[ParserTypeId]>, ident_position: InputPosition
 
) -> MatchPolymorphResult<'t> {
 
    if defined_type.poly_vars.is_empty() {
 
        // No polymorphic variables on type
 
        if poly_args.is_some() {
 
            return MatchPolymorphResult::NoneExpected{
 
                defined_type,
 
                ident_position, 
 
            };
 
        }
 
    } else {
 
        // Polymorphic variables on type
 
        let has_specified = poly_args.map_or(false, |a| a.len() != 0);
 
        if !has_specified {
 
            // Implicitly infer all of the polymorphic arguments
 
            return MatchPolymorphResult::InferAll(defined_type.poly_vars.len());
 
        }
 

	
 
        let num_specified = poly_args.unwrap().len();
 
        if num_specified != defined_type.poly_vars.len() {
 
            return MatchPolymorphResult::Mismatch{
 
                defined_type,
 
                ident_position,
 
                num_specified,
 
            };
 
        }
 
    }
 

	
 
    MatchPolymorphResult::Matching
 
}
 
\ No newline at end of file
src/protocol/parser/visitor_linker.rs
 
@@ -318,1308 +318,1451 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 
            let old_while = self.in_while.replace(id);
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::While(id);
 
            self.visit_expr(ctx, test_id)?;
 
            self.expr_parent = ExpressionParent::None;
 

	
 
            self.visit_stmt(ctx, body_id)?;
 
            self.in_while = old_while;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_break_stmt(&mut self, ctx: &mut Ctx, id: BreakStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            // Should be able to resolve break statements with a label in the
 
            // breadth pass; no need to wait until all labels have been resolved
 
            let target_end_while = {
 
                let stmt = &ctx.heap[id];
 
                let target_while_id = self.resolve_break_or_continue_target(ctx, stmt.position, &stmt.label)?;
 
                let target_while = &ctx.heap[target_while_id];
 
                debug_assert!(target_while.end_while.is_some());
 
                target_while.end_while.unwrap()
 
            };
 

	
 
            let stmt = &mut ctx.heap[id];
 
            stmt.target = Some(target_end_while);
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_continue_stmt(&mut self, ctx: &mut Ctx, id: ContinueStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let target_while_id = {
 
                let stmt = &ctx.heap[id];
 
                self.resolve_break_or_continue_target(ctx, stmt.position, &stmt.label)?
 
            };
 

	
 
            let stmt = &mut ctx.heap[id];
 
            stmt.target = Some(target_while_id)
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            // Check for validity of synchronous statement
 
            let cur_sync_position = ctx.heap[id].position;
 
            if self.in_sync.is_some() {
 
                // Nested synchronous statement
 
                let old_sync = &ctx.heap[self.in_sync.unwrap()];
 
                return Err(
 
                    ParseError::new_error(&ctx.module.source, cur_sync_position, "Illegal nested synchronous statement")
 
                        .with_postfixed_info(&ctx.module.source, old_sync.position, "It is nested in this synchronous statement")
 
                );
 
            }
 

	
 
            if !self.def_type.is_primitive() {
 
                return Err(ParseError::new_error(
 
                    &ctx.module.source, cur_sync_position,
 
                    "Synchronous statements may only be used in primitive components"
 
                ));
 
            }
 

	
 
            // Append SynchronousEnd pseudo-statement
 
            let sync_end_id = ctx.heap.alloc_end_synchronous_statement(|this| EndSynchronousStatement{
 
                this,
 
                position: cur_sync_position,
 
                start_sync: id,
 
                next: None,
 
            });
 
            let sync_start = &mut ctx.heap[id];
 
            sync_start.end_sync = Some(sync_end_id);
 
            self.insert_buffer.push((self.relative_pos_in_block + 1, sync_end_id.upcast()));
 
        } else {
 
            let sync_body = ctx.heap[id].body;
 
            let old = self.in_sync.replace(id);
 
            self.visit_stmt_with_hint(ctx, sync_body, Some(id))?;
 
            self.in_sync = old;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let stmt = &ctx.heap[id];
 
            if !self.def_type.is_function() {
 
                return Err(
 
                    ParseError::new_error(&ctx.module.source, stmt.position, "Return statements may only appear in function bodies")
 
                );
 
            }
 
        } else {
 
            // If here then we are within a function
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::Return(id);
 
            self.visit_expr(ctx, ctx.heap[id].expression)?;
 
            self.expr_parent = ExpressionParent::None;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_assert_stmt(&mut self, ctx: &mut Ctx, id: AssertStatementId) -> VisitorResult {
 
        let stmt = &ctx.heap[id];
 
        if self.performing_breadth_pass {
 
            if self.def_type.is_function() {
 
                // TODO: We probably want to allow this. Mark the function as
 
                //  using asserts, and then only allow calls to these functions
 
                //  within components. Such a marker will cascade through any
 
                //  functions that then call an asserting function
 
                return Err(
 
                    ParseError::new_error(&ctx.module.source, stmt.position, "Illegal assert statement in a function")
 
                );
 
            }
 

	
 
            // We are in a component of some sort, but we also need to be within a
 
            // synchronous statement
 
            if self.in_sync.is_none() {
 
                return Err(
 
                    ParseError::new_error(&ctx.module.source, stmt.position, "Illegal assert statement outside of a synchronous block")
 
                );
 
            }
 
        } else {
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            let expr_id = stmt.expression;
 

	
 
            self.expr_parent = ExpressionParent::Assert(id);
 
            self.visit_expr(ctx, expr_id)?;
 
            self.expr_parent = ExpressionParent::None;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_goto_stmt(&mut self, ctx: &mut Ctx, id: GotoStatementId) -> VisitorResult {
 
        if !self.performing_breadth_pass {
 
            // Must perform goto label resolving after the breadth pass; this
 
            // way we are able to find all the labels in current and outer
 
            // scopes.
 
            let target_id = self.find_label(ctx, &ctx.heap[id].label)?;
 
            ctx.heap[id].target = Some(target_id);
 

	
 
            let target = &ctx.heap[target_id];
 
            if self.in_sync != target.in_sync {
 
                // We can only goto the current scope or outer scopes. Because
 
                // nested sync statements are not allowed, so if the value does
 
                // not match, then we must be inside a sync scope
 
                debug_assert!(self.in_sync.is_some());
 
                let goto_stmt = &ctx.heap[id];
 
                let sync_stmt = &ctx.heap[self.in_sync.unwrap()];
 
                return Err(
 
                    ParseError::new_error(&ctx.module.source, goto_stmt.position, "Goto may not escape the surrounding synchronous block")
 
                        .with_postfixed_info(&ctx.module.source, target.position, "This is the target of the goto statement")
 
                        .with_postfixed_info(&ctx.module.source, sync_stmt.position, "Which will jump past this statement")
 
                );
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_new_stmt(&mut self, ctx: &mut Ctx, id: NewStatementId) -> VisitorResult {
 
        // Link the call expression following the new statement
 
        if self.performing_breadth_pass {
 
            // TODO: Cleanup error messages, can be done cleaner
 
            // Make sure new statement occurs within a composite component
 
            let call_expr_id = ctx.heap[id].expression;
 
            if !self.def_type.is_composite() {
 
                let new_stmt = &ctx.heap[id];
 
                return Err(
 
                    ParseError::new_error(&ctx.module.source, new_stmt.position, "Instantiating components may only be done in composite components")
 
                );
 
            }
 

	
 
            // We make sure that we point to a symbolic method. Checking that it
 
            // points to a component is done in the depth pass.
 
            let call_expr = &ctx.heap[call_expr_id];
 
            if let Method::Symbolic(_) = &call_expr.method {
 
                // We're fine
 
            } else {
 
                return Err(
 
                    ParseError::new_error(&ctx.module.source, call_expr.position, "Must instantiate a component")
 
                );
 
            }
 
        } else {
 
            // Just call `visit_call_expr`. We do some extra work we don't have
 
            // to, but this prevents silly mistakes.
 
            let call_expr_id = ctx.heap[id].expression;
 

	
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::New(id);
 
            self.visit_call_expr(ctx, call_expr_id)?;
 
            self.expr_parent = ExpressionParent::None;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        if !self.performing_breadth_pass {
 
            let expr_id = ctx.heap[id].expression;
 

	
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::ExpressionStmt(id);
 
            self.visit_expr(ctx, expr_id)?;
 
            self.expr_parent = ExpressionParent::None;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 

	
 
    //--------------------------------------------------------------------------
 
    // Expression visitors
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let upcast_id = id.upcast();
 
        let assignment_expr = &mut ctx.heap[id];
 

	
 
        let left_expr_id = assignment_expr.left;
 
        let right_expr_id = assignment_expr.right;
 
        let old_expr_parent = self.expr_parent;
 
        assignment_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, right_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 
        Ok(())
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 
        let upcast_id = id.upcast();
 
        let conditional_expr = &mut ctx.heap[id];
 

	
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        conditional_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, true_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 2);
 
        self.visit_expr(ctx, false_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 
        let upcast_id = id.upcast();
 
        let binary_expr = &mut ctx.heap[id];
 
        let left_expr_id = binary_expr.left;
 
        let right_expr_id = binary_expr.right;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        binary_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, right_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let unary_expr = &mut ctx.heap[id];
 
        let expr_id = unary_expr.expression;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        unary_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(id.upcast(), 0);
 
        self.visit_expr(ctx, expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 
        let upcast_id = id.upcast();
 
        let indexing_expr = &mut ctx.heap[id];
 

	
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        indexing_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, index_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 
        let upcast_id = id.upcast();
 
        let slicing_expr = &mut ctx.heap[id];
 

	
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        slicing_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, from_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 2);
 
        self.visit_expr(ctx, to_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let select_expr = &mut ctx.heap[id];
 
        let expr_id = select_expr.subject;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        select_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(id.upcast(), 0);
 
        self.visit_expr(ctx, expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let upcast_id = id.upcast();
 
        let array_expr = &mut ctx.heap[id];
 

	
 
        let old_num_exprs = self.expression_buffer.len();
 
        self.expression_buffer.extend(&array_expr.elements);
 
        let new_num_exprs = self.expression_buffer.len();
 

	
 
        let old_expr_parent = self.expr_parent;
 
        array_expr.parent = old_expr_parent;
 

	
 
        for field_expr_idx in old_num_exprs..new_num_exprs {
 
            let field_expr_id = self.expression_buffer[field_expr_idx];
 
            self.expr_parent = ExpressionParent::Expression(upcast_id, field_expr_idx as u32);
 
            self.visit_expr(ctx, field_expr_id)?;
 
        }
 

	
 
        self.expression_buffer.truncate(old_num_exprs);
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        const FIELD_NOT_FOUND_SENTINEL: usize = usize::max_value();
 
        const VARIANT_NOT_FOUND_SENTINEL: usize = FIELD_NOT_FOUND_SENTINEL;
 

	
 
        let constant_expr = &mut ctx.heap[id];
 
        let old_expr_parent = self.expr_parent;
 
        constant_expr.parent = old_expr_parent;
 

	
 
        match &mut constant_expr.value {
 
            Literal::Null | Literal::True | Literal::False |
 
            Literal::Character(_) | Literal::Integer(_) => {
 
                // Just the parent has to be set, done above
 
            },
 
            Literal::Struct(literal) => {
 
                let upcast_id = id.upcast();
 

	
 
                // Retrieve and set the literal's definition
 
                let definition = self.find_symbol_of_type(
 
                    &ctx.module.source, ctx.module.root_id, &ctx.symbols, &ctx.types,
 
                    &literal.identifier, TypeClass::Struct
 
                )?;
 
                literal.definition = Some(definition.ast_definition);
 

	
 
                let definition = definition.definition.as_struct();
 

	
 
                // Make sure all fields are specified, none are specified twice
 
                // and all fields exist on the struct definition
 
                let mut specified = Vec::new(); // TODO: @performance
 
                specified.resize(definition.fields.len(), false);
 

	
 
                for field in &mut literal.fields {
 
                    // Find field in the struct definition
 
                    field.field_idx = FIELD_NOT_FOUND_SENTINEL;
 
                    for (def_field_idx, def_field) in definition.fields.iter().enumerate() {
 
                        if field.identifier == def_field.identifier {
 
                            field.field_idx = def_field_idx;
 
                            break;
 
                        }
 
                    }
 

	
 
                    // Check if not found
 
                    if field.field_idx == FIELD_NOT_FOUND_SENTINEL {
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, field.identifier.position,
 
                            &format!(
 
                                "This field does not exist on the struct '{}'",
 
                                &String::from_utf8_lossy(&literal.identifier.value),
 
                            )
 
                        ));
 
                    }
 

	
 
                    // Check if specified more than once
 
                    if specified[field.field_idx] {
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, field.identifier.position,
 
                            "This field is specified more than once"
 
                        ));
 
                    }
 

	
 
                    specified[field.field_idx] = true;
 
                }
 

	
 
                if !specified.iter().all(|v| *v) {
 
                    // Some fields were not specified
 
                    let mut not_specified = String::new();
 
                    for (def_field_idx, is_specified) in specified.iter().enumerate() {
 
                        if !is_specified {
 
                            if !not_specified.is_empty() { not_specified.push_str(", ") }
 
                            let field_ident = &definition.fields[def_field_idx].identifier;
 
                            not_specified.push_str(&String::from_utf8_lossy(&field_ident.value));
 
                        }
 
                    }
 

	
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, literal.identifier.position,
 
                        &format!("Not all fields are specified, [{}] are missing", not_specified)
 
                    ));
 
                }
 

	
 
                // Need to traverse the field expressions in the struct and
                // evaluate the poly args
 
                let old_num_exprs = self.expression_buffer.len();
 
                self.expression_buffer.extend(literal.fields.iter().map(|v| v.value));
 
                let new_num_exprs = self.expression_buffer.len();
 

	
 
                self.visit_literal_poly_args(ctx, id)?;
 

	
 
                for expr_idx in old_num_exprs..new_num_exprs {
 
                    let expr_id = self.expression_buffer[expr_idx];
 
                    self.expr_parent = ExpressionParent::Expression(upcast_id, expr_idx as u32);
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 

	
 
                self.expression_buffer.truncate(old_num_exprs);
 
            },
 
            Literal::Enum(literal) => {
 
                let upcast_id = id.upcast();
 

	
 
                // Retrieve and set type of enumeration
 
                let (definition, ident_iter) = self.find_symbol_of_type_variant(
 
                    &ctx.module.source, ctx.module.root_id, &ctx.symbols, &ctx.types, 
 
                    &literal.identifier, TypeClass::Enum
 
                )?;
 
                literal.definition = Some(definition.ast_definition);
 

	
 
                // Make sure the variant exists
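                // (Illustrative example: for `enum Choice { A, B }` the literal
                // `Choice::A` should resolve `variant_idx` to 0, while a non-existent
                // variant keeps the sentinel and produces the error below.)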
 
                let (variant_ident, _) = ident_iter.prev().unwrap();
 
                let enum_definition = definition.definition.as_enum();
 
                literal.variant_idx = VARIANT_NOT_FOUND_SENTINEL;
 

	
 
                for (variant_idx, variant) in enum_definition.variants.iter().enumerate() {
 
                    if variant.identifier.value == variant_ident {
 
                        literal.variant_idx = variant_idx;
 
                        break;
 
                    }
 
                }
 

	
 
                if literal.variant_idx == VARIANT_NOT_FOUND_SENTINEL {
 
                    let variant = String::from_utf8_lossy(variant_ident).to_string();
 
                    let literal = ctx.heap[id].value.as_enum();
 
                    let enum_definition = ctx.heap[definition.ast_definition].as_enum();
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, literal.identifier.position,
 
                        &format!(
 
                            "The variant '{}' does not exist on the enum '{}'",
 
                            &variant, &String::from_utf8_lossy(&enum_definition.identifier.value)
 
                        )
 
                    ))
 
                }
 

	
 
                self.visit_literal_poly_args(ctx, id)?;
 
            }
 
        }
 

	
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let call_expr = &mut ctx.heap[id];
 
        let num_expr_args = call_expr.arguments.len();
 

	
 
        // Resolve the method to the appropriate definition and check the
 
        // legality of the particular method call.
 
        // TODO: @cleanup Unify in some kind of signature call, see similar
 
        //  cleanup comments with this `match` format.
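        // (Sketch of the intent: `get`, `put` and `fires` are only legal inside a
        //  synchronous block of a primitive component definition; calling them from
        //  a function body or outside a sync block is rejected by the checks below.)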
 
        let num_definition_args;
 
        match &mut call_expr.method {
 
            Method::Create => {
 
                num_definition_args = 1;
 
            },
 
            Method::Fires => {
 
                if !self.def_type.is_primitive() {
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'fires' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'fires' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
                num_definition_args = 1;
 
            },
 
            Method::Get => {
 
                if !self.def_type.is_primitive() {
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'get' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'get' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
                num_definition_args = 1;
 
            },
 
            Method::Put => {
 
                if !self.def_type.is_primitive() {
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'put' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'put' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
                num_definition_args = 2;
 
            }
 
            Method::Symbolic(symbolic) => {
 
                // Find symbolic procedure
 
                let expected_type = if let ExpressionParent::New(_) = self.expr_parent {
 
                    // Expect to find a component
 
                    TypeClass::Component
 
                } else {
 
                    // Expect to find a function
 
                    TypeClass::Function
 
                };
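                // (Illustrative: `new some_component(x)` should resolve the symbol as
                // a component, while `some_function(x)` in a normal expression resolves
                // it as a function; both names here are hypothetical.)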
 

	
 
                let definition = self.find_symbol_of_type(
 
                    &ctx.module.source, ctx.module.root_id, &ctx.symbols, &ctx.types,
 
                    &symbolic.identifier, expected_type
 
                )?;
 

	
 
                symbolic.definition = Some(definition.ast_definition);
 
                match &definition.definition {
 
                    DefinedTypeVariant::Function(definition) => {
 
                        num_definition_args = definition.arguments.len();
 
                    },
 
                    DefinedTypeVariant::Component(definition) => {
 
                        num_definition_args = definition.arguments.len();
 
                    }
 
                    _ => unreachable!(),
 
                }
 
            }
 
        }
 

	
 
        // Check the poly args and the number of variables in the call
 
        // expression
 
        self.visit_call_poly_args(ctx, id)?;
 
        let call_expr = &mut ctx.heap[id];
 
        if num_expr_args != num_definition_args {
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, call_expr.position,
 
                &format!(
 
                    "This call expects {} arguments, but {} were provided",
 
                    num_definition_args, num_expr_args
 
                )
 
            ));
 
        }
 

	
 
        // Recurse into all of the arguments and set the expression's parent
 
        let upcast_id = id.upcast();
 

	
 
        let old_num_exprs = self.expression_buffer.len();
 
        self.expression_buffer.extend(&call_expr.arguments);
 
        let new_num_exprs = self.expression_buffer.len();
 

	
 
        let old_expr_parent = self.expr_parent;
 
        call_expr.parent = old_expr_parent;
 

	
 
        for arg_expr_idx in old_num_exprs..new_num_exprs {
 
            let arg_expr_id = self.expression_buffer[arg_expr_idx];
 
            self.expr_parent = ExpressionParent::Expression(upcast_id, arg_expr_idx as u32);
 
            self.visit_expr(ctx, arg_expr_id)?;
 
        }
 

	
 
        self.expression_buffer.truncate(old_num_exprs);
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let var_expr = &ctx.heap[id];
 
        let variable_id = self.find_variable(ctx, self.relative_pos_in_block, &var_expr.identifier)?;
 
        let var_expr = &mut ctx.heap[id];
 
        var_expr.declaration = Some(variable_id);
 
        var_expr.parent = self.expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // ParserType visitors
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_parser_type(&mut self, ctx: &mut Ctx, id: ParserTypeId) -> VisitorResult {
 
        let old_num_types = self.parser_type_buffer.len();
 
        match self.visit_parser_type_without_buffer_cleanup(ctx, id) {
 
            Ok(_) => {
 
                debug_assert_eq!(self.parser_type_buffer.len(), old_num_types);
 
                Ok(())
 
            },
 
            Err(err) => {
 
                self.parser_type_buffer.truncate(old_num_types);
 
                Err(err)
 
            }
 
        }
 
    }
 
}
 

	
 
impl ValidityAndLinkerVisitor {
 
    //--------------------------------------------------------------------------
 
    // Special traversal
 
    //--------------------------------------------------------------------------
 

	
 
    /// Will visit a statement with a hint about its wrapping statement. This is
 
    /// used to distinguish block statements with a wrapping synchronous
 
    /// statement from normal block statements.
 
    fn visit_stmt_with_hint(&mut self, ctx: &mut Ctx, id: StatementId, hint: Option<SynchronousStatementId>) -> VisitorResult {
 
        if let Statement::Block(block_stmt) = &ctx.heap[id] {
 
            let block_id = block_stmt.this;
 
            self.visit_block_stmt_with_hint(ctx, block_id, hint)
 
        } else {
 
            self.visit_stmt(ctx, id)
 
        }
 
    }
 

	
 
    fn visit_block_stmt_with_hint(&mut self, ctx: &mut Ctx, id: BlockStatementId, hint: Option<SynchronousStatementId>) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            // Performing a breadth pass, so don't traverse into the statements
 
            // of the block.
 
            return Ok(())
 
        }
 

	
 
        // Set parent scope and relative position in the parent scope. Remember
 
        // these values to set them back to the old values when we're done with
 
        // the traversal of the block's statements.
 
        let body = &mut ctx.heap[id];
 
        body.parent_scope = self.cur_scope.clone();
 
        body.relative_pos_in_parent = self.relative_pos_in_block;
 

	
 
        let old_scope = self.cur_scope.replace(match hint {
 
            Some(sync_id) => Scope::Synchronous((sync_id, id)),
 
            None => Scope::Regular(id),
 
        });
 
        let old_relative_pos = self.relative_pos_in_block;
 

	
 
        // Copy statement IDs into buffer
 
        let old_num_stmts = self.statement_buffer.len();
 
        {
 
            let body = &ctx.heap[id];
 
            self.statement_buffer.extend_from_slice(&body.statements);
 
        }
 
        let new_num_stmts = self.statement_buffer.len();
 

	
 
        // Perform the breadth-first pass. Its main purpose is to find labeled
 
        // statements such that we can find the `goto`-targets immediately when
 
        // performing the depth pass
 
        self.performing_breadth_pass = true;
 
        for stmt_idx in old_num_stmts..new_num_stmts {
 
            self.relative_pos_in_block = (stmt_idx - old_num_stmts) as u32;
 
            self.visit_stmt(ctx, self.statement_buffer[stmt_idx])?;
 
        }
 

	
 
        if !self.insert_buffer.is_empty() {
 
            let body = &mut ctx.heap[id];
 
            for (insert_idx, (pos, stmt)) in self.insert_buffer.drain(..).enumerate() {
 
                body.statements.insert(pos as usize + insert_idx, stmt);
 
            }
 
        }
 

	
 
        // And then the depth pass. Because we iterate over the statement buffer,
        // we never visit any inserted nodes, so we may safely use the
        // relative_pos_in_block counter.
 
        self.performing_breadth_pass = false;
 
        for stmt_idx in old_num_stmts..new_num_stmts {
 
            self.relative_pos_in_block = (stmt_idx - old_num_stmts) as u32;
 
            self.visit_stmt(ctx, self.statement_buffer[stmt_idx])?;
 
        }
 

	
 
        self.cur_scope = old_scope;
 
        self.relative_pos_in_block = old_relative_pos;
 

	
 
        // Pop statement buffer
 
        debug_assert!(self.insert_buffer.is_empty(), "insert buffer not empty after depth pass");
 
        self.statement_buffer.truncate(old_num_stmts);
 

	
 
        Ok(())
 
    }
 

	
 
    /// Visits a particular ParserType in the AST and resolves temporary and
 
    /// implicitly inferred types into the appropriate tree. Note that a
 
    /// ParserType node is a tree. Only call this function on the root node of
 
    /// that tree to prevent doing work more than once.
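    ///
    /// (Illustrative example, reusing syntax from the test suite: a declaration
    /// such as `Pair<byte, auto> p` is a single ParserType tree whose `auto` leaf
    /// is resolved into an inferred type by this visit.)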
 
    fn visit_parser_type_without_buffer_cleanup(&mut self, ctx: &mut Ctx, id: ParserTypeId) -> VisitorResult {
 
        use ParserTypeVariant as PTV;
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let init_num_types = self.parser_type_buffer.len();
 
        self.parser_type_buffer.push(id);
 

	
 
        'resolve_loop: while self.parser_type_buffer.len() > init_num_types {
 
            let parser_type_id = self.parser_type_buffer.pop().unwrap();
 
            let parser_type = &ctx.heap[parser_type_id];
 

	
 
            let (symbolic_pos, symbolic_variant, num_inferred_to_allocate) = match &parser_type.variant {
 
                PTV::Message | PTV::Bool |
 
                PTV::Byte | PTV::Short | PTV::Int | PTV::Long |
 
                PTV::String |
 
                PTV::IntegerLiteral | PTV::Inferred => {
 
                    // Builtin types or types that do not require recursion
 
                    continue 'resolve_loop;
 
                },
 
                PTV::Array(subtype_id) |
 
                PTV::Input(subtype_id) |
 
                PTV::Output(subtype_id) => {
 
                    // Requires recursing
 
                    self.parser_type_buffer.push(*subtype_id);
 
                    continue 'resolve_loop;
 
                },
 
                PTV::Symbolic(symbolic) => {
 
                    // Retrieve poly_vars from function/component definition to
 
                    // match against.
 
                    let (definition_id, poly_vars) = match self.def_type {
 
                        DefinitionType::None => unreachable!(),
 
                        DefinitionType::Primitive(id) => (id.upcast(), &ctx.heap[id].poly_vars),
 
                        DefinitionType::Composite(id) => (id.upcast(), &ctx.heap[id].poly_vars),
 
                        DefinitionType::Function(id) => (id.upcast(), &ctx.heap[id].poly_vars),
 
                    };
 

	
 
                    let mut symbolic_variant = None;
 
                    for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
                        if symbolic.identifier.matches_identifier(poly_var) {
 
                            // Type refers to a polymorphic variable.
 
                            // TODO: @hkt Maybe allow higher-kinded types?
 
                            if symbolic.identifier.get_poly_args().is_some() {
 
                                return Err(ParseError::new_error(
 
                                    &ctx.module.source, symbolic.identifier.position,
 
                                    "Polymorphic arguments to a polymorphic variable (higher-kinded types) are not allowed (yet)"
 
                                ));
 
                            }
 
                            symbolic_variant = Some(SymbolicParserTypeVariant::PolyArg(definition_id, poly_var_idx));
 
                        }
 
                    }
 

	
 
                    if let Some(symbolic_variant) = symbolic_variant {
 
                        // Identifier points to a polymorphic argument
 
                        (symbolic.identifier.position, symbolic_variant, 0)
 
                    } else {
 
                        // Must be a user-defined type, otherwise an error
 
                        let (found_type, ident_iter) = find_type_definition(
 
                            &ctx.symbols, &ctx.types, ctx.module.root_id, &symbolic.identifier
 
                        ).as_parse_error(&ctx.module.source)?;
 

	
 
                        // TODO: @function_ptrs: Allow function pointers at some
 
                        //  point in the future
 
                        if found_type.definition.type_class().is_proc_type() {
 
                            return Err(ParseError::new_error(
 
                                &ctx.module.source, symbolic.identifier.position,
 
                                &format!(
 
                                    "This identifier points to a {} type, expected a datatype",
 
                                    found_type.definition.type_class()
 
                                )
 
                            ));
 
                        }
 

	
 
                        // If the type is polymorphic then we have two cases: if
 
                        // the programmer did not specify the polyargs then we
 
                        // assume we're going to infer all of them. Otherwise we
 
                        // make sure that they match in count.
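                        // (Illustrative: with `struct Pair<T1, T2>{ .. }`, writing
                        // `Pair p` infers both arguments, `Pair<byte, int> p` must
                        // supply exactly two, and a partial list is rejected by the
                        // matcher below.)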
 
                        let (_, poly_args) = ident_iter.prev().unwrap();
 
                        let num_to_infer = match_polymorphic_args_to_vars(
 
                            found_type, poly_args, symbolic.identifier.position
 
                        ).as_parse_error(&ctx.heap, &ctx.module.source)?;
 

	
 
                        (
 
                            symbolic.identifier.position,
 
                            SymbolicParserTypeVariant::Definition(found_type.ast_definition),
 
                            num_to_infer
 
                        )
 
                    }
 
                }
 
            };
 

	
 
            // If here then type is symbolic, perform a mutable borrow (and do
 
            // some rust shenanigans) to set the required information.
 
            for _ in 0..num_inferred_to_allocate {
 
                // TODO: @hack, not very user friendly to manually allocate
 
                //  `inferred` ParserTypes with the InputPosition of the
 
                //  symbolic type's identifier.
 
                // We reuse the `parser_type_buffer` to temporarily store these
 
                // and we'll take them out later
 
                self.parser_type_buffer.push(ctx.heap.alloc_parser_type(|this| ParserType{
 
                    this,
 
                    pos: symbolic_pos,
 
                    variant: ParserTypeVariant::Inferred,
 
                }));
 
            }
 

	
 
            if let PTV::Symbolic(symbolic) = &mut ctx.heap[parser_type_id].variant {
 
                if num_inferred_to_allocate != 0 {
 
                    symbolic.poly_args2.reserve(num_inferred_to_allocate);
 
                    for _ in 0..num_inferred_to_allocate {
 
                        symbolic.poly_args2.push(self.parser_type_buffer.pop().unwrap());
 
                    }
 
                } else if !symbolic.identifier.poly_args.is_empty() {
 
                    symbolic.poly_args2.extend(&symbolic.identifier.poly_args)
 
                }
 
                symbolic.variant = Some(symbolic_variant);
 
            } else {
 
                unreachable!();
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Utilities
 
    //--------------------------------------------------------------------------
 

	
 
    /// Adds a local variable to the current scope. It will also annotate the
 
    /// `Local` in the AST with its relative position in the block.
 
    fn checked_local_add(&mut self, ctx: &mut Ctx, relative_pos: u32, id: LocalId) -> Result<(), ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        // Make sure we do not conflict with any global symbols
 
        {
 
            let ident = &ctx.heap[id].identifier;
 
            if let Some(symbol) = ctx.symbols.resolve_symbol(ctx.module.root_id, &ident.value) {
 
                return Err(
 
                    ParseError::new_error(&ctx.module.source, ident.position, "Local variable declaration conflicts with symbol")
 
                        .with_postfixed_info(&ctx.module.source, symbol.position, "Conflicting symbol is found here")
 
                );
 
            }
 
        }
 

	
 
        let local = &mut ctx.heap[id];
 
        local.relative_pos_in_block = relative_pos;
 

	
 
        // Make sure we do not shadow any variables in any of the scopes. Note
 
        // that variables in parent scopes may be declared later
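        // (Illustrative: `int x = 0; { int x = 1; }` conflicts, because the inner `x`
        //  shadows the earlier outer one, while `{ int y = 0; } int y = 1;` does not,
        //  since the outer `y` is only declared after the inner scope closes.)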
 
        let local = &ctx.heap[id];
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        let mut local_relative_pos = self.relative_pos_in_block;
 

	
 
        loop {
 
            debug_assert!(scope.is_block(), "scope is not a block");
 
            let block = &ctx.heap[scope.to_block()];
 
            for other_local_id in &block.locals {
 
                let other_local = &ctx.heap[*other_local_id];
 
                // Position check in case another variable with the same name
 
                // is defined in a higher-level scope, but later than the scope
 
                // in which the current variable resides.
 
                if local.this != *other_local_id &&
 
                    local_relative_pos >= other_local.relative_pos_in_block &&
 
                    local.identifier == other_local.identifier {
 
                    // Collision within this scope
 
                    return Err(
 
                        ParseError::new_error(&ctx.module.source, local.position, "Local variable name conflicts with another variable")
 
                            .with_postfixed_info(&ctx.module.source, other_local.position, "Previous variable is found here")
 
                    );
 
                }
 
            }
 

	
 
            // Current scope is fine, move to parent scope if any
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if let Scope::Definition(definition_id) = scope {
 
                // At outer scope, check parameters of function/component
 
                for parameter_id in ctx.heap[*definition_id].parameters() {
 
                    let parameter = &ctx.heap[*parameter_id];
 
                    if local.identifier == parameter.identifier {
 
                        return Err(
 
                            ParseError::new_error(&ctx.module.source, local.position, "Local variable name conflicts with parameter")
 
                                .with_postfixed_info(&ctx.module.source, parameter.position, "Parameter definition is found here")
 
                        );
 
                    }
 
                }
 

	
 
                break;
 
            }
 

	
 
            // If here, then the parent scope is itself a block
            local_relative_pos = ctx.heap[scope.to_block()].relative_pos_in_parent;
 
        }
 

	
 
        // No collisions at all
 
        let block = &mut ctx.heap[self.cur_scope.as_ref().unwrap().to_block()];
 
        block.locals.push(id);
 

	
 
        Ok(())
 
    }
 

	
 
    /// Finds a variable in the visitor's scope that must appear before the
 
    /// specified relative position within that block.
 
    fn find_variable(&self, ctx: &Ctx, mut relative_pos: u32, identifier: &NamespacedIdentifier) -> Result<VariableId, ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 
        debug_assert!(identifier.parts.len() == 1, "implement namespaced seeking of target associated with identifier");
 

	
 
        // TODO: May still refer to an alias of a global symbol using a single
 
        //  identifier in the namespace.
 
        // No need to use iterator over namespaces if here
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        
 
        loop {
 
            debug_assert!(scope.is_block());
 
            let block = &ctx.heap[scope.to_block()];
 
            
 
            for local_id in &block.locals {
 
                let local = &ctx.heap[*local_id];
 
                
 
                if local.relative_pos_in_block < relative_pos && identifier.matches_identifier(&local.identifier) {
 
                    return Ok(local_id.upcast());
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some());
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                // Definition scope, need to check arguments to definition
 
                match scope {
 
                    Scope::Definition(definition_id) => {
 
                        let definition = &ctx.heap[*definition_id];
 
                        for parameter_id in definition.parameters() {
 
                            let parameter = &ctx.heap[*parameter_id];
 
                            if identifier.matches_identifier(&parameter.identifier) {
 
                                return Ok(parameter_id.upcast());
 
                            }
 
                        }
 
                    },
 
                    _ => unreachable!(),
 
                }
 

	
 
                // Variable could not be found
 
                return Err(ParseError::new_error(
 
                    &ctx.module.source, identifier.position, "This variable is not declared"
 
                ));
 
            } else {
 
                relative_pos = block.relative_pos_in_parent;
 
            }
 
        }
 
    }
 

	
 
    /// Adds a particular label to the current scope. Will return an error if
 
    /// there is another label with the same name visible in the current scope.
 
    fn checked_label_add(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> Result<(), ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        // Make sure label is not defined within the current scope or any of the
 
        // parent scopes.
        let label = &ctx.heap[id];
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 

	
 
        loop {
 
            debug_assert!(scope.is_block(), "scope is not a block");
 
            let block = &ctx.heap[scope.to_block()];
 
            for other_label_id in &block.labels {
 
                let other_label = &ctx.heap[*other_label_id];
 
                if other_label.label == label.label {
 
                    // Collision
 
                    return Err(
 
                        ParseError::new_error(&ctx.module.source, label.position, "Label name conflicts with another label")
 
                            .with_postfixed_info(&ctx.module.source, other_label.position, "Other label is found here")
 
                    );
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                break;
 
            }
 
        }
 

	
 
        // No collisions
 
        let block = &mut ctx.heap[self.cur_scope.as_ref().unwrap().to_block()];
 
        block.labels.push(id);
 

	
 
        Ok(())
 
    }
 

	
 
    /// Finds a particular labeled statement by its identifier. Once found it
 
    /// will make sure that the target label does not skip over any variable
 
    /// declarations within the scope in which the label was found.
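    ///
    /// (Illustrative: a goto whose target label appears after `int x = 5;` in the
    /// same block is rejected here, since statements after the label may still
    /// refer to `x`; see the TODO in the body for a possible relaxation.)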
 
    fn find_label(&self, ctx: &Ctx, identifier: &Identifier) -> Result<LabeledStatementId, ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        loop {
 
            debug_assert!(scope.is_block(), "scope is not a block");
 
            let relative_scope_pos = ctx.heap[scope.to_block()].relative_pos_in_parent;
 

	
 
            let block = &ctx.heap[scope.to_block()];
 
            for label_id in &block.labels {
 
                let label = &ctx.heap[*label_id];
 
                if label.label == *identifier {
 
                    for local_id in &block.locals {
 
                        // TODO: Better to do this in control flow analysis, it
 
                        //  is legal to skip over a variable declaration if it
 
                        //  is not actually being used. I might be missing
 
                        //  something here when laying out the bytecode...
 
                        let local = &ctx.heap[*local_id];
 
                        if local.relative_pos_in_block > relative_scope_pos && local.relative_pos_in_block < label.relative_pos_in_block {
 
                            return Err(
 
                                ParseError::new_error(&ctx.module.source, identifier.position, "This target label skips over a variable declaration")
 
                                    .with_postfixed_info(&ctx.module.source, label.position, "Because it jumps to this label")
 
                                    .with_postfixed_info(&ctx.module.source, local.position, "Which skips over this variable")
 
                            );
 
                        }
 
                    }
 
                    return Ok(*label_id);
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                return Err(ParseError::new_error(&ctx.module.source, identifier.position, "Could not find this label"));
 
            }
 

	
 
        }
 
    }
 

	
 
    /// Finds a particular symbol in the symbol table which must correspond to
 
    /// a definition of a particular type.
 
    // Note: root_id, symbols and types passed in explicitly to prevent
 
    //  borrowing errors
 
    fn find_symbol_of_type<'t>(
        &self, source: &InputSource, root_id: RootId, symbols: &SymbolTable,
        types: &'t TypeTable, identifier: &NamespacedIdentifier,
        expected_type_class: TypeClass
    ) -> Result<&'t DefinedType, ParseError> {
 
        // Find symbol associated with identifier
 
        let (find_result, _) = find_type_definition(symbols, types, root_id, identifier)
 
            .as_parse_error(source)?;
 

	
 
        let definition_type_class = find_result.definition.type_class();
 
        if expected_type_class != definition_type_class {
 
            return Err(ParseError::new_error(
 
                source, identifier.position,
 
                &format!(
 
                    "Expected to find a {}, this symbol points to a {}",
 
                    expected_type_class, definition_type_class
 
                )
 
            ))
 
        }
 

	
 
        Ok(find_result)
 
    }
 

	
 
    /// Finds a particular enum/union using a namespaced identifier. We allow 
 
    /// for one more element to exist in the namespaced identifier that 
 
    /// supposedly resolves to the enum/union variant.
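    ///
    /// (Illustrative: for `enum Choice { A, B }`, the identifier `Choice::A`
    /// resolves `Choice` to the enum definition and leaves `A` as the trailing
    /// variant element returned through the iterator.)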
 
    fn find_symbol_of_type_variant<'t, 'i>(
 
        &self, source: &InputSource, root_id: RootId, symbols: &SymbolTable,
 
        types: &'t TypeTable, identifier: &'i NamespacedIdentifier,
 
        expected_type_class: TypeClass
 
    ) -> Result<(&'t DefinedType, NamespacedIdentifierIter<'i>), ParseError> {
 
        debug_assert!(expected_type_class == TypeClass::Enum || expected_type_class == TypeClass::Union);
 
        let (symbol, mut ident_iter) = symbols.resolve_namespaced_identifier(root_id, identifier);
 

	
 
        if symbol.is_none() {
 
            return Err(ParseError::new_error(
 
                source, identifier.position, 
 
                "Could not resolve this identifier to a symbol"
 
            ));
 
        }
 

	
 
        let symbol = symbol.unwrap();
 
        match symbol.symbol {
 
            Symbol::Namespace(_) => {
 
                return Err(ParseError::new_error(
 
                    source, identifier.position, 
 
                    "This identifier was resolved to a namespace instead of a type"
 
                ))
 
            },
 
            Symbol::Definition((_, definition_id)) => {
 
                let definition = types.get_base_definition(&definition_id);
 
                debug_assert!(definition.is_some());
 
                let definition = definition.unwrap();
 

	
 
                let definition_type_class = definition.definition.type_class();
 
                if expected_type_class != definition_type_class {
 
                    return Err(ParseError::new_error(
 
                        source, identifier.position,
 
                        &format!(
 
                            "Expected to find a {}, this symbols points to a {}",
 
                            expected_type_class, definition_type_class
 
                        )
 
                    ));
 
                }
 

	
 
                // Make sure we have a variant (that doesn't contain any 
 
                // polymorphic args)
 
                let next_part = ident_iter.next();
 
                if next_part.is_none() {
 
                    return Err(ParseError::new_error(
 
                        source, identifier.position,
 
                        &format!(
 
                            "This identifier points to the type '{}', did you mean to instantiate a variant?",
 
                            String::from_utf8_lossy(&identifier.value)
 
                        )
 
                    ));
 
                }
 
                let (_, next_polyargs) = next_part.unwrap();
 

	
 
                // Now we make sure that there aren't even more identifiers, and
 
                // make sure that the variant does not contain any polymorphic
 
                // arguments. In that case we can simplify the later visit of
 
                // the (optional) polymorphic args of the enum.
 
                let returned_section = ident_iter.returned_section();
 
                if ident_iter.num_remaining() != 0 {
 
                    return Err(ParseError::new_error(
 
                        source, identifier.position,
 
                        &format!(
 
                            "Too many identifiers, did you mean to write '{}'",
 
                            &String::from_utf8_lossy(returned_section)
 
                        )
 
                    ));
 
                }
 

	
 
                if next_polyargs.is_some() {
 
                    return Err(ParseError::new_error(
 
                        source, identifier.position,
 
                        "Encountered polymorphic args to an enum variant. These can only be specified for the enum type."
 
                    ))
 
                }
 

	
 
                // We're absolutely fine
 
                Ok((definition, ident_iter))
 
            }
 
        }
 
    }
 

	
 
    /// This function will check if the provided while statement ID has a block
 
    /// statement that is one of our current parents.
 
    fn has_parent_while_scope(&self, ctx: &Ctx, id: WhileStatementId) -> bool {
 
        debug_assert!(self.cur_scope.is_some());
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        let while_stmt = &ctx.heap[id];
 
        loop {
 
            debug_assert!(scope.is_block());
 
            let block = scope.to_block();
 
            if while_stmt.body == block.upcast() {
 
                return true;
 
            }
 

	
 
            let block = &ctx.heap[block];
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                return false;
 
            }
 
        }
 
    }
 

	
 
    /// This function should be called while dealing with break/continue
 
    /// statements. It will try to find the targeted while statement, using the
 
    /// target label if provided. If a valid target is found then the loop's
 
    /// ID will be returned, otherwise a parsing error is constructed.
 
    /// The provided input position should be the position of the break/continue
 
    /// statement.
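    ///
    /// (Illustrative: a bare `break;` targets the innermost enclosing while loop,
    /// while a labeled break must target a labeled while statement that actually
    /// encloses the break; the exact label syntax is assumed here for illustration.)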
 
    fn resolve_break_or_continue_target(&self, ctx: &Ctx, position: InputPosition, label: &Option<Identifier>) -> Result<WhileStatementId, ParseError> {
 
        let target = match label {
 
            Some(label) => {
 
                let target_id = self.find_label(ctx, label)?;
 

	
 
                // Make sure break target is a while statement
 
                let target = &ctx.heap[target_id];
 
                if let Statement::While(target_stmt) = &ctx.heap[target.body] {
 
                    // Even though we have a target while statement, the break might not be
 
                    // present underneath this particular labeled while statement
 
                    if !self.has_parent_while_scope(ctx, target_stmt.this) {
                        return Err(
                            ParseError::new_error(&ctx.module.source, label.position, "Break statement is not nested under the target label's while statement")
                                .with_postfixed_info(&ctx.module.source, target.position, "The targeted label is found here")
                        );
                    }
 

	
 
                    target_stmt.this
 
                } else {
 
                    return Err(
 
                        ParseError::new_error(&ctx.module.source, label.position, "Incorrect break target label, it must target a while loop")
 
                            .with_postfixed_info(&ctx.module.source, target.position, "The targeted label is found here")
 
                    );
 
                }
 
            },
 
            None => {
 
                // Use the enclosing while statement, the break must be
 
                // nested within that while statement
 
                if self.in_while.is_none() {
 
                    return Err(
 
                        ParseError::new_error(&ctx.module.source, position, "Break statement is not nested under a while loop")
 
                    );
 
                }
 

	
 
                self.in_while.unwrap()
 
            }
 
        };
 

	
 
        // We have a valid target for the break statement. But we need to
 
        // make sure we will not break out of a synchronous block
 
        {
 
            let target_while = &ctx.heap[target];
 
            if target_while.in_sync != self.in_sync {
 
                // Break is nested under while statement, so can only escape a
 
                // sync block if the sync is nested inside the while statement.
 
                debug_assert!(self.in_sync.is_some());
 
                let sync_stmt = &ctx.heap[self.in_sync.unwrap()];
 
                return Err(
 
                    ParseError::new_error(&ctx.module.source, position, "Break may not escape the surrounding synchronous block")
 
                        .with_postfixed_info(&ctx.module.source, target_while.position, "The break escapes out of this loop")
 
                        .with_postfixed_info(&ctx.module.source, sync_stmt.position, "And would therefore escape this synchronous block")
 
                );
 
            }
 
        }
 

	
 
        Ok(target)
 
    }
 

	
 
    // TODO: @cleanup, merge with function below
 
    fn visit_call_poly_args(&mut self, ctx: &mut Ctx, call_id: CallExpressionId) -> VisitorResult {
 
        // TODO: @token Revisit when tokenizer is implemented
 
        let call_expr = &mut ctx.heap[call_id];
 
        if let Method::Symbolic(symbolic) = &mut call_expr.method {
 
            if let Some(poly_args) = symbolic.identifier.get_poly_args() {
 
                call_expr.poly_args.extend(poly_args);
 
            }
 
        }
 

	
 
        let call_expr = &ctx.heap[call_id];
 

	
 
        // Determine the polyarg signature
 
        let num_expected_poly_args = match &call_expr.method {
 
            Method::Create => {
 
                0
 
            },
 
            Method::Fires => {
 
                1
 
            },
 
            Method::Get => {
 
                1
 
            },
 
            Method::Put => {
 
                1
 
            }
 
            Method::Symbolic(symbolic) => {
 
                // Retrieve type and make sure number of specified polymorphic 
 
                // arguments is correct
 

	
 
                let definition = &ctx.heap[symbolic.definition.unwrap()];
 
                match definition {
 
                    Definition::Function(definition) => definition.poly_vars.len(),
 
                    Definition::Component(definition) => definition.poly_vars.len(),
 
                    _ => {
 
                        debug_assert!(false, "expected function or component definition while visiting call poly args");
 
                        unreachable!();
 
                    }
 
                }
 
            }
 
        };
 

	
 
        // We allow zero polyargs to imply all args are inferred. Otherwise the
 
        // number of arguments must be equal
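        // (Illustrative, reusing `int poly<T>(T a, T b)` from the test suite: a call
        //  that specifies no polymorphic arguments infers `T`, while a call that does
        //  specify them must specify exactly one, matching the declaration.)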
 
        if call_expr.poly_args.is_empty() {
 
            if num_expected_poly_args != 0 {
 
                // Infer all polyargs
 
                // TODO: @cleanup Not nice to use method position as implicitly
 
                //  inferred parser type pos.
 
                let pos = call_expr.position();
 
                for _ in 0..num_expected_poly_args {
 
                    self.parser_type_buffer.push(ctx.heap.alloc_parser_type(|this| ParserType {
 
                        this,
 
                        pos,
 
                        variant: ParserTypeVariant::Inferred,
 
                    }));
 
                }
 

	
 
                let call_expr = &mut ctx.heap[call_id];
 
                call_expr.poly_args.reserve(num_expected_poly_args);
 
                for _ in 0..num_expected_poly_args {
 
                    call_expr.poly_args.push(self.parser_type_buffer.pop().unwrap());
 
                }
 
            }
 
            Ok(())
 
        } else if call_expr.poly_args.len() == num_expected_poly_args {
 
            // Number of args is not 0, so parse all the specified ParserTypes
 
            let old_num_types = self.parser_type_buffer.len();
 
            self.parser_type_buffer.extend(&call_expr.poly_args);
 
            while self.parser_type_buffer.len() > old_num_types {
 
                let parser_type_id = self.parser_type_buffer.pop().unwrap();
 
                self.visit_parser_type(ctx, parser_type_id)?;
 
            }
 
            self.parser_type_buffer.truncate(old_num_types);
 
            Ok(())
 
        } else {
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, call_expr.position,
 
                &format!(
 
                    "Expected {} polymorphic arguments (or none, to infer them), but {} were specified",
 
                    num_expected_poly_args, call_expr.poly_args.len()
 
                )
 
            ));
 
        }
 
    }
 

	
 
    fn visit_literal_poly_args(&mut self, ctx: &mut Ctx, lit_id: LiteralExpressionId) -> VisitorResult {
 
        // TODO: @token Revisit when tokenizer is implemented
 
        let literal_expr = &mut ctx.heap[lit_id];
 
        match &mut literal_expr.value {
 
            Literal::Struct(literal) => {
 
                literal.poly_args2.extend(&literal.identifier.poly_args);
 
            },
 
            Literal::Enum(literal) => {
 
                literal.poly_args2.extend(&literal.identifier.poly_args);
 
            },
 
            _ => {
 
                debug_assert!(false, "called visit_literal_poly_args on a non-polymorphic literal");
 
                unreachable!();
 
            }
 
        }
 

	
 
        let literal_expr = &ctx.heap[lit_id];
 
        let literal_pos = literal_expr.position;
 
        let (num_poly_args_to_infer, poly_args_to_visit) = match &literal_expr.value {
 
            Literal::Null | Literal::False | Literal::True |
 
            Literal::Character(_) | Literal::Integer(_) => {
 
                // Not a user error, but a programmer error: we're likely
                // doing work twice
 
                debug_assert!(false, "called visit_literal_poly_args on a non-polymorphic literal");
 
                unreachable!();
 
            },
 
            Literal::Struct(literal) => {
 
                // Retrieve type and make sure number of specified polymorphic
 
                // arguments is correct.
 
                let defined_type = ctx.types.get_base_definition(literal.definition.as_ref().unwrap())
 
                    .unwrap();
 
                let maybe_poly_args = literal.identifier.get_poly_args();
 
                let num_to_infer = match_polymorphic_args_to_vars(
 
                    defined_type, maybe_poly_args, literal.identifier.position
 
                ).as_parse_error(&ctx.heap, &ctx.module.source)?;
 

	
 
                // Visit all specified parser types
 
                (num_to_infer, &literal.poly_args2)
 
            },
 
            Literal::Enum(literal) => {
 
                let defined_type = ctx.types.get_base_definition(literal.definition.as_ref().unwrap())
 
                    .unwrap();
 
                let maybe_poly_args = literal.identifier.get_poly_args();
 
                let num_to_infer = match_polymorphic_args_to_vars(
 
                    defined_type, maybe_poly_args, literal.identifier.position
 
                ).as_parse_error(&ctx.heap, &ctx.module.source)?;
 

	
 
                println!("DEBUG: poly args 2: {:?}", &literal.poly_args2);
 
                (num_to_infer, &literal.poly_args2)
 
            }
 
        };
 

	
 
        // Visit all specified parser types in the polymorphic arguments
 
        let old_num_types = self.parser_type_buffer.len();
 
        self.parser_type_buffer.extend(poly_args_to_visit);
 
        while self.parser_type_buffer.len() > old_num_types {
 
            let parser_type_id = self.parser_type_buffer.pop().unwrap();
 
            self.visit_parser_type(ctx, parser_type_id)?;
 
        }
 
        self.parser_type_buffer.truncate(old_num_types);
 

	
 

	
 
        if num_poly_args_to_infer != 0 {
 
            for _ in 0..num_poly_args_to_infer {
 
                self.parser_type_buffer.push(ctx.heap.alloc_parser_type(|this| ParserType{
 
                    this, pos: literal_pos, variant: ParserTypeVariant::Inferred
 
                }));
 
            }
 

	
 
            let poly_args = match &mut ctx.heap[lit_id].value {
                Literal::Struct(literal) => &mut literal.poly_args2,
                Literal::Enum(literal) => &mut literal.poly_args2,
                _ => unreachable!(),
            };
            poly_args.reserve(num_poly_args_to_infer);
            for _ in 0..num_poly_args_to_infer {
                poly_args.push(self.parser_type_buffer.pop().unwrap());
            }
 
        }
 

	
 
        Ok(())
 
    }
 
}
 
\ No newline at end of file
src/protocol/tests/parser_inference.rs
Show inline comments
 
/// parser_inference.rs
 
///
 
/// Simple tests for the type inferences
 

	
 
use super::*;
 

	
 
#[test]
 
fn test_integer_inference() {
 
    Tester::new_single_source_expect_ok(
 
        "by arguments",
 
        "
 
        int call(byte b, short s, int i, long l) {
 
            auto b2 = b;
 
            auto s2 = s;
 
            auto i2 = i;
 
            auto l2 = l;
 
            return i2;
 
        }
 
        "
 
    ).for_function("call", |f| { f
 
        .for_variable("b2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("byte");
 
        })
 
        .for_variable("s2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("short");
 
        })
 
        .for_variable("i2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("int");
 
        })
 
        .for_variable("l2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("long");
 
        });
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "by assignment",
 
        "
 
        int call() {
 
            byte b1 = 0; short s1 = 0; int i1 = 0; long l1 = 0;
 
            auto b2 = b1;
 
            auto s2 = s1;
 
            auto i2 = i1;
 
            auto l2 = l1;
 
            return 0;
 
        }"
 
    ).for_function("call", |f| { f
 
        .for_variable("b2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("byte");
 
        })
 
        .for_variable("s2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("short");
 
        })
 
        .for_variable("i2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("int");
 
        })
 
        .for_variable("l2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("long");
 
        });
 
    });
 
}
 

	
 
#[test]
 
fn test_binary_expr_inference() {
 
    Tester::new_single_source_expect_ok(
 
        "compatible types",
 
        "int call() {
 
            byte b0 = 0;
 
            byte b1 = 1;
 
            short s0 = 0;
 
            short s1 = 1;
 
            int i0 = 0;
 
            int i1 = 1;
 
            long l0 = 0;
 
            long l1 = 1;
 
            auto b = b0 + b1;
 
            auto s = s0 + s1;
 
            auto i = i0 + i1;
 
            auto l = l0 + l1;
 
            return i;
 
        }"
 
    ).for_function("call", |f| { f
 
        .for_expression_by_source(
 
            "b0 + b1", "+", 
 
            |e| { e.assert_concrete_type("byte"); }
 
        )
 
        .for_expression_by_source(
 
            "s0 + s1", "+", 
 
            |e| { e.assert_concrete_type("short"); }
 
        )
 
        .for_expression_by_source(
 
            "i0 + i1", "+", 
 
            |e| { e.assert_concrete_type("int"); }
 
        )
 
        .for_expression_by_source(
 
            "l0 + l1", "+", 
 
            |e| { e.assert_concrete_type("long"); }
 
        );
 
    });
 

	
 
    Tester::new_single_source_expect_err(
 
        "incompatible types", 
 
        "int call() {
 
            byte b = 0;
 
            long l = 1;
 
            auto r = b + l;
 
            return 0;
 
        }"
 
    ).error(|e| { e
 
        .assert_ctx_has(0, "b + l")
 
        .assert_msg_has(0, "cannot apply")
 
        .assert_occurs_at(0, "+")
 
        .assert_msg_has(1, "has type 'byte'")
 
        .assert_msg_has(2, "has type 'long'");
 
    });
 
}
 

	
 

	
 

	
 
#[test]
 
fn test_struct_inference() {
 
    Tester::new_single_source_expect_ok(
 
        "by function calls",
 
        "
 
        struct Pair<T1, T2>{ T1 first, T2 second }
 
        Pair<T1, T2> construct<T1, T2>(T1 first, T2 second) { 
 
            return Pair{ first: first, second: second };
 
        }
 
        int fix_t1<T2>(Pair<byte, T2> arg) { return 0; }
 
        int fix_t2<T1>(Pair<T1, int> arg) { return 0; }
 
        int test() {
 
            auto first = 0;
 
            auto second = 1;
 
            auto pair = construct(first, second);
 
            fix_t1(pair);
 
            fix_t2(pair);
 
            return 0;
 
        }
 
        "
 
    ).for_function("test", |f| { f
 
        .for_variable("first", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("byte");
 
        })
 
        .for_variable("second", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("int");
 
        })
 
        .for_variable("pair", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("Pair<byte,int>");
 
        });
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "by field access",
 
        "
 
        struct Pair<T1, T2>{ T1 first, T2 second }
 
        Pair<T1, T2> construct<T1, T2>(T1 first, T2 second) {
 
            return Pair{ first: first, second: second };
 
        }
 
        int test() {
 
            auto first = 0;
 
            auto second = 1;
 
            auto pair = construct(first, second);
 
            byte assign_first = 0;
 
            long assign_second = 1;
 
            pair.first = assign_first;
 
            pair.second = assign_second;
 
            return 0;
 
        }
 
        "
 
    ).for_function("test", |f| { f
 
        .for_variable("first", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("byte");
 
        })
 
        .for_variable("second", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("long");
 
        })
 
        .for_variable("pair", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("Pair<byte,long>");
 
        });
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "by nested field access",
 
        "
 
        struct Node<T1, T2>{ T1 l, T2 r }
 
        Node<T1, T2> construct<T1, T2>(T1 l, T2 r) { return Node{ l: l, r: r }; }
 
        int fix_poly<T>(Node<T, T> a) { return 0; }
 
        int test() {
 
            byte assigned = 0;
 
            auto thing = construct(assigned, construct(0, 1));
 
            fix_poly(thing.r);
 
            thing.r.r = assigned;
 
            return 0;
 
        }
 
        ",
 
    ).for_function("test", |f| { f
 
        .for_variable("thing", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("Node<byte,Node<byte,byte>>");
 
        });
 
    });
 
}
 

	
 
#[test]
 
fn test_enum_inference() {
 
    Tester::new_single_source_expect_ok(
 
        "no polymorphic vars",
 
        "
 
        enum Choice { A, B }
 
        int test_instances() {
 
            auto foo = Choice::A;
 
            auto bar = Choice::B;
 
            return 0;
 
        }
 
        "
 
    ).for_function("test_instances", |f| { f
 
        .for_variable("foo", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("Choice");
 
        })
 
        .for_variable("bar", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("Choice");
 
        });
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "one polymorphic var",
 
        "
 
        enum Choice<T>{
 
            A,
 
            B,
 
        }
 
        int fix_as_byte(Choice<byte> arg) { return 0; }
 
        int fix_as_int(Choice<int> arg) { return 0; }
 
        int test_instances() {
 
            auto choice_byte = Choice::A;
 
            auto choice_int1 = Choice::B;
 
            Choice<auto> choice_int2 = Choice::B;
 
            fix_as_byte(choice_byte);
 
            fix_as_int(choice_int1);
 
            return fix_as_int(choice_int2);
 
        }
 
        "
 
    ).for_function("test_instances", |f| { f
 
        .for_variable("choice_byte", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("Choice<byte>");
 
        })
 
        .for_variable("choice_int1", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("Choice<int>");
 
        })
 
        .for_variable("choice_int2", |v| { v
 
            .assert_parser_type("Choice<auto>")
 
            .assert_concrete_type("Choice<int>");
 
        });
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "two polymorphic vars",
 
        "
 
        enum Choice<T1, T2>{ A, B, }
 
        int fix_t1<T>(Choice<byte, T> arg) { return 0; }
 
        int fix_t2<T>(Choice<T, int> arg) { return 0; }
 
        int test_instances() {
 
            Choice<byte, auto> choice1 = Choice::A;
 
            Choice<auto, int> choice2 = Choice::A;
 
            Choice<auto, auto> choice3 = Choice::B;
 
            auto choice4 = Choice::B;
 
            fix_t1(choice1); fix_t1(choice2); fix_t1(choice3); fix_t1(choice4);
 
            fix_t2(choice1); fix_t2(choice2); fix_t2(choice3); fix_t2(choice4);
 
            return 0;
 
        }
 
        "
 
    ).for_function("test_instances", |f| { f
 
        .for_variable("choice1", |v| { v
 
            .assert_parser_type("Choice<byte,auto>")
 
            .assert_concrete_type("Choice<byte,int>");
 
        })
 
        .for_variable("choice2", |v| { v
 
            .assert_parser_type("Choice<auto,int>")
 
            .assert_concrete_type("Choice<byte,int>");
 
        })
 
        .for_variable("choice3", |v| { v
 
            .assert_parser_type("Choice<auto,auto>")
 
            .assert_concrete_type("Choice<byte,int>");
 
        })
 
        .for_variable("choice4", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("Choice<byte,int>");
 
        });
 
    });
 
}
 

	
 
#[test]
 
fn test_failed_polymorph_inference() {
 
    Tester::new_single_source_expect_err(
 
        "function call inference mismatch",
 
        "
 
        int poly<T>(T a, T b) { return 0; }
 
        int call() {
 
            byte first_arg = 5;
 
            long second_arg = 2;
 
            return poly(first_arg, second_arg);
 
        }
 
        "
 
    ).error(|e| { e
 
        .assert_num(3)
 
        .assert_ctx_has(0, "poly(first_arg, second_arg)")
 
        .assert_occurs_at(0, "poly")
 
        .assert_msg_has(0, "Conflicting type for polymorphic variable 'T'")
 
        .assert_occurs_at(1, "second_arg")
 
        .assert_msg_has(1, "inferred it to 'long'")
 
        .assert_occurs_at(2, "first_arg")
 
        .assert_msg_has(2, "inferred it to 'byte'");
 
    });
 

	
 
    Tester::new_single_source_expect_err(
 
        "struct literal inference mismatch",
 
        "
 
        struct Pair<T>{ T first, T second }
 
        int call() {
 
            byte first_arg = 5;
 
            long second_arg = 2;
 
            auto pair = Pair{ first: first_arg, second: second_arg };
 
            return 3;
 
        }
 
        "
 
    ).error(|e| { e
 
        .assert_num(3)
 
        .assert_ctx_has(0, "Pair{ first: first_arg, second: second_arg }")
 
        .assert_occurs_at(0, "Pair{")
 
        .assert_msg_has(0, "Conflicting type for polymorphic variable 'T'")
 
        .assert_occurs_at(1, "second_arg")
 
        .assert_msg_has(1, "inferred it to 'long'")
 
        .assert_occurs_at(2, "first_arg")
 
        .assert_msg_has(2, "inferred it to 'byte'");
 
    });
 

	
 
    Tester::new_single_source_expect_err(
 
        "field access inference mismatch",
 
        "
 
        struct Holder<Shazam>{ Shazam a }
 
        int call() {
 
            byte to_hold = 0;
 
            auto holder = Holder{ a: to_hold };
 
            return holder.a;
 
        }
 
        "
 
    ).error(|e| { e
 
        .assert_num(3)
 
        .assert_ctx_has(0, "holder.a")
 
        .assert_occurs_at(0, ".")
 
        .assert_msg_has(0, "Conflicting type for polymorphic variable 'Shazam'")
 
        .assert_msg_has(1, "inferred it to 'byte'")
 
        .assert_msg_has(2, "inferred it to 'int'");
 
    });
 

	
 
    // TODO: Needs better error messages anyway, but this failed before
 
    Tester::new_single_source_expect_err(
 
        "by nested field access",
 
        "
 
        struct Node<T1, T2>{ T1 l, T2 r }
 
        Node<T1, T2> construct<T1, T2>(T1 l, T2 r) { return Node{ l: l, r: r }; }
 
        int fix_poly<T>(Node<T, T> a) { return 0; }
 
        int test() {
 
            byte assigned = 0;
 
            long another = 1;
 
            auto thing = construct(assigned, construct(another, 1));
 
            fix_poly(thing.r);
 
            thing.r.r = assigned;
 
            return 0;
 
        }
 
        ",
 
    );
 
}
 
\ No newline at end of file
src/protocol/tests/parser_monomorphs.rs
Show inline comments
 
/// parser_monomorphs.rs
 
///
 
/// Simple tests to make sure that all of the appropriate monomorphs are 
 
/// instantiated
 

	
 
use super::*;
 

	
 
#[test]
 
fn test_struct_monomorphs() {
 
    Tester::new_single_source_expect_ok(
 
        "no polymorph",
 
        "struct Integer{ int field }"
 
    ).for_struct("Integer", |s| { s
 
        .assert_num_monomorphs(0);
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "single polymorph",
 
        "
 
        struct Number<T>{ T number }
 
        int instantiator() {
 
            auto a = Number<byte>{ number: 0 };
 
            auto b = Number<byte>{ number: 1 };
 
            auto c = Number<int>{ number: 2 };
 
            auto d = Number<long>{ number: 3 };
 
            auto e = Number<Number<short>>{ number: Number{ number: 4 }};
 
            return 0;
 
        }
 
        "
 
    ).for_struct("Number", |s| { s
 
        .assert_has_monomorph("byte")
 
        .assert_has_monomorph("short")
 
        .assert_has_monomorph("int")
 
        .assert_has_monomorph("long")
 
        .assert_has_monomorph("Number<short>")
 
        .assert_num_monomorphs(5);
 
    }).for_function("instantiator", |f| { f
 
        .for_variable("a", |v| {v.assert_concrete_type("Number<byte>");} )
 
        .for_variable("e", |v| {v.assert_concrete_type("Number<Number<short>>");} );
 
    });
 
}
 

	
 
#[test]
 
fn test_enum_monomorphs() {
 
    Tester::new_single_source_expect_ok(
 
        "no polymorph",
 
        "
 
        enum Answer{ Yes, No }
 
        int do_it() { auto a = Answer::Yes; return 0; }
 
        "
 
    ).for_enum("Answer", |e| { e
 
        .assert_num_monomorphs(0);
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "single polymorph",
 
        "
 
        enum Answer<T> { Yes, No }
 
        int instantiator() {
 
            auto a = Answer<byte>::Yes;
 
            auto b = Answer<byte>::No;
 
            auto c = Answer<int>::Yes;
 
            auto d = Answer<Answer<Answer<long>>>::No;
 
            return 0;
 
        }
 
        "
 
    ).for_enum("Answer", |e| { e
 
        .assert_num_monomorphs(3)
 
        .assert_has_monomorph("byte")
 
        .assert_has_monomorph("int")
 
        .assert_has_monomorph("Answer<Answer<long>>");
 
    });
 
}
 
\ No newline at end of file
src/protocol/tests/utils.rs
Show inline comments
 
use crate::protocol::{
 
    ast::*,
 
    inputsource::*,
 
    parser::{
 
        *,
 
        type_table::TypeTable,
 
        symbol_table::SymbolTable,
 
    },
 
};
 

	
 
// Carries information about the test into utility structures for builder-like
 
// assertions
 
#[derive(Clone, Copy)]
 
struct TestCtx<'a> {
 
    test_name: &'a str,
 
    heap: &'a Heap,
 
    modules: &'a Vec<LexedModule>,
 
    types: &'a TypeTable,
 
    symbols: &'a SymbolTable,
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for parsing and compiling
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct Tester {
 
    test_name: String,
 
    sources: Vec<String>
 
}
 

	
 
impl Tester {
 
    /// Constructs a new tester, allows adding multiple sources before compiling
 
    pub(crate) fn new<S: ToString>(test_name: S) -> Self {
 
        Self{
 
            test_name: test_name.to_string(),
 
            sources: Vec::new()
 
        }
 
    }
 

	
 
    /// Utility for quick tests that use a single source file and expect the
 
    /// compilation to succeed.
 
    pub(crate) fn new_single_source_expect_ok<T: ToString, S: ToString>(test_name: T, source: S) -> AstOkTester {
 
        Self::new(test_name)
 
            .with_source(source)
 
            .compile()
 
            .expect_ok()
 
    }
 

	
 
    /// Utility for quick tests that use a single source file and expect the
 
    /// compilation to fail.
 
    pub(crate) fn new_single_source_expect_err<T: ToString, S: ToString>(test_name: T, source: S) -> AstErrTester {
 
        Self::new(test_name)
 
            .with_source(source)
 
            .compile()
 
            .expect_err()
 
    }
 

	
 
    pub(crate) fn with_source<S: ToString>(mut self, source: S) -> Self {
 
        self.sources.push(source.to_string());
 
        self
 
    }
 

	
 
    pub(crate) fn compile(self) -> AstTesterResult {
 
        let mut parser = Parser::new();
 
        for (source_idx, source) in self.sources.into_iter().enumerate() {
 
            let mut cursor = std::io::Cursor::new(source);
 
            let input_source = InputSource::new("", &mut cursor)
 
                .expect(&format!("parsing source {}", source_idx + 1));
 

	
 
            if let Err(err) = parser.feed(input_source) {
 
                return AstTesterResult::Err(AstErrTester::new(self.test_name, err))
 
            }
 
        }
 

	
 
        if let Err(err) = parser.parse() {
 
            return AstTesterResult::Err(AstErrTester::new(self.test_name, err))
 
        }
 

	
 
        AstTesterResult::Ok(AstOkTester::new(self.test_name, parser))
 
    }
 
}
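// Illustrative only: a minimal sketch of how the builder above can be chained
// when a test needs more than one source file. The test name and both source
// strings below are hypothetical and not taken from the test suite.
#[allow(dead_code)]
fn example_multi_source_usage() {
    Tester::new("example: two sources")
        .with_source("struct Pair<T1, T2>{ T1 first, T2 second }")
        .with_source("int test() { return 0; }")
        .compile()
        .expect_ok();
}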
 

	
 
pub(crate) enum AstTesterResult {
 
    Ok(AstOkTester),
 
    Err(AstErrTester)
 
}
 

	
 
impl AstTesterResult {
 
    pub(crate) fn expect_ok(self) -> AstOkTester {
 
        match self {
 
            AstTesterResult::Ok(v) => v,
 
            AstTesterResult::Err(err) => {
 
                let wrapped = ErrorTester{ test_name: &err.test_name, error: &err.error };
 
                println!("DEBUG: Full error:\n{}", &err.error);
 
                assert!(
 
                    false,
 
                    "[{}] Expected compilation to succeed, but it failed with {}",
 
                    err.test_name, wrapped.assert_postfix()
 
                );
 
                unreachable!();
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn expect_err(self) -> AstErrTester {
 
        match self {
 
            AstTesterResult::Ok(ok) => {
 
                assert!(false, "[{}] Expected compilation to fail, but it succeeded", ok.test_name);
 
                unreachable!();
 
            },
 
            AstTesterResult::Err(err) => err,
 
        }
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for successful compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct AstOkTester {
 
    test_name: String,
 
    modules: Vec<LexedModule>,
 
    heap: Heap,
 
    symbols: SymbolTable,
 
    types: TypeTable,
 
}
 

	
 
impl AstOkTester {
 
    fn new(test_name: String, parser: Parser) -> Self {
 
        Self {
 
            test_name,
 
            modules: parser.modules,
 
            heap: parser.heap,
 
            symbols: parser.symbol_table,
 
            types: parser.type_table,
 
        }
 
    }
 

	
 
    pub(crate) fn for_struct<F: Fn(StructTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Struct(definition) = definition {
 
                if String::from_utf8_lossy(&definition.identifier.value) != name {
 
                    continue;
 
                }
 

	
 
                // Found struct with the same name
 
                let tester = StructTester::new(self.ctx(), definition);
 
                f(tester);
 
                found = true;
 
                break
 
            }
 
        }
 

	
 
        if found { return self }
 

	
 
        assert!(
 
            false, "[{}] Failed to find definition for struct '{}'",
 
            self.test_name, name
 
        );
 
        unreachable!()
 
    }
 

	
 
    pub(crate) fn for_enum<F: Fn(EnumTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Enum(definition) = definition {
 
                if String::from_utf8_lossy(&definition.identifier.value) != name {
 
                    continue;
 
                }
 

	
 
                // Found enum with the same name
 
                let tester = EnumTester::new(self.ctx(), definition);
 
                f(tester);
 
                found = true;
 
                break;
 
            }
 
        }
 

	
 
        if found { return self }
 

	
 
        assert!(
 
            false, "[{}] Failed to find definition for enum '{}'",
 
            self.test_name, name
 
        );
 
        unreachable!()
 
    }
 

	
 
    pub(crate) fn for_function<F: Fn(FunctionTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Function(definition) = definition {
 
                if String::from_utf8_lossy(&definition.identifier.value) != name {
 
                    continue;
 
                }
 

	
 
                // Found function
 
                let tester = FunctionTester::new(self.ctx(), definition);
 
                f(tester);
 
                found = true;
 
                break;
 
            }
 
        }
 

	
 
        if found { return self }
 

	
 
        assert!(
 
            false, "[{}] failed to find definition for function '{}'",
 
            self.test_name, name
 
        );
 
        unreachable!();
 
    }
 

	
 
    fn ctx(&self) -> TestCtx {
 
        TestCtx{
 
            test_name: &self.test_name,
 
            modules: &self.modules,
 
            heap: &self.heap,
 
            types: &self.types,
 
            symbols: &self.symbols,
 
        }
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Utilities for successful compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct StructTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a StructDefinition,
 
}
 

	
 
impl<'a> StructTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a StructDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_num_fields(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.def.fields.len(),
 
            "[{}] Expected {} struct fields, but found {} for {}",
 
            self.ctx.test_name, num, self.def.fields.len(), self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_num_monomorphs(self, num: usize) -> Self {
 
        let (is_equal, num_encountered) = has_equal_num_monomorphs(self.ctx, num, self.def.this.upcast());
 
        assert!(
 
            is_equal, "[{}] Expected {} monomorphs, but got {} for {}",
 
            self.ctx.test_name, num, num_encountered, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    /// Asserts that a monomorph exists; separate the polymorphic variable types
    /// with a semicolon.
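    ///
    /// For example (hypothetical values): a `Pair<byte, int>` monomorph of a
    /// two-parameter struct would be asserted with `.assert_has_monomorph("byte;int")`.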
 
    pub(crate) fn assert_has_monomorph(self, serialized_monomorph: &str) -> Self {
 
        let (has_monomorph, serialized) = has_monomorph(self.ctx, self.def.this.upcast(), serialized_monomorph);
 
        assert!(
 
            false, "[{}] Expected to find monomorph {}, but got {} for {}",
 
            self.ctx.test_name, serialized_monomorph, &full_buffer, self.assert_postfix()
 
            has_monomorph, "[{}] Expected to find monomorph {}, but got {} for {}",
 
            self.ctx.test_name, serialized_monomorph, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn for_field<F: Fn(StructFieldTester)>(self, name: &str, f: F) -> Self {
 
        // Find field with specified name
 
        for field in &self.def.fields {
 
            if String::from_utf8_lossy(&field.field.value) == name {
 
                let tester = StructFieldTester::new(self.ctx, field);
 
                f(tester);
 
                return self;
 
            }
 
        }
 

	
 
        assert!(
 
            false, "[{}] Could not find struct field '{}' for {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 
        unreachable!();
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("Struct{ name: ");
 
        v.push_str(&String::from_utf8_lossy(&self.def.identifier.value));
 
        v.push_str(", fields: [");
 
        for (field_idx, field) in self.def.fields.iter().enumerate() {
 
            if field_idx != 0 { v.push_str(", "); }
 
            v.push_str(&String::from_utf8_lossy(&field.field.value));
 
        }
 
        v.push_str("] }");
 
        v
 
    }
 
}
 

	
 
pub(crate) struct StructFieldTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a StructFieldDefinition,
 
}
 

	
 
impl<'a> StructFieldTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a StructFieldDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_parser_type(self, expected: &str) -> Self {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, self.def.parser_type);
 
        assert_eq!(
 
            expected, &serialized_type,
 
            "[{}] Expected type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized_type, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, self.def.parser_type);
 
        format!(
 
            "StructField{{ name: {}, parser_type: {} }}",
 
            String::from_utf8_lossy(&self.def.field.value), serialized_type
 
        )
 
    }
 
}
 

	
 
pub(crate) struct EnumTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a EnumDefinition,
 
}
 

	
 
impl<'a> EnumTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a EnumDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_num_variants(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.def.variants.len(),
 
            "[{}] Expected {} enum variants, but found {} for {}",
 
            self.ctx.test_name, num, self.def.variants.len(), self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_num_monomorphs(self, num: usize) -> Self {
 
        let (is_equal, num_encountered) = has_equal_num_monomorphs(self.ctx, num, self.def.this.upcast());
 
        assert!(
 
            is_equal, "[{}] Expected {} monomorphs, but got {} for {}",
 
            self.ctx.test_name, num, num_encountered, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_has_monomorph(self, serialized_monomorph: &str) -> Self {
 
        let (has_monomorph, serialized) = has_monomorph(self.ctx, self.def.this.upcast(), serialized_monomorph);
 
        assert!(
 
            has_monomorph, "[{}] Expected to find monomorph {}, but got {} for {}",
 
            self.ctx.test_name, serialized_monomorph, serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("Enum{ name: ");
 
        v.push_str(&String::from_utf8_lossy(&self.def.identifier.value));
 
        v.push_str(", variants: [");
 
        for (variant_idx, variant) in self.def.variants.iter().enumerate() {
 
            if variant_idx != 0 { v.push_str(", "); }
 
            v.push_str(&String::from_utf8_lossy(&variant.identifier.value));
 
        }
 
        v.push_str("] }");
 
        v
 
    }
 
}
 

	
 
pub(crate) struct FunctionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a Function,
 
}
 

	
 
impl<'a> FunctionTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a Function) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn for_variable<F: Fn(VariableTester)>(self, name: &str, f: F) -> Self {
 
        // Find the memory statement in order to find the local
 
        let mem_stmt_id = seek_stmt(
 
            self.ctx.heap, self.def.body,
 
            &|stmt| {
 
                if let Statement::Local(local) = stmt {
 
                    if let LocalStatement::Memory(memory) = local {
 
                        let local = &self.ctx.heap[memory.variable];
 
                        if local.identifier.value == name.as_bytes() {
 
                            return true;
 
                        }
 
                    }
 
                }
 

	
 
                false
 
            }
 
        );
 

	
 
        assert!(
 
            mem_stmt_id.is_some(), "[{}] Failed to find variable '{}' in {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 

	
 
        let mem_stmt_id = mem_stmt_id.unwrap();
 
        let local_id = self.ctx.heap[mem_stmt_id].as_memory().variable;
 
        let local = &self.ctx.heap[local_id];
 

	
 
        // Find the assignment expression that follows it
 
        let assignment_id = seek_expr_in_stmt(
 
            self.ctx.heap, self.def.body,
 
            &|expr| {
 
                if let Expression::Assignment(assign_expr) = expr {
 
                    if let Expression::Variable(variable_expr) = &self.ctx.heap[assign_expr.left] {
 
                        if variable_expr.position.offset == local.identifier.position.offset {
 
                            return true;
 
                        }
 
                    }
 
                }
 

	
 
                false
 
            }
 
        );
 

	
 
        assert!(
 
            assignment_id.is_some(), "[{}] Failed to find assignment to variable '{}' in {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 

	
 
        let assignment = &self.ctx.heap[assignment_id.unwrap()];
 

	
 
        // Construct tester and pass to tester function
 
        let tester = VariableTester::new(
 
            self.ctx, self.def.this.upcast(), local, 
 
            assignment.as_assignment()
 
        );
 
        f(tester);
 

	
 
        self
 
    }
 

	
 
    /// Finds a specific expression within a function. There are two matchers:
 
    /// one outer matcher (to find a rough indication of the expression) and an
 
    /// inner matcher to find the exact expression. 
 
    ///
 
    /// The reason being that, for example, a function's body might be littered
 
    /// with addition symbols, so we first match on "some_var + some_other_var",
 
    /// and then match exactly on "+".
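    ///
    /// Hypothetical usage sketch (the asserted type is made up):
    /// `f.for_expression_by_source("some_var + some_other_var", "+", |e| { e.assert_concrete_type("int"); })`
    /// first locates the full sum in the source, then selects the `+` expression within it.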
 
    pub(crate) fn for_expression_by_source<F: Fn(ExpressionTester)>(self, outer_match: &str, inner_match: &str, f: F) -> Self {
 
        // Seek the expression in the source code
 
        assert!(outer_match.contains(inner_match), "improper testing code");
 

	
 
        let module = seek_def_in_modules(
 
            &self.ctx.heap, &self.ctx.modules, self.def.this.upcast()
 
        ).unwrap();
 

	
 
        // Find the first occurrence of the expression after the definition of
        // the function; we'll check later that it is included in the body.
 
        let mut outer_match_idx = self.def.position.offset;
 
        while outer_match_idx < module.source.input.len() {
 
            if module.source.input[outer_match_idx..].starts_with(outer_match.as_bytes()) {
 
                break;
 
            }
 
            outer_match_idx += 1
 
        }
 

	
 
        assert!(
 
            outer_match_idx < module.source.input.len(),
 
            "[{}] Failed to find '{}' within the source that contains {}",
 
            self.ctx.test_name, outer_match, self.assert_postfix()
 
        );
 
        let inner_match_idx = outer_match_idx + outer_match.find(inner_match).unwrap();
 

	
 
        // Use the inner match index to find the expression
 
        let expr_id = seek_expr_in_stmt(
 
            &self.ctx.heap, self.def.body,
 
            &|expr| expr.position().offset == inner_match_idx
 
        );
 
        assert!(
 
            expr_id.is_some(),
 
            "[{}] Failed to find '{}' within the source that contains {} \
 
            (note: expression was found, but not within the specified function)",
 
            self.ctx.test_name, outer_match, self.assert_postfix()
 
        );
 
        let expr_id = expr_id.unwrap();
 

	
 
        // We have the expression, call the testing function
 
        let tester = ExpressionTester::new(
 
            self.ctx, self.def.this.upcast(), &self.ctx.heap[expr_id]
 
        );
 
        f(tester);
 

	
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Function{{ name: {} }}",
 
            &String::from_utf8_lossy(&self.def.identifier.value)
 
        )
 
    }
 
}
 

	
 
pub(crate) struct VariableTester<'a> {
 
    ctx: TestCtx<'a>,
 
    definition_id: DefinitionId,
 
    local: &'a Local,
 
    assignment: &'a AssignmentExpression,
 
}
 

	
 
impl<'a> VariableTester<'a> {
 
    fn new(
 
        ctx: TestCtx<'a>, definition_id: DefinitionId, local: &'a Local, assignment: &'a AssignmentExpression
 
    ) -> Self {
 
        Self{ ctx, definition_id, local, assignment }
 
    }
 

	
 
    pub(crate) fn assert_parser_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_parser_type(&mut serialized, self.ctx.heap, self.local.parser_type);
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected parser type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_concrete_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_concrete_type(
 
            &mut serialized, self.ctx.heap, self.definition_id, 
 
            &self.assignment.concrete_type
 
        );
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected concrete type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Variable{{ name: {} }}",
 
            &String::from_utf8_lossy(&self.local.identifier.value)
 
        )
 
    }
 
}
 

	
 
pub(crate) struct ExpressionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    definition_id: DefinitionId, // of the enclosing function/component
 
    expr: &'a Expression
 
}
 

	
 
impl<'a> ExpressionTester<'a> {
 
    fn new(
 
        ctx: TestCtx<'a>, definition_id: DefinitionId, expr: &'a Expression
 
    ) -> Self {
 
        Self{ ctx, definition_id, expr }
 
    }
 

	
 
    pub(crate) fn assert_concrete_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_concrete_type(
 
            &mut serialized, self.ctx.heap, self.definition_id,
 
            self.expr.get_type()
 
        );
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected concrete type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Expression{{ debug: {:?} }}",
 
            self.expr
 
        )
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct AstErrTester {
 
    test_name: String,
 
    error: ParseError,
 
}
 

	
 
impl AstErrTester {
 
    fn new(test_name: String, error: ParseError) -> Self {
 
        Self{ test_name, error }
 
    }
 

	
 
    pub(crate) fn error<F: Fn(ErrorTester)>(&self, f: F) {
 
        // Maybe multiple errors will be supported in the future
 
        let tester = ErrorTester{ test_name: &self.test_name, error: &self.error };
 
        f(tester)
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Utilities for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct ErrorTester<'a> {
 
    test_name: &'a str,
 
    error: &'a ParseError,
 
}
 

	
 
impl<'a> ErrorTester<'a> {
 
    pub(crate) fn assert_num(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.error.statements.len(),
 
            "[{}] expected error to consist of '{}' parts, but encountered '{}' for {}",
 
            self.test_name, num, self.error.statements.len(), self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_ctx_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].context.contains(msg),
 
            "[{}] expected error statement {}'s context to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_msg_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].message.contains(msg),
 
            "[{}] expected error statement {}'s message to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    /// Seeks the index of the pattern in the context message, then checks if
 
    /// the input position corresponds to that index.
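    ///
    /// For example, given an error context of `"poly(first_arg, second_arg)"`,
    /// `assert_occurs_at(0, "poly")` only passes if the reported error column is
    /// the column at which `poly` starts within that context.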
 
    pub(crate) fn assert_occurs_at(self, idx: usize, pattern: &str) -> Self {
 
        let pos = self.error.statements[idx].context.find(pattern);
 
        assert!(
 
            pos.is_some(),
 
            "[{}] incorrect occurs_at: '{}' could not be found in the context for {}",
 
            self.test_name, pattern, self.assert_postfix()
 
        );
 
        let pos = pos.unwrap();
 
        let col = self.error.statements[idx].position.col();
 
        assert_eq!(
 
            pos + 1, col,
 
            "[{}] Expected error to occur at column {}, but found it at {} for {}",
 
            self.test_name, pos + 1, col, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("error: [");
 
        for (idx, stmt) in self.error.statements.iter().enumerate() {
 
            if idx != 0 {
 
                v.push_str(", ");
 
            }
 

	
 
            v.push_str(&format!("{{ context: {}, message: {} }}", &stmt.context, stmt.message));
 
        }
 
        v.push(']');
 
        v
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Generic utilities
 
//------------------------------------------------------------------------------
 

	
 
fn has_equal_num_monomorphs<'a>(ctx: TestCtx<'a>, num: usize, definition_id: DefinitionId) -> (bool, usize) {
 
    let type_def = ctx.types.get_base_definition(&definition_id).unwrap();
 
    let num_on_type = type_def.monomorphs.len();
 
    
 
    (num_on_type == num, num_on_type)
 
}
 

	
 
fn has_monomorph<'a>(ctx: TestCtx<'a>, definition_id: DefinitionId, serialized_monomorph: &str) -> (bool, String) {
 
    let type_def = ctx.types.get_base_definition(&definition_id).unwrap();
 

	
 
    let mut full_buffer = String::new();
 
    let mut has_match = false;
 
    full_buffer.push('[');
 
    for (monomorph_idx, monomorph) in type_def.monomorphs.iter().enumerate() {
 
        let mut buffer = String::new();
 
        for (element_idx, monomorph_element) in monomorph.iter().enumerate() {
 
            if element_idx != 0 { buffer.push(';'); }
 
            serialize_concrete_type(&mut buffer, ctx.heap, definition_id, monomorph_element);
 
        }
 

	
 
        if buffer == serialized_monomorph {
 
            // Found an exact match
 
            has_match = true;
 
        }
 

	
 
        if monomorph_idx != 0 {
 
            full_buffer.push_str(", ");
 
        }
 
        full_buffer.push('"');
 
        full_buffer.push_str(&buffer);
 
        full_buffer.push('"');
 
    }
 
    full_buffer.push(']');
 

	
 
    (has_match, full_buffer)
 
}
 

	
 
fn serialize_parser_type(buffer: &mut String, heap: &Heap, id: ParserTypeId) {
 
    use ParserTypeVariant as PTV;
 

	
 
    let p = &heap[id];
 
    match &p.variant {
 
        PTV::Message => buffer.push_str("msg"),
 
        PTV::Bool => buffer.push_str("bool"),
 
        PTV::Byte => buffer.push_str("byte"),
 
        PTV::Short => buffer.push_str("short"),
 
        PTV::Int => buffer.push_str("int"),
 
        PTV::Long => buffer.push_str("long"),
 
        PTV::String => buffer.push_str("string"),
 
        PTV::IntegerLiteral => buffer.push_str("intlit"),
 
        PTV::Inferred => buffer.push_str("auto"),
 
        PTV::Array(sub_id) => {
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push_str("[]");
 
        },
 
        PTV::Input(sub_id) => {
 
            buffer.push_str("in<");
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push('>');
 
        },
 
        PTV::Output(sub_id) => {
 
            buffer.push_str("out<");
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push('>');
 
        },
 
        PTV::Symbolic(symbolic) => {
 
            buffer.push_str(&String::from_utf8_lossy(&symbolic.identifier.value));
 
            if !symbolic.poly_args2.is_empty() {
 
                buffer.push('<');
 
                for (poly_idx, poly_arg) in symbolic.poly_args2.iter().enumerate() {
 
                    if poly_idx != 0 { buffer.push(','); }
 
                    serialize_parser_type(buffer, heap, *poly_arg);
 
                }
 
                buffer.push('>');
 
            }
 
        }
 
    }
 
}
 

	
 
fn serialize_concrete_type(buffer: &mut String, heap: &Heap, def: DefinitionId, concrete: &ConcreteType) {
 
    // Retrieve polymorphic variables, if present (both procedure types and
    // user-defined struct/enum types may be serialized here)
 
    let poly_vars = match &heap[def] {
 
        Definition::Function(definition) => &definition.poly_vars,
 
        Definition::Component(definition) => &definition.poly_vars,
 
        Definition::Struct(definition) => &definition.poly_vars,
        Definition::Enum(definition) => &definition.poly_vars,
 
        _ => unreachable!("Error in testing utility: unexpected type for concrete type serialization"),
 
    };
 

	
 
    fn serialize_recursive(
 
        buffer: &mut String, heap: &Heap, poly_vars: &Vec<Identifier>, concrete: &ConcreteType, mut idx: usize
 
    ) -> usize {
 
        use ConcreteTypePart as CTP;
 

	
 
        let part = &concrete.parts[idx];
 
        match part {
 
            CTP::Marker(poly_idx) => {
 
                buffer.push_str(&String::from_utf8_lossy(&poly_vars[*poly_idx].value));
 
            },
 
            CTP::Void => buffer.push_str("void"),
 
            CTP::Message => buffer.push_str("msg"),
 
            CTP::Bool => buffer.push_str("bool"),
 
            CTP::Byte => buffer.push_str("byte"),
 
            CTP::Short => buffer.push_str("short"),
 
            CTP::Int => buffer.push_str("int"),
 
            CTP::Long => buffer.push_str("long"),
 
            CTP::String => buffer.push_str("string"),
 
            CTP::Array => {
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push_str("[]");
 
                idx += 1;
 
            },
 
            CTP::Slice => {
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push_str("[..]");
 
                idx += 1;
 
            },
 
            CTP::Input => {
 
                buffer.push_str("in<");
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push('>');
 
                idx += 1;
 
            },
 
            CTP::Output => {
 
                buffer.push_str("out<");
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push('>');
 
                idx += 1
 
            },
 
            CTP::Instance(definition_id, num_sub) => {
 
                let definition_name = heap[*definition_id].identifier();
 
                buffer.push_str(&String::from_utf8_lossy(&definition_name.value));
 
                if *num_sub != 0 {
 
                    buffer.push('<');
 
                    for sub_idx in 0..*num_sub {
 
                        if sub_idx != 0 { buffer.push(','); }
 
                        idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                    }
 
                    buffer.push('>');
 
                }
 
                idx += 1;
 
            }
 
        }
 

	
 
        idx
 
    }
 

	
 
    serialize_recursive(buffer, heap, poly_vars, concrete, 0);
 
}
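// Illustrative note, based on the recursion above rather than on the ConcreteType
// definition itself: the parts are consumed in pre-order, so a hypothetical
// `Number<Number<short>>` corresponds to the sequence
// `[Instance(Number, 1), Instance(Number, 1), Short]` and serializes to the
// string "Number<Number<short>>" that the monomorph assertions compare against.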
 

	
 
fn seek_def_in_modules<'a>(heap: &Heap, modules: &'a [LexedModule], def_id: DefinitionId) -> Option<&'a LexedModule> {
 
    for module in modules {
 
        let root = &heap.protocol_descriptions[module.root_id];
 
        for definition in &root.definitions {
 
            if *definition == def_id {
 
                return Some(module)
 
            }
 
        }
 
    }
 

	
 
    None
 
}
 

	
 
fn seek_stmt<F: Fn(&Statement) -> bool>(heap: &Heap, start: StatementId, f: &F) -> Option<StatementId> {
 
    let stmt = &heap[start];
 
    if f(stmt) { return Some(start); }
 

	
 
    // This statement wasn't it, try to recurse
 
    let matched = match stmt {
 
        Statement::Block(block) => {
 
            for sub_id in &block.statements {
 
                if let Some(id) = seek_stmt(heap, *sub_id, f) {
 
                    return Some(id);
 
                }
 
            }
 

	
 
            None
 
        },
 
        Statement::Labeled(stmt) => seek_stmt(heap, stmt.body, f),
 
        Statement::If(stmt) => {
 
            if let Some(id) = seek_stmt(heap,stmt.true_body, f) {
 
                return Some(id);
 
            } else if let Some(id) = seek_stmt(heap, stmt.false_body, f) {
 
                return Some(id);
 
            }
 
            None
 
        },
 
        Statement::While(stmt) => seek_stmt(heap, stmt.body, f),
 
        Statement::Synchronous(stmt) => seek_stmt(heap, stmt.body, f),
 
        _ => None
 
    };
 

	
 
    matched
 
}
 

	
 
fn seek_expr_in_expr<F: Fn(&Expression) -> bool>(heap: &Heap, start: ExpressionId, f: &F) -> Option<ExpressionId> {
 
    let expr = &heap[start];
 
    if f(expr) { return Some(start); }
 

	
 
    match expr {
 
        Expression::Assignment(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.left, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.right, f))
 
        },
 
        Expression::Conditional(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.test, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.true_expression, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.false_expression, f))
 
        },
 
        Expression::Binary(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.left, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.right, f))
 
        },
 
        Expression::Unary(expr) => {
 
            seek_expr_in_expr(heap, expr.expression, f)
 
        },
 
        Expression::Indexing(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.subject, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.index, f))
 
        },
 
        Expression::Slicing(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.subject, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.from_index, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.to_index, f))
 
        },
 
        Expression::Select(expr) => {
 
            seek_expr_in_expr(heap, expr.subject, f)
 
        },
 
        Expression::Array(expr) => {
 
            for element in &expr.elements {
 
                if let Some(id) = seek_expr_in_expr(heap, *element, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Expression::Literal(expr) => {
 
            if let Literal::Struct(lit) = &expr.value {
 
                for field in &lit.fields {
 
                    if let Some(id) = seek_expr_in_expr(heap, field.value, f) {
 
                        return Some(id)
 
                    }
 
                }
 
            }
 
            None
 
        },
 
        Expression::Call(expr) => {
 
            for arg in &expr.arguments {
 
                if let Some(id) = seek_expr_in_expr(heap, *arg, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Expression::Variable(expr) => {
 
            None
 
        }
 
    }
 
}
 

	
 
fn seek_expr_in_stmt<F: Fn(&Expression) -> bool>(heap: &Heap, start: StatementId, f: &F) -> Option<ExpressionId> {
 
    let stmt = &heap[start];
 

	
 
    match stmt {
 
        Statement::Block(stmt) => {
 
            for stmt_id in &stmt.statements {
 
                if let Some(id) = seek_expr_in_stmt(heap, *stmt_id, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Statement::Labeled(stmt) => {
 
            seek_expr_in_stmt(heap, stmt.body, f)
 
        },
 
        Statement::If(stmt) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, stmt.test, f))
 
            .or_else(|| seek_expr_in_stmt(heap, stmt.true_body, f))
 
            .or_else(|| seek_expr_in_stmt(heap, stmt.false_body, f))
 
        },
 
        Statement::While(stmt) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, stmt.test, f))
 
            .or_else(|| seek_expr_in_stmt(heap, stmt.body, f))
 
        },
 
        Statement::Synchronous(stmt) => {
 
            seek_expr_in_stmt(heap, stmt.body, f)
 
        },
 
        Statement::Return(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression, f)
 
        },
 
        Statement::Assert(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression, f)
 
        },
 
        Statement::New(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression.upcast(), f)
 
        },
 
        Statement::Expression(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression, f)
 
        },
 
        _ => None
 
    }
 
}
 
\ No newline at end of file
0 comments (0 inline, 0 general)