Changeset - 7cf6df93d16d
MH - 2022-03-24 16:39:48
contact@maxhenger.nl
Move builtin definitions to std lib
12 files changed with 222 insertions and 169 deletions:
bin-compiler/src/main.rs
 
use std::fs::File;
 
use std::io::Read;
 

	
 
use clap::{App, Arg};
 
use reowolf_rs as rw;
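// A minimal usage sketch for the resulting binary, based on the arguments
// declared below (the file names are purely illustrative):
//
//     rwc --input std.pdl --input main.pdl --threads 2 --debug
//
// `--input`/`-i` may be given multiple times, `--threads`/`-t` defaults to "1",
// and `--debug`/`-d` enables debug logging.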
 

	
 
fn main() {
 
    let app = App::new("rwc")
 
        .author("Henger, M.")
 
        .version(env!("CARGO_PKG_VERSION"))
 
        .about("Reowolf compiler")
 
        .arg(
 
            Arg::new("input")
 
                .long("input")
 
                .short('i')
 
                .help("input files")
 
                .required(true)
 
                .takes_value(true)
 
                .multiple_occurrences(true)
 
        )
 
        .arg(
 
            Arg::new("threads")
 
                .long("threads")
 
                .short('t')
 
                .help("number of runtime threads")
 
                .default_value("1")
 
                .takes_value(true)
 
        )
 
        .arg(
 
            Arg::new("debug")
 
                .long("debug")
 
                .short('d')
 
                .help("enable debug logging")
 
        );
 

	
 
    // Retrieve arguments and convert
 
    let app = app.get_matches();
 
    let input_files = app.values_of("input");
 
    if input_files.is_none() {
 
        println!("ERROR: Expected at least one input file");
 
        return;
 
    }
 

	
 
    let num_threads = app.value_of("threads").unwrap();
 
    let num_threads = match num_threads.parse::<i32>() {
 
        Ok(num_threads) => {
 
            if num_threads < 0 || num_threads > 255 {
 
                println!("ERROR: Number of threads must be a number between 0 and 255");
 
                return;
 
            }
 

	
 
            num_threads as u32
 
        },
 
        Err(err) => {
 
            println!("ERROR: Failed to parse number of threads\nbecause: {}", err);
 
            return;
 
        }
 
    };
 

	
 
    let debug_enabled = app.is_present("debug");
 

	
 
    // Add input files to file buffer
 
    let input_files = input_files.unwrap();
 
    assert!(input_files.len() > 0); // because arg is required
 

	
 
    let mut builder = rw::ProtocolDescriptionBuilder::new().expect("create protocol description builder");
 
    let mut file_buffer = Vec::with_capacity(4096);
 

	
 
    for input_file in input_files {
 
        print!("Adding file: {} ... ", input_file);
 
        let mut file = match File::open(input_file) {
 
            Ok(file) => file,
 
            Err(err) => {
 
                println!("FAILED (to open file)\nbecause:\n{}", err);
 
                return;
 
            }
 
        };
 

	
 
        file_buffer.clear();
 
        if let Err(err) = file.read_to_end(&mut file_buffer) {
 
            println!("FAILED (to read file)\nbecause:\n{}", err);
 
            return;
 
        }
 

	
 
        if let Err(err) = builder.add(input_file.to_string(), file_buffer.clone()) {
 
            println!("FAILED (to tokenize file)\nbecause:\n{}", err);
            return;
 
        }
 

	
 
        println!("Success");
 
    }
 

	
 
    // Compile the program
 
    print!("Compiling program ... ");
 
    let protocol_description = match builder.compile() {
 
        Ok(pd) => pd,
 
        Err(err) => {
 
            println!("FAILED\nbecause:\n{}", err);
 
            return;
 
        }
 
    };
 

	
 
    println!("Success");
 

	
 
    // Make sure there is a nameless module with a main component
 
    print!("Creating main component ... ");
 
    let runtime = rw::runtime2::Runtime::new(num_threads, debug_enabled, protocol_description);
 
    if let Err(err) = runtime.create_component(b"", b"main") {
 
        use rw::ComponentCreationError as CCE;
 
        let reason = match err {
 
            CCE::ModuleDoesntExist => "Input files did not contain a nameless module (that should contain the 'main' component)",
 
            CCE::DefinitionDoesntExist => "Input files did not contain a component called 'main'",
 
            CCE::DefinitionNotComponent => "Input file contained a 'main' function, but not a 'main' component",
 
            _ => "Unexpected error"
 
        };
 
        println!("FAILED\nbecause:\n{} (raw error: {:?})", reason, err);
 
        return;
 
    }
 

	
 
    println!("Success");
 
    println!("Now running until all components have exited");
 
    println!("--------------------------------------------\n\n");
 
}
 
\ No newline at end of file
src/protocol/ast.rs
 
use std::fmt;
 
use std::fmt::{Debug, Display, Formatter};
 
use std::ops::{Index, IndexMut};
 

	
 
use super::arena::{Arena, Id};
 
use crate::collections::StringRef;
 
use crate::protocol::input_source::InputSpan;
 
use crate::protocol::TypeId;
 

	
 
/// Helper macro that defines a type alias for an AST element ID. In this case
 
/// only used to alias the `Id<T>` types.
 
macro_rules! define_aliased_ast_id {
 
    // Variant where we just define the alias, without any indexing
 
    ($name:ident, $parent:ty) => {
 
        pub type $name = $parent;
 
    };
 
    // Variant where we define the type, and the Index and IndexMut traits
 
    (
 
        $name:ident, $parent:ty, 
 
        index($indexed_type:ty, $indexed_arena:ident)
 
    ) => {
 
        define_aliased_ast_id!($name, $parent);
 
        impl Index<$name> for Heap {
 
            type Output = $indexed_type;
 
            fn index(&self, index: $name) -> &Self::Output {
 
                &self.$indexed_arena[index]
 
            }
 
        }
 

	
 
        impl IndexMut<$name> for Heap {
 
            fn index_mut(&mut self, index: $name) -> &mut Self::Output {
 
                &mut self.$indexed_arena[index]
 
            }
 
        }
 
    };
 
    // Variant where we define type, Index(Mut) traits and an allocation function
 
    (
 
        $name:ident, $parent:ty,
 
        index($indexed_type:ty, $indexed_arena:ident),
 
        alloc($fn_name:ident)
 
    ) => {
 
        define_aliased_ast_id!($name, $parent, index($indexed_type, $indexed_arena));
 
        impl Heap {
 
            pub fn $fn_name(&mut self, f: impl FnOnce($name) -> $indexed_type) -> $name {
 
                self.$indexed_arena.alloc_with_id(|id| f(id))
 
            }
 
        }
 
    };
 
}
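// As a rough illustration (not the literal macro output), an invocation such as
// `define_aliased_ast_id!(PragmaId, Id<Pragma>, index(Pragma, pragmas), alloc(alloc_pragma));`
// expands to approximately:
//
//     pub type PragmaId = Id<Pragma>;
//     impl Index<PragmaId> for Heap { /* indexes into self.pragmas */ }
//     impl IndexMut<PragmaId> for Heap { /* indexes mutably into self.pragmas */ }
//     impl Heap {
//         pub fn alloc_pragma(&mut self, f: impl FnOnce(PragmaId) -> Pragma) -> PragmaId {
//             self.pragmas.alloc_with_id(|id| f(id))
//         }
//     }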
 

	
 
/// Helper macro that defines a wrapper type for a particular variant of an AST
 
/// element ID. Only used to define single-wrapping IDs.
 
macro_rules! define_new_ast_id {
 
    // Variant where we just define the new type, without any indexing
 
    ($name:ident, $parent:ty) => {
 
        #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 
        pub struct $name (pub(crate) $parent);
 

	
 
        #[allow(dead_code)]
 
        impl $name {
 
            pub(crate) fn new_invalid() -> Self     { Self(<$parent>::new_invalid()) }
 
            pub(crate) fn is_invalid(&self) -> bool { self.0.is_invalid() }
 
            pub fn upcast(self) -> $parent          { self.0 }
 
        }
 
    };
 
    // Variant where we define the type, and the Index and IndexMut traits
 
    (
 
        $name:ident, $parent:ty, 
 
        index($indexed_type:ty, $wrapper_type:path, $indexed_arena:ident)
 
    ) => {
 
        define_new_ast_id!($name, $parent);
 
        impl Index<$name> for Heap {
 
            type Output = $indexed_type;
 
            fn index(&self, index: $name) -> &Self::Output {
 
                if let $wrapper_type(v) = &self.$indexed_arena[index.0] {
 
                    v
 
                } else {
 
                    unreachable!()
 
                }
 
            }
 
        }
 

	
 
        impl IndexMut<$name> for Heap {
 
            fn index_mut(&mut self, index: $name) -> &mut Self::Output {
 
                if let $wrapper_type(v) = &mut self.$indexed_arena[index.0] {
 
                    v
 
                } else {
 
                    unreachable!()
 
                }
 
            }
 
        }
 
    };
 
    // Variant where we define the type, the Index and IndexMut traits, and an allocation function
 
    (
 
        $name:ident, $parent:ty, 
 
        index($indexed_type:ty, $wrapper_type:path, $indexed_arena:ident),
 
        alloc($fn_name:ident)
 
    ) => {
 
        define_new_ast_id!($name, $parent, index($indexed_type, $wrapper_type, $indexed_arena));
 
        impl Heap {
 
            pub fn $fn_name(&mut self, f: impl FnOnce($name) -> $indexed_type) -> $name {
 
                $name(
 
                    self.$indexed_arena.alloc_with_id(|id| {
 
                        $wrapper_type(f($name(id)))
 
                    })
 
                )
 
            }
 
        }
 
    }
 
}
 

	
 
define_aliased_ast_id!(RootId, Id<Root>, index(Root, protocol_descriptions), alloc(alloc_protocol_description));
 
define_aliased_ast_id!(PragmaId, Id<Pragma>, index(Pragma, pragmas), alloc(alloc_pragma));
 
define_aliased_ast_id!(ImportId, Id<Import>, index(Import, imports), alloc(alloc_import));
 
define_aliased_ast_id!(VariableId, Id<Variable>, index(Variable, variables), alloc(alloc_variable));
 

	
 
define_aliased_ast_id!(DefinitionId, Id<Definition>, index(Definition, definitions));
 
define_new_ast_id!(StructDefinitionId, DefinitionId, index(StructDefinition, Definition::Struct, definitions), alloc(alloc_struct_definition));
 
define_new_ast_id!(EnumDefinitionId, DefinitionId, index(EnumDefinition, Definition::Enum, definitions), alloc(alloc_enum_definition));
 
define_new_ast_id!(UnionDefinitionId, DefinitionId, index(UnionDefinition, Definition::Union, definitions), alloc(alloc_union_definition));
 
define_new_ast_id!(ProcedureDefinitionId, DefinitionId, index(ProcedureDefinition, Definition::Procedure, definitions), alloc(alloc_procedure_definition));
 

	
 
define_aliased_ast_id!(StatementId, Id<Statement>, index(Statement, statements));
 
define_new_ast_id!(BlockStatementId, StatementId, index(BlockStatement, Statement::Block, statements), alloc(alloc_block_statement));
 
define_new_ast_id!(EndBlockStatementId, StatementId, index(EndBlockStatement, Statement::EndBlock, statements), alloc(alloc_end_block_statement));
 
define_new_ast_id!(LocalStatementId, StatementId, index(LocalStatement, Statement::Local, statements));
 
define_new_ast_id!(MemoryStatementId, LocalStatementId);
 
define_new_ast_id!(ChannelStatementId, LocalStatementId);
 
define_new_ast_id!(LabeledStatementId, StatementId, index(LabeledStatement, Statement::Labeled, statements), alloc(alloc_labeled_statement));
 
define_new_ast_id!(IfStatementId, StatementId, index(IfStatement, Statement::If, statements), alloc(alloc_if_statement));
 
define_new_ast_id!(EndIfStatementId, StatementId, index(EndIfStatement, Statement::EndIf, statements), alloc(alloc_end_if_statement));
 
define_new_ast_id!(WhileStatementId, StatementId, index(WhileStatement, Statement::While, statements), alloc(alloc_while_statement));
 
define_new_ast_id!(EndWhileStatementId, StatementId, index(EndWhileStatement, Statement::EndWhile, statements), alloc(alloc_end_while_statement));
 
define_new_ast_id!(BreakStatementId, StatementId, index(BreakStatement, Statement::Break, statements), alloc(alloc_break_statement));
 
define_new_ast_id!(ContinueStatementId, StatementId, index(ContinueStatement, Statement::Continue, statements), alloc(alloc_continue_statement));
 
define_new_ast_id!(SynchronousStatementId, StatementId, index(SynchronousStatement, Statement::Synchronous, statements), alloc(alloc_synchronous_statement));
 
define_new_ast_id!(EndSynchronousStatementId, StatementId, index(EndSynchronousStatement, Statement::EndSynchronous, statements), alloc(alloc_end_synchronous_statement));
 
define_new_ast_id!(ForkStatementId, StatementId, index(ForkStatement, Statement::Fork, statements), alloc(alloc_fork_statement));
 
define_new_ast_id!(EndForkStatementId, StatementId, index(EndForkStatement, Statement::EndFork, statements), alloc(alloc_end_fork_statement));
 
define_new_ast_id!(SelectStatementId, StatementId, index(SelectStatement, Statement::Select, statements), alloc(alloc_select_statement));
 
define_new_ast_id!(EndSelectStatementId, StatementId, index(EndSelectStatement, Statement::EndSelect, statements), alloc(alloc_end_select_statement));
 
define_new_ast_id!(ReturnStatementId, StatementId, index(ReturnStatement, Statement::Return, statements), alloc(alloc_return_statement));
 
define_new_ast_id!(GotoStatementId, StatementId, index(GotoStatement, Statement::Goto, statements), alloc(alloc_goto_statement));
 
define_new_ast_id!(NewStatementId, StatementId, index(NewStatement, Statement::New, statements), alloc(alloc_new_statement));
 
define_new_ast_id!(ExpressionStatementId, StatementId, index(ExpressionStatement, Statement::Expression, statements), alloc(alloc_expression_statement));
 

	
 
define_aliased_ast_id!(ExpressionId, Id<Expression>, index(Expression, expressions));
 
define_new_ast_id!(AssignmentExpressionId, ExpressionId, index(AssignmentExpression, Expression::Assignment, expressions), alloc(alloc_assignment_expression));
 
define_new_ast_id!(BindingExpressionId, ExpressionId, index(BindingExpression, Expression::Binding, expressions), alloc(alloc_binding_expression));
 
define_new_ast_id!(ConditionalExpressionId, ExpressionId, index(ConditionalExpression, Expression::Conditional, expressions), alloc(alloc_conditional_expression));
 
define_new_ast_id!(BinaryExpressionId, ExpressionId, index(BinaryExpression, Expression::Binary, expressions), alloc(alloc_binary_expression));
 
define_new_ast_id!(UnaryExpressionId, ExpressionId, index(UnaryExpression, Expression::Unary, expressions), alloc(alloc_unary_expression));
 
define_new_ast_id!(IndexingExpressionId, ExpressionId, index(IndexingExpression, Expression::Indexing, expressions), alloc(alloc_indexing_expression));
 
define_new_ast_id!(SlicingExpressionId, ExpressionId, index(SlicingExpression, Expression::Slicing, expressions), alloc(alloc_slicing_expression));
 
define_new_ast_id!(SelectExpressionId, ExpressionId, index(SelectExpression, Expression::Select, expressions), alloc(alloc_select_expression));
 
define_new_ast_id!(LiteralExpressionId, ExpressionId, index(LiteralExpression, Expression::Literal, expressions), alloc(alloc_literal_expression));
 
define_new_ast_id!(CastExpressionId, ExpressionId, index(CastExpression, Expression::Cast, expressions), alloc(alloc_cast_expression));
 
define_new_ast_id!(CallExpressionId, ExpressionId, index(CallExpression, Expression::Call, expressions), alloc(alloc_call_expression));
 
define_new_ast_id!(VariableExpressionId, ExpressionId, index(VariableExpression, Expression::Variable, expressions), alloc(alloc_variable_expression));
 

	
 
define_aliased_ast_id!(ScopeId, Id<Scope>, index(Scope, scopes), alloc(alloc_scope));
 

	
 
#[derive(Debug)]
 
pub struct Heap {
 
    // Root arena, contains the entry point for different modules. Each root
 
    // contains lists of IDs that correspond to the other arenas.
 
    pub(crate) protocol_descriptions: Arena<Root>,
 
    // Contents of a file, these are the elements the `Root` elements refer to
 
    pragmas: Arena<Pragma>,
 
    pub(crate) imports: Arena<Import>,
 
    pub(crate) variables: Arena<Variable>,
 
    pub(crate) definitions: Arena<Definition>,
 
    pub(crate) statements: Arena<Statement>,
 
    pub(crate) expressions: Arena<Expression>,
 
    pub(crate) scopes: Arena<Scope>,
 
}
 

	
 
impl Heap {
 
    pub fn new() -> Heap {
 
        Heap {
 
            // string_alloc: StringAllocator::new(),
 
            protocol_descriptions: Arena::new(),
 
            pragmas: Arena::new(),
 
            imports: Arena::new(),
 
            variables: Arena::new(),
 
            definitions: Arena::new(),
 
            statements: Arena::new(),
 
            expressions: Arena::new(),
 
            scopes: Arena::new(),
 
        }
 
    }
 
    pub fn alloc_memory_statement(
 
        &mut self,
 
        f: impl FnOnce(MemoryStatementId) -> MemoryStatement,
 
    ) -> MemoryStatementId {
 
        MemoryStatementId(LocalStatementId(self.statements.alloc_with_id(|id| {
 
            Statement::Local(LocalStatement::Memory(
 
                f(MemoryStatementId(LocalStatementId(id)))
 
            ))
 
        })))
 
    }
 
    pub fn alloc_channel_statement(
 
        &mut self,
 
        f: impl FnOnce(ChannelStatementId) -> ChannelStatement,
 
    ) -> ChannelStatementId {
 
        ChannelStatementId(LocalStatementId(self.statements.alloc_with_id(|id| {
 
            Statement::Local(LocalStatement::Channel(
 
                f(ChannelStatementId(LocalStatementId(id)))
 
            ))
 
        })))
 
    }
 
}
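// A small sketch of the closure-based allocation pattern used above; the
// `definition_id` value is a hypothetical, previously allocated DefinitionId:
//
//     let scope_id = heap.alloc_scope(|id| Scope::new(id, ScopeAssociation::Definition(definition_id)));
//
// The closure receives the ID that the arena is about to assign, which lets the
// allocated element store its own ID in its `this` field.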
 

	
 
impl Index<MemoryStatementId> for Heap {
 
    type Output = MemoryStatement;
 
    fn index(&self, index: MemoryStatementId) -> &Self::Output {
 
        match &self.statements[index.0.0] {
 
            Statement::Local(LocalStatement::Memory(v)) => v,
 
            _ => unreachable!(),
 
        }
 
    }
 
}
 

	
 
impl Index<ChannelStatementId> for Heap {
 
    type Output = ChannelStatement;
 
    fn index(&self, index: ChannelStatementId) -> &Self::Output {
 
        match &self.statements[index.0.0] {
 
            Statement::Local(LocalStatement::Channel(v)) => v,
 
            _ => unreachable!(),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct Root {
 
    pub this: RootId,
 
    // Phase 1: parser
 
    // pub position: InputPosition,
 
    pub pragmas: Vec<PragmaId>,
 
    pub imports: Vec<ImportId>,
 
    pub definitions: Vec<DefinitionId>,
 
}
 

	
 
impl Root {
 
    pub fn get_definition_ident(&self, h: &Heap, id: &[u8]) -> Option<DefinitionId> {
 
        for &def in self.definitions.iter() {
 
            if h[def].identifier().value.as_bytes() == id {
 
                return Some(def);
 
            }
 
        }
 
        None
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Pragma {
 
    Version(PragmaVersion),
 
    Module(PragmaModule),
 
}
 

	
 
impl Pragma {
 
    pub(crate) fn as_module(&self) -> &PragmaModule {
 
        match self {
 
            Pragma::Module(pragma) => pragma,
 
            _ => unreachable!("Tried to obtain {:?} as PragmaModule", self),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct PragmaVersion {
 
    pub this: PragmaId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of full pragma
 
    pub version: u64,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct PragmaModule {
 
    pub this: PragmaId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of full pragma
 
    pub value: Identifier,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Import {
 
    Module(ImportModule),
 
    Symbols(ImportSymbols)
 
}
 

	
 
impl Import {
 
    pub(crate) fn span(&self) -> InputSpan {
 
        match self {
 
            Import::Module(v) => v.span,
 
            Import::Symbols(v) => v.span,
 
        }
 
    }
 

	
 
    pub(crate) fn as_module(&self) -> &ImportModule {
 
        match self {
 
            Import::Module(m) => m,
 
            _ => unreachable!("Unable to cast 'Import' to 'ImportModule'")
 
        }
 
    }
 
    pub(crate) fn as_symbols(&self) -> &ImportSymbols {
 
        match self {
 
            Import::Symbols(m) => m,
 
            _ => unreachable!("Unable to cast 'Import' to 'ImportSymbols'")
 
        }
 
    }
 
    pub(crate) fn as_symbols_mut(&mut self) -> &mut ImportSymbols {
 
        match self {
 
            Import::Symbols(m) => m,
 
            _ => unreachable!("Unable to cast 'Import' to 'ImportSymbols'")
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ImportModule {
 
    pub this: ImportId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub module: Identifier,
 
    pub alias: Identifier,
 
    pub module_id: RootId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct AliasedSymbol {
 
    pub name: Identifier,
 
    pub alias: Option<Identifier>,
 
    pub definition_id: DefinitionId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ImportSymbols {
 
    pub this: ImportId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub module: Identifier,
 
    pub module_id: RootId,
 
    pub symbols: Vec<AliasedSymbol>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct Identifier {
 
    pub span: InputSpan,
 
    pub value: StringRef<'static>,
 
}
 

	
 
impl Identifier {
 
    pub(crate) const fn new_empty(span: InputSpan) -> Identifier {
 
        return Identifier{
 
            span,
 
            value: StringRef::new_empty(),
 
        };
 
    }
 
}
 

	
 
impl PartialEq for Identifier {
 
    fn eq(&self, other: &Self) -> bool {
 
        return self.value == other.value
 
    }
 
}
 

	
 
impl Display for Identifier {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.value.as_str())
 
    }
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum ParserTypeVariant {
 
    // Special builtin, only usable by the compiler and not constructable by the
 
    // programmer
 
    Void,
 
    InputOrOutput,
 
    ArrayLike,
 
    IntegerLike,
 
    // Basic builtin
 
    Message,
 
    Bool,
 
    UInt8, UInt16, UInt32, UInt64,
 
    SInt8, SInt16, SInt32, SInt64,
 
    Character, String,
 
    // Literals (need to get concrete builtin type during typechecking)
 
    IntegerLiteral,
 
    // Marker for inference
 
    Inferred,
 
    // Builtins expecting one subsequent type
 
    Array,
 
    Input,
 
    Output,
 
    // Tuple: expecting any number of elements. Note that the parser type can
 
    // have one-valued tuples; these will be filtered out later during type
 
    // checking.
 
    Tuple(u32), // u32 = number of subsequent types
 
    // User-defined types
 
    PolymorphicArgument(DefinitionId, u32), // u32 = index into polymorphic variables
 
    Definition(DefinitionId, u32), // u32 = number of subsequent types in the type tree.
 
}
 

	
 
impl ParserTypeVariant {
 
    pub(crate) fn num_embedded(&self) -> usize {
 
        use ParserTypeVariant::*;
 

	
 
        match self {
 
            Void | IntegerLike |
 
            Message | Bool |
 
            UInt8 | UInt16 | UInt32 | UInt64 |
 
            SInt8 | SInt16 | SInt32 | SInt64 |
 
            Character | String | IntegerLiteral |
 
            Inferred | PolymorphicArgument(_, _) =>
 
                0,
 
            ArrayLike | InputOrOutput | Array | Input | Output =>
 
                1,
 
            Definition(_, num) | Tuple(num) => *num as usize,
 
        }
 
    }
 
}
 

	
 
/// ParserTypeElement is an element of the type tree. An element may be
 
/// implicit, meaning that the user didn't specify the type, but it was set by
 
/// the compiler.
 
#[derive(Debug, Clone)]
 
pub struct ParserTypeElement {
 
    pub element_span: InputSpan, // span of this element, not including the child types
 
    pub variant: ParserTypeVariant,
 
}
 

	
 
/// ParserType is a specification of a type during the parsing phase and initial
 
/// linker/validator phase of the compilation process. These types may be
 
/// (partially) inferred or represent literals (e.g. an integer whose byte size is
 
/// not yet determined).
 
///
 
/// Its contents are the depth-first serialization of the type tree. Each node
 
/// is a type that may accept polymorphic arguments. The polymorphic arguments
 
/// are then the children of the node.
 
#[derive(Debug, Clone)]
 
pub struct ParserType {
 
    pub elements: Vec<ParserTypeElement>,
 
    pub full_span: InputSpan,
 
}
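// A worked example of the depth-first layout described above (illustrative): an
// output port carrying an array of unsigned 32-bit integers is stored as the
// element sequence
//
//     [Output, Array, UInt32]
//
// `Output` and `Array` each expect one embedded type, so their child follows
// immediately; `UInt32` expects none and therefore terminates the subtree.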
 

	
 
impl ParserType {
 
    pub(crate) fn iter_embedded(&self, parent_idx: usize) -> ParserTypeIter {
 
        ParserTypeIter::new(&self.elements, parent_idx)
 
    }
 
}
 

	
 
/// Iterator over the embedded elements of a specific element.
 
pub struct ParserTypeIter<'a> {
 
    pub elements: &'a [ParserTypeElement],
 
    pub cur_embedded_idx: usize,
 
}
 

	
 
impl<'a> ParserTypeIter<'a> {
 
    fn new(elements: &'a [ParserTypeElement], parent_idx: usize) -> Self {
 
        debug_assert!(parent_idx < elements.len(), "parent index exceeds number of elements in ParserType");
 
        if elements[parent_idx].variant.num_embedded() == 0 {
 
            // Parent element does not have any embedded types, place
 
            // `cur_embedded_idx` at end so we will always return `None`
 
            Self{ elements, cur_embedded_idx: elements.len() }
 
        } else {
 
            // Parent element has an embedded type
 
            Self{ elements, cur_embedded_idx: parent_idx + 1 }
 
        }
 
    }
 
}
 

	
 
impl<'a> Iterator for ParserTypeIter<'a> {
 
    type Item = &'a [ParserTypeElement];
 

	
 
    fn next(&mut self) -> Option<Self::Item> {
 
        let elements_len = self.elements.len();
 
        if self.cur_embedded_idx >= elements_len {
 
            return None;
 
        }
 

	
 
        // Seek to the end of the subtree
 
        let mut depth = 1;
 
        let start_element = self.cur_embedded_idx;
 
        while self.cur_embedded_idx < elements_len {
 
            let cur_element = &self.elements[self.cur_embedded_idx];
 
            let depth_change = cur_element.variant.num_embedded() as i32 - 1;
 
            depth += depth_change;
 
            debug_assert!(depth >= 0, "illegally constructed ParserType: {:?}", self.elements);
 

	
 
            self.cur_embedded_idx += 1;
 
            if depth == 0 {
 
                break;
 
            }
 
        }
 

	
 
        debug_assert!(depth == 0, "illegally constructed ParserType: {:?}", self.elements);
 
        return Some(&self.elements[start_element..self.cur_embedded_idx]);
 
    }
 
}
 

	
 
/// ConcreteType is the representation of a type after the type inference and
 
/// checker is finished. These are fully typed.
 
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
 
pub enum ConcreteTypePart {
 
    // Special types (cannot be explicitly constructed by the programmer)
 
    Void,
 
    // Builtin types without nested types
 
    Message,
 
    Bool,
 
    UInt8, UInt16, UInt32, UInt64,
 
    SInt8, SInt16, SInt32, SInt64,
 
    Character, String,
 
    // Builtin types with one nested type
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    Pointer,
 
    // Tuple: variable number of nested types, will never be 1
 
    Tuple(u32),
 
    // User defined type with any number of nested types
 
    Instance(DefinitionId, u32),    // instance of data type
 
    Function(ProcedureDefinitionId, u32),    // instance of function
 
    Component(ProcedureDefinitionId, u32),   // instance of a connector
 
}
 

	
 
impl ConcreteTypePart {
 
    pub(crate) fn num_embedded(&self) -> u32 {
 
        use ConcreteTypePart::*;
 

	
 
        match self {
 
            Void | Message | Bool |
 
            UInt8 | UInt16 | UInt32 | UInt64 |
 
            SInt8 | SInt16 | SInt32 | SInt64 |
 
            Character | String =>
 
                0,
 
            Array | Slice | Input | Output | Pointer =>
 
                1,
 
            Tuple(num_embedded) => *num_embedded,
 
            Instance(_, num_embedded) => *num_embedded,
 
            Function(_, num_embedded) => *num_embedded,
 
            Component(_, num_embedded) => *num_embedded,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Eq, PartialEq)]
 
pub struct ConcreteType {
 
    pub(crate) parts: Vec<ConcreteTypePart>
 
}
 

	
 
impl Default for ConcreteType {
 
    fn default() -> Self {
 
        Self{ parts: Vec::new() }
 
    }
 
}
 

	
 
impl ConcreteType {
 
    /// Returns an iterator over the subtrees that are type arguments (e.g. an
 
    /// array element's type, or a polymorphic type's arguments) to the
 
    /// provided parent type (specified by its index in the `parts` array).
 
    pub(crate) fn embedded_iter(&self, parent_part_idx: usize) -> ConcreteTypeIter {
 
        return ConcreteTypeIter::new(&self.parts, parent_part_idx);
 
    }
 

	
 
    /// Construct a human-readable name for the type. Because this performs
 
    /// a string allocation, don't use it for anything other than displaying the
 
    /// type to the user.
 
    pub(crate) fn display_name(&self, heap: &Heap) -> String {
 
        return Self::type_parts_display_name(self.parts.as_slice(), heap);
 
    }
 

	
 
    // --- Utilities that operate on slice of parts
 

	
 
    /// Given the starting position of a type tree, determine the exclusive
 
    /// ending index.
 
    pub(crate) fn type_parts_subtree_end_idx(parts: &[ConcreteTypePart], start_idx: usize) -> usize {
 
        let mut depth = 1;
 
        let num_parts = parts.len();
 
        debug_assert!(start_idx < num_parts);
 

	
 
        for part_idx in start_idx..parts.len() {
 
            let depth_change = parts[part_idx].num_embedded() as i32 - 1;
 
            depth += depth_change;
 
            debug_assert!(depth >= 0);
 

	
 
            if depth == 0 {
 
                return part_idx + 1;
 
            }
 
        }
 

	
 
        debug_assert!(false, "incorrectly constructed ConcreteType instance");
 
        return 0;
 
    }
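    // A hypothetical example of the computation above: given the parts
    //
    //     [Instance(pair_id, 2), UInt32, Array, Bool]
    //
    // (some user-defined `Pair<u32, bool[]>`), `start_idx = 1` returns 2 (the
    // `UInt32` subtree is a single part), while `start_idx = 2` returns 4 (the
    // `Array` subtree spans `Array, Bool`).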
 

	
 
    /// Produces a human-readable representation of the concrete type parts
 
    fn type_parts_display_name(parts: &[ConcreteTypePart], heap: &Heap) -> String {
 
        let mut name = String::with_capacity(128);
 
        let _final_idx = Self::render_type_part_at(parts, heap, 0, &mut name);
 
        debug_assert_eq!(_final_idx, parts.len());
 

	
 
        return name;
 
    }
 

	
 
    /// Produces a human-readable representation of a single type part. Lower
 
    /// level utility for `type_parts_display_name`.
 
    fn render_type_part_at(parts: &[ConcreteTypePart], heap: &Heap, mut idx: usize, target: &mut String) -> usize {
 
        use ConcreteTypePart as CTP;
 
        use crate::protocol::parser::token_parsing::*;
 

	
 
        let cur_idx = idx;
 
        idx += 1; // increment by 1, because it always happens
 

	
 
        match parts[cur_idx] {
 
            CTP::Void => { target.push_str("void"); },
 
            CTP::Message => { target.push_str(KW_TYPE_MESSAGE_STR); },
 
            CTP::Bool => { target.push_str(KW_TYPE_BOOL_STR); },
 
            CTP::UInt8 => { target.push_str(KW_TYPE_UINT8_STR); },
 
            CTP::UInt16 => { target.push_str(KW_TYPE_UINT16_STR); },
 
            CTP::UInt32 => { target.push_str(KW_TYPE_UINT32_STR); },
 
            CTP::UInt64 => { target.push_str(KW_TYPE_UINT64_STR); },
 
            CTP::SInt8 => { target.push_str(KW_TYPE_SINT8_STR); },
 
            CTP::SInt16 => { target.push_str(KW_TYPE_SINT16_STR); },
 
            CTP::SInt32 => { target.push_str(KW_TYPE_SINT32_STR); },
 
            CTP::SInt64 => { target.push_str(KW_TYPE_SINT64_STR); },
 
            CTP::Character => { target.push_str(KW_TYPE_CHAR_STR); },
 
            CTP::String => { target.push_str(KW_TYPE_STRING_STR); },
 
            CTP::Array | CTP::Slice => {
 
                idx = Self::render_type_part_at(parts, heap, idx, target);
 
                target.push_str("[]");
 
            },
 
            CTP::Input => {
 
                target.push_str(KW_TYPE_IN_PORT_STR);
 
                target.push('<');
 
                idx = Self::render_type_part_at(parts, heap, idx, target);
 
                target.push('>');
 
            },
 
            CTP::Output => {
 
                target.push_str(KW_TYPE_OUT_PORT_STR);
 
                target.push('<');
 
                idx = Self::render_type_part_at(parts, heap, idx, target);
 
                target.push('>');
 
            },
 
            CTP::Pointer => {
 
                target.push('*');
 
                idx = Self::render_type_part_at(parts, heap, idx, target);
 
            }
 
            CTP::Tuple(num_parts) => {
 
                target.push('(');
 
                if num_parts != 0 {
 
                    idx = Self::render_type_part_at(parts, heap, idx, target);
 
                    for _ in 1..num_parts {
 
                        target.push(',');
 
                        idx = Self::render_type_part_at(parts, heap, idx, target);
 
                    }
 
                }
 
                target.push(')');
 
            },
 
            CTP::Instance(definition_id, num_poly_args) => {
 
                idx = Self::render_definition_type_parts_at(parts, heap, definition_id, num_poly_args, idx, target);
 
            }
 
            CTP::Function(definition_id, num_poly_args) |
 
            CTP::Component(definition_id, num_poly_args) => {
 
                idx = Self::render_definition_type_parts_at(parts, heap, definition_id.upcast(), num_poly_args, idx, target);
 
            }
 
        }
 

	
 
        idx
 
    }
 

	
 
    fn render_definition_type_parts_at(parts: &[ConcreteTypePart], heap: &Heap, definition_id: DefinitionId, num_poly_args: u32, mut idx: usize, target: &mut String) -> usize {
 
        let definition = &heap[definition_id];
 
        target.push_str(definition.identifier().value.as_str());
 

	
 
        if num_poly_args != 0 {
 
            target.push('<');
 
            for poly_arg_idx in 0..num_poly_args {
 
                if poly_arg_idx != 0 {
 
                    target.push(',');
 
                }
 
                idx = Self::render_type_part_at(parts, heap, idx, target);
 
            }
 
            target.push('>');
 
        }
 

	
 
        return idx;
 
    }
 
}
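// Assuming the keyword strings resolve to "u32" and "bool", `display_name` for
// the hypothetical parts [Instance(pair_id, 2), UInt32, Array, Bool] yields
// "Pair<u32,bool[]>": the instance prints its identifier and angle brackets,
// while the `Array` part prints its child followed by "[]".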
 

	
 
#[derive(Debug)]
 
pub struct ConcreteTypeIter<'a> {
 
    parts: &'a [ConcreteTypePart],
 
    idx_embedded: u32,
 
    num_embedded: u32,
 
    part_idx: usize,
 
}
 

	
 
impl<'a> ConcreteTypeIter<'a> {
 
    pub(crate) fn new(parts: &'a[ConcreteTypePart], parent_idx: usize) -> Self {
 
        let num_embedded = parts[parent_idx].num_embedded();
 
        return ConcreteTypeIter{
 
            parts,
 
            idx_embedded: 0,
 
            num_embedded,
 
            part_idx: parent_idx + 1,
 
        }
 
    }
 
}
 

	
 
impl<'a> Iterator for ConcreteTypeIter<'a> {
 
    type Item = &'a [ConcreteTypePart];
 

	
 
    fn next(&mut self) -> Option<Self::Item> {
 
        if self.idx_embedded == self.num_embedded {
 
            return None;
 
        }
 

	
 
        // Retrieve the subtree of interest
 
        let start_idx = self.part_idx;
 
        let end_idx = ConcreteType::type_parts_subtree_end_idx(&self.parts, start_idx);
 

	
 
        self.idx_embedded += 1;
 
        self.part_idx = end_idx;
 

	
 
        return Some(&self.parts[start_idx..end_idx]);
 
    }
 
}
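// A short sketch of how the iterator is typically driven, continuing the
// hypothetical [Instance(pair_id, 2), UInt32, Array, Bool] example:
//
//     for embedded in concrete_type.embedded_iter(0) {
//         // first yields &[UInt32], then &[Array, Bool]
//     }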
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub enum ScopeAssociation {
 
    Definition(DefinitionId),
 
    Block(BlockStatementId),
 
    If(IfStatementId, bool), // if true, then body of "if", otherwise body of "else"
 
    While(WhileStatementId),
 
    Synchronous(SynchronousStatementId),
 
    SelectCase(SelectStatementId, u32), // index is select case
 
}
 

	
 
/// `Scope` is a helper that links scopes in two directions. It doesn't
 
/// actually contain any information associated with the scope; that may be
 
/// found on the AST elements that `Scope` points to.
 
#[derive(Debug, Clone)]
 
pub struct Scope {
 
    // Relation to other scopes
 
    pub this: ScopeId,
 
    pub parent: Option<ScopeId>,
 
    pub nested: Vec<ScopeId>,
 
    // Locally available variables/labels
 
    pub association: ScopeAssociation,
 
    pub variables: Vec<VariableId>,
 
    pub labels: Vec<LabeledStatementId>,
 
    // Location trackers/counters
 
    pub relative_pos_in_parent: i32,
 
    pub first_unique_id_in_scope: i32,
 
    pub next_unique_id_in_scope: i32,
 
}
 

	
 
impl Scope {
 
    pub(crate) fn new(id: ScopeId, association: ScopeAssociation) -> Self {
 
        return Self{
 
            this: id,
 
            parent: None,
 
            nested: Vec::new(),
 
            association,
 
            variables: Vec::new(),
 
            labels: Vec::new(),
 
            relative_pos_in_parent: -1,
 
            first_unique_id_in_scope: -1,
 
            next_unique_id_in_scope: -1,
 
        }
 
    }
 
}
 

	
 
impl Scope {
 
    pub(crate) fn new_invalid(this: ScopeId) -> Self {
 
        return Self{
 
            this,
 
            parent: None,
 
            nested: Vec::new(),
 
            association: ScopeAssociation::Definition(DefinitionId::new_invalid()),
 
            variables: Vec::new(),
 
            labels: Vec::new(),
 
            relative_pos_in_parent: -1,
 
            first_unique_id_in_scope: -1,
 
            next_unique_id_in_scope: -1,
 
        };
 
    }
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum VariableKind {
 
    Parameter,      // in parameter list of function/component
 
    Local,          // declared in function/component body
 
    Binding,        // may be bound to in a binding expression (determined in validator/linker)
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct Variable {
 
    pub this: VariableId,
 
    // Parsing
 
    pub kind: VariableKind,
 
    pub parser_type: ParserType,
 
    pub identifier: Identifier,
 
    // Validator/linker
 
    pub relative_pos_in_parent: i32,
 
    pub unique_id_in_scope: i32, // Temporary fix until proper bytecode/asm is generated
 
}
 

	
 
#[derive(Debug)]
 
pub enum Definition {
 
    Struct(StructDefinition),
 
    Enum(EnumDefinition),
 
    Union(UnionDefinition),
 
    Procedure(ProcedureDefinition),
 
}
 

	
 
impl Definition {
 
    pub fn is_struct(&self) -> bool {
 
        match self {
 
            Definition::Struct(_) => true,
 
            _ => false
 
        }
 
    }
 
    pub(crate) fn as_struct(&self) -> &StructDefinition {
 
        match self {
 
            Definition::Struct(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'StructDefinition'"),
 
        }
 
    }
 
    pub(crate) fn as_struct_mut(&mut self) -> &mut StructDefinition {
 
        match self {
 
            Definition::Struct(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'StructDefinition'"),
 
        }
 
    }
 
    pub fn is_enum(&self) -> bool {
 
        match self {
 
            Definition::Enum(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub(crate) fn as_enum(&self) -> &EnumDefinition {
 
        match self {
 
            Definition::Enum(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'EnumDefinition'"),
 
        }
 
    }
 
    pub(crate) fn as_enum_mut(&mut self) -> &mut EnumDefinition {
 
        match self {
 
            Definition::Enum(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'EnumDefinition'"),
 
        }
 
    }
 
    pub fn is_union(&self) -> bool {
 
        match self {
 
            Definition::Union(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub(crate) fn as_union(&self) -> &UnionDefinition {
 
        match self {
 
            Definition::Union(result) => result, 
 
            _ => panic!("Unable to cast 'Definition' to 'UnionDefinition'"),
 
        }
 
    }
 

	
 
    pub(crate) fn as_union_mut(&mut self) -> &mut UnionDefinition {
 
        match self {
 
            Definition::Union(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'UnionDefinition'"),
 
        }
 
    }
 

	
 
    pub fn is_procedure(&self) -> bool {
 
        match self {
 
            Definition::Procedure(_) => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    pub(crate) fn as_procedure(&self) -> &ProcedureDefinition {
 
        match self {
 
            Definition::Procedure(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `ProcedureDefinition`"),
 
        }
 
    }
 

	
 
    pub(crate) fn as_procedure_mut(&mut self) -> &mut ProcedureDefinition {
 
        match self {
 
            Definition::Procedure(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `ProcedureDefinition`"),
 
        }
 
    }
 

	
 
    pub fn defined_in(&self) -> RootId {
 
        match self {
 
            Definition::Struct(def) => def.defined_in,
 
            Definition::Enum(def) => def.defined_in,
 
            Definition::Union(def) => def.defined_in,
 
            Definition::Procedure(def) => def.defined_in,
 
        }
 
    }
 

	
 
    pub fn identifier(&self) -> &Identifier {
 
        match self {
 
            Definition::Struct(def) => &def.identifier,
 
            Definition::Enum(def) => &def.identifier,
 
            Definition::Union(def) => &def.identifier,
 
            Definition::Procedure(def) => &def.identifier,
 
        }
 
    }
 
    pub fn poly_vars(&self) -> &Vec<Identifier> {
 
        match self {
 
            Definition::Struct(def) => &def.poly_vars,
 
            Definition::Enum(def) => &def.poly_vars,
 
            Definition::Union(def) => &def.poly_vars,
 
            Definition::Procedure(def) => &def.poly_vars,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct StructFieldDefinition {
 
    pub span: InputSpan,
 
    pub field: Identifier,
 
    pub parser_type: ParserType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct StructDefinition {
 
    pub this: StructDefinitionId,
 
    pub defined_in: RootId,
 
    // Symbol scanning
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Parsing
 
    pub fields: Vec<StructFieldDefinition>
 
}
 

	
 
impl StructDefinition {
 
    pub(crate) fn new_empty(
 
        this: StructDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ this, defined_in, span, identifier, poly_vars, fields: Vec::new() }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub enum EnumVariantValue {
 
    None,
 
    Integer(i64),
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EnumVariantDefinition {
 
    pub identifier: Identifier,
 
    pub value: EnumVariantValue,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EnumDefinition {
 
    pub this: EnumDefinitionId,
 
    pub defined_in: RootId,
 
    // Symbol scanning
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Parsing
 
    pub variants: Vec<EnumVariantDefinition>,
 
}
 

	
 
impl EnumDefinition {
 
    pub(crate) fn new_empty(
 
        this: EnumDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ this, defined_in, span, identifier, poly_vars, variants: Vec::new() }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct UnionVariantDefinition {
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub value: Vec<ParserType>, // if empty, then union variant does not contain any embedded types
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct UnionDefinition {
 
    pub this: UnionDefinitionId,
 
    pub defined_in: RootId,
 
    // Phase 1: symbol scanning
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Phase 2: parsing
 
    pub variants: Vec<UnionVariantDefinition>,
 
}
 

	
 
impl UnionDefinition {
 
    pub(crate) fn new_empty(
 
        this: UnionDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ this, defined_in, span, identifier, poly_vars, variants: Vec::new() }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 
pub enum ProcedureKind {
 
    Function, // with return type
 
    Primitive, // without return type
 
    Composite,
 
}
 

	
 
/// Monomorphized instantiation of a procedure (or the sole instantiation of a
 
/// non-polymorphic procedure).
 
#[derive(Debug)]
 
pub struct ProcedureDefinitionMonomorph {
 
    pub argument_types: Vec<TypeId>,
 
    pub expr_info: Vec<ExpressionInfo>
 
}
 

	
 
impl ProcedureDefinitionMonomorph {
 
    pub(crate) fn new_invalid() -> Self {
 
        return Self{
 
            argument_types: Vec::new(),
 
            expr_info: Vec::new(),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub struct ExpressionInfo {
 
    pub type_id: TypeId,
 
    pub variant: ExpressionInfoVariant,
 
}
 

	
 
impl ExpressionInfo {
 
    pub(crate) fn new_invalid() -> Self {
 
        return Self{
 
            type_id: TypeId::new_invalid(),
 
            variant: ExpressionInfoVariant::Generic,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub enum ExpressionInfoVariant {
 
    Generic,
 
    Procedure(TypeId, u32), // procedure TypeID and its monomorph index
 
    Select(i32), // index
 
}
 

	
 
impl ExpressionInfoVariant {
 
    pub(crate) fn as_select(&self) -> i32 {
 
        match self {
 
            ExpressionInfoVariant::Select(v) => *v,
 
            _ => unreachable!(),
 
        }
 
    }
 

	
 
    pub(crate) fn as_procedure(&self) -> (TypeId, u32) {
 
        match self {
 
            ExpressionInfoVariant::Procedure(type_id, monomorph_index) => (*type_id, *monomorph_index),
 
            _ => unreachable!(),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug)]
 
pub enum ProcedureSource {
 
    FuncUserDefined,
 
    CompUserDefined,
 
    // Builtin functions, available to user
 
    FuncGet,
 
    FuncPut,
 
    FuncFires,
 
    FuncCreate,
 
    FuncLength,
 
    FuncAssert,
 
    FuncPrint,
 
    // Builtin functions, not available to the user
 
    FuncSelectStart,
 
    FuncSelectRegisterCasePort,
 
    FuncSelectWait,
 
    // Builtin components, available to user
 
    CompRandomU32, // TODO: Remove, temporary thing
 
}
 

	
 
impl ProcedureSource {
 
    pub(crate) fn is_builtin(&self) -> bool {
 
        match self {
 
            ProcedureSource::FuncUserDefined | ProcedureSource::CompUserDefined => false,
 
            _ => true,
 
        }
 
    }
 
}
 

	
 

	
 
/// Generic storage for functions, primitive components and composite
 
/// components.
 
// Note that we will have function definitions for builtin functions as well. In
 
// that case the span, the identifier span and the body are all invalid.
 
#[derive(Debug)]
 
pub struct ProcedureDefinition {
 
    pub this: ProcedureDefinitionId,
 
    pub defined_in: RootId,
 
    // Symbol scanning
 
    pub builtin: bool,
 
    pub kind: ProcedureKind,
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Parser
 
    pub source: ProcedureSource,
 
    pub return_type: Option<ParserType>, // present on functions, not components
 
    pub parameters: Vec<VariableId>,
 
    pub scope: ScopeId,
 
    pub body: BlockStatementId,
 
    // Monomorphization of typed procedures
 
    pub monomorphs: Vec<ProcedureDefinitionMonomorph>,
 
}
 

	
 
impl ProcedureDefinition {
 
    pub(crate) fn new_empty(
 
        this: ProcedureDefinitionId, defined_in: RootId, span: InputSpan,
 
        kind: ProcedureKind, identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self {
 
            this, defined_in,
 
            builtin: false,
 
            span,
 
            kind, identifier, poly_vars,
 
            source: ProcedureSource::FuncUserDefined,
 
            return_type: None,
 
            parameters: Vec::new(),
 
            scope: ScopeId::new_invalid(),
 
            body: BlockStatementId::new_invalid(),
 
            monomorphs: Vec::new(),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Statement {
 
    Block(BlockStatement),
 
    EndBlock(EndBlockStatement),
 
    Local(LocalStatement),
 
    Labeled(LabeledStatement),
 
    If(IfStatement),
 
    EndIf(EndIfStatement),
 
    While(WhileStatement),
 
    EndWhile(EndWhileStatement),
 
    Break(BreakStatement),
 
    Continue(ContinueStatement),
 
    Synchronous(SynchronousStatement),
 
    EndSynchronous(EndSynchronousStatement),
 
    Fork(ForkStatement),
 
    EndFork(EndForkStatement),
 
    Select(SelectStatement),
 
    EndSelect(EndSelectStatement),
 
    Return(ReturnStatement),
 
    Goto(GotoStatement),
 
    New(NewStatement),
 
    Expression(ExpressionStatement),
 
}
 

	
 
impl Statement {
 
    pub fn as_new(&self) -> &NewStatement {
 
        match self {
 
            Statement::New(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `NewStatement`"),
 
        }
 
    }
 

	
 
    pub fn span(&self) -> InputSpan {
 
        match self {
 
            Statement::Block(v) => v.span,
 
            Statement::Local(v) => v.span(),
 
            Statement::Labeled(v) => v.label.span,
 
            Statement::If(v) => v.span,
 
            Statement::While(v) => v.span,
 
            Statement::Break(v) => v.span,
 
            Statement::Continue(v) => v.span,
 
            Statement::Synchronous(v) => v.span,
 
            Statement::Fork(v) => v.span,
 
            Statement::Select(v) => v.span,
 
            Statement::Return(v) => v.span,
 
            Statement::Goto(v) => v.span,
 
            Statement::New(v) => v.span,
 
            Statement::Expression(v) => v.span,
 
            Statement::EndBlock(_)
 
            | Statement::EndIf(_)
 
            | Statement::EndWhile(_)
 
            | Statement::EndSynchronous(_)
 
            | Statement::EndFork(_)
 
            | Statement::EndSelect(_) => unreachable!(),
 
        }
 
    }
 
    pub fn link_next(&mut self, next: StatementId) {
 
        match self {
 
            Statement::Block(stmt) => stmt.next = next,
 
            Statement::EndBlock(stmt) => stmt.next = next,
 
            Statement::Local(stmt) => match stmt {
 
                LocalStatement::Channel(stmt) => stmt.next = next,
 
                LocalStatement::Memory(stmt) => stmt.next = next,
 
            },
 
            Statement::EndIf(stmt) => stmt.next = next,
 
            Statement::EndWhile(stmt) => stmt.next = next,
 
            Statement::EndSynchronous(stmt) => stmt.next = next,
 
            Statement::EndFork(stmt) => stmt.next = next,
 
            Statement::EndSelect(stmt) => stmt.next = next,
 
            Statement::New(stmt) => stmt.next = next,
 
            Statement::Expression(stmt) => stmt.next = next,
 
            Statement::Return(_)
 
            | Statement::Break(_)
 
            | Statement::Continue(_)
 
            | Statement::Synchronous(_)
 
            | Statement::Fork(_)
 
            | Statement::Select(_)
 
            | Statement::Goto(_)
 
            | Statement::While(_)
 
            | Statement::Labeled(_)
 
            | Statement::If(_) => unreachable!(),
 
        }
 
    }
 

	
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BlockStatement {
 
    pub this: BlockStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the complete block
 
    pub statements: Vec<StatementId>,
 
    pub end_block: EndBlockStatementId,
 
    // Phase 2: linker
 
    pub scope: ScopeId,
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndBlockStatement {
 
    pub this: EndBlockStatementId,
 
    // Parser
 
    pub start_block: BlockStatementId,
 
    // Validation/Linking
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum LocalStatement {
 
    Memory(MemoryStatement),
 
    Channel(ChannelStatement),
 
}
 

	
 
impl LocalStatement {
 
    pub fn this(&self) -> LocalStatementId {
 
        match self {
 
            LocalStatement::Memory(stmt) => stmt.this.upcast(),
 
            LocalStatement::Channel(stmt) => stmt.this.upcast(),
 
        }
 
    }
 
    pub fn span(&self) -> InputSpan {
 
        match self {
 
            LocalStatement::Channel(v) => v.span,
 
            LocalStatement::Memory(v) => v.span,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct MemoryStatement {
 
    pub this: MemoryStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub variable: VariableId,
 
    pub initial_expr: AssignmentExpressionId,
 
    // Phase 2: linker
 
    pub next: StatementId,
 
}
 

	
 
/// ChannelStatement is the declaration of an input and output port associated
 
/// with the same channel. Note that the polarity of the ports is from the
 
/// point of view of the component. So an output port is something that a
 
/// component uses to send data over (i.e. it is the "input end" of the
 
/// channel), and vice versa.
 
#[derive(Debug, Clone)]
 
pub struct ChannelStatement {
 
    pub this: ChannelStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "channel" keyword
 
    pub from: VariableId, // output
 
    pub to: VariableId,   // input
 
    // Phase 2: linker
 
    pub relative_pos_in_parent: i32,
 
    pub next: StatementId,
 
}
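// An illustrative reading of the polarity note above (assuming `put`/`get` are
// the usual send/receive builtins listed in `ProcedureSource`): `from` is the
// output port this component would `put` values into, and `to` is the input
// port from which those values can be `get` again; both variables are
// introduced by the same channel declaration.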
 

	
 
#[derive(Debug, Clone)]
 
pub struct LabeledStatement {
 
    pub this: LabeledStatementId,
 
    // Phase 1: parser
 
    pub label: Identifier,
 
    pub body: StatementId,
 
    // Phase 2: linker
 
    pub relative_pos_in_parent: i32,
 
    pub in_sync: SynchronousStatementId, // may be invalid
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct IfStatement {
 
    pub this: IfStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "if" keyword
 
    pub test: ExpressionId,
 
    pub true_case: IfStatementCase,
 
    pub false_case: Option<IfStatementCase>,
 
    pub end_if: EndIfStatementId,
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub struct IfStatementCase {
 
    pub body: StatementId,
 
    pub scope: ScopeId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndIfStatement {
 
    pub this: EndIfStatementId,
 
    pub start_if: IfStatementId,
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct WhileStatement {
 
    pub this: WhileStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "while" keyword
 
    pub test: ExpressionId,
 
    pub scope: ScopeId,
 
    pub body: StatementId,
 
    pub end_while: EndWhileStatementId,
 
    pub in_sync: SynchronousStatementId, // may be invalid
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndWhileStatement {
 
    pub this: EndWhileStatementId,
 
    pub start_while: WhileStatementId,
 
    // Phase 2: linker
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BreakStatement {
 
    pub this: BreakStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "break" keyword
 
    pub label: Option<Identifier>,
 
    // Phase 2: linker
 
    pub target: EndWhileStatementId, // invalid if not yet set
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ContinueStatement {
 
    pub this: ContinueStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "continue" keyword
 
    pub label: Option<Identifier>,
 
    // Phase 2: linker
 
    pub target: WhileStatementId, // invalid if not yet set
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SynchronousStatement {
 
    pub this: SynchronousStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "sync" keyword
 
    pub scope: ScopeId,
 
    pub body: StatementId,
 
    pub end_sync: EndSynchronousStatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndSynchronousStatement {
 
    pub this: EndSynchronousStatementId,
 
    pub start_sync: SynchronousStatementId,
 
    // Phase 2: linker
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ForkStatement {
 
    pub this: ForkStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "fork" keyword
 
    pub left_body: StatementId,
 
    pub right_body: Option<StatementId>,
 
    pub end_fork: EndForkStatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndForkStatement {
 
    pub this: EndForkStatementId,
 
    pub start_fork: ForkStatementId,
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SelectStatement {
 
    pub this: SelectStatementId,
 
    pub span: InputSpan, // of the "select" keyword
 
    pub cases: Vec<SelectCase>,
 
    pub end_select: EndSelectStatementId,
 
    pub relative_pos_in_parent: i32,
 
    pub next: StatementId, // note: the select statement will be transformed into other AST elements, this `next` jumps to those replacement statements
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SelectCase {
 
    // The guard statement of a `select` is either a MemoryStatement or an
 
    // ExpressionStatement. Nothing else is allowed by the initial parsing
 
    pub guard: StatementId,
 
    pub body: StatementId,
 
    pub scope: ScopeId,
 
    // Phase 2: Validation and Linking
 
    pub involved_ports: Vec<(CallExpressionId, ExpressionId)>, // call to `get` and its port argument
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndSelectStatement {
 
    pub this: EndSelectStatementId,
 
    pub start_select: SelectStatementId,
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ReturnStatement {
 
    pub this: ReturnStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "return" keyword
 
    pub expressions: Vec<ExpressionId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct GotoStatement {
 
    pub this: GotoStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "goto" keyword
 
    pub label: Identifier,
 
    // Phase 2: linker
 
    pub target: LabeledStatementId, // invalid if not yet set
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct NewStatement {
 
    pub this: NewStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "new" keyword
 
    pub expression: CallExpressionId,
 
    // Phase 2: linker
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ExpressionStatement {
 
    pub this: ExpressionStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub expression: ExpressionId,
 
    // Phase 2: linker
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
 
pub enum ExpressionParent {
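    // Records which construct owns an expression; filled in by the
    // validator/linker after parsing.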
 
    None, // only set during initial parsing
 
    Memory(MemoryStatementId),
 
    If(IfStatementId),
 
    While(WhileStatementId),
 
    Return(ReturnStatementId),
 
    New(NewStatementId),
 
    ExpressionStmt(ExpressionStatementId),
 
    Expression(ExpressionId, u32) // index within the parent expression (e.g. LHS or RHS of the expression, or index in an array literal, etc.)
 
}
 

	
 
impl ExpressionParent {
 
    pub fn is_new(&self) -> bool {
 
        match self {
 
            ExpressionParent::New(_) => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    pub fn as_expression(&self) -> ExpressionId {
 
        match self {
 
            ExpressionParent::Expression(id, _) => *id,
 
            _ => panic!("called as_expression() on {:?}", self),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Expression {
 
    Assignment(AssignmentExpression),
 
    Binding(BindingExpression),
 
    Conditional(ConditionalExpression),
 
    Binary(BinaryExpression),
 
    Unary(UnaryExpression),
 
    Indexing(IndexingExpression),
 
    Slicing(SlicingExpression),
 
    Select(SelectExpression),
 
    Literal(LiteralExpression),
 
    Cast(CastExpression),
 
    Call(CallExpression),
 
    Variable(VariableExpression),
 
}
 

	
 
impl Expression {
 
    pub fn as_variable(&self) -> &VariableExpression {
 
        match self {
 
            Expression::Variable(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `VariableExpression`"),
 
        }
 
    }
 

	
 
    /// Returns the operator span, function name, a binding's "let" span, etc.:

    /// an indication of the kind of expression being applied.
 
    pub fn operation_span(&self) -> InputSpan {
 
        match self {
 
            Expression::Assignment(expr) => expr.operator_span,
 
            Expression::Binding(expr) => expr.operator_span,
 
            Expression::Conditional(expr) => expr.operator_span,
 
            Expression::Binary(expr) => expr.operator_span,
 
            Expression::Unary(expr) => expr.operator_span,
 
            Expression::Indexing(expr) => expr.operator_span,
 
            Expression::Slicing(expr) => expr.slicing_span,
 
            Expression::Select(expr) => expr.operator_span,
 
            Expression::Literal(expr) => expr.span,
 
            Expression::Cast(expr) => expr.cast_span,
 
            Expression::Call(expr) => expr.func_span,
 
            Expression::Variable(expr) => expr.identifier.span,
 
        }
 
    }
 

	
 
    /// Returns the span covering the entire expression (i.e. including the
 
    /// spans of the arguments as well).
 
    pub fn full_span(&self) -> InputSpan {
 
        match self {
 
            Expression::Assignment(expr) => expr.full_span,
 
            Expression::Binding(expr) => expr.full_span,
 
            Expression::Conditional(expr) => expr.full_span,
 
            Expression::Binary(expr) => expr.full_span,
 
            Expression::Unary(expr) => expr.full_span,
 
            Expression::Indexing(expr) => expr.full_span,
 
            Expression::Slicing(expr) => expr.full_span,
 
            Expression::Select(expr) => expr.full_span,
 
            Expression::Literal(expr) => expr.span,
 
            Expression::Cast(expr) => expr.full_span,
 
            Expression::Call(expr) => expr.full_span,
 
            Expression::Variable(expr) => expr.identifier.span,
 
        }
 
    }
 

	
 
    pub fn parent(&self) -> &ExpressionParent {
 
        match self {
 
            Expression::Assignment(expr) => &expr.parent,
 
            Expression::Binding(expr) => &expr.parent,
 
            Expression::Conditional(expr) => &expr.parent,
 
            Expression::Binary(expr) => &expr.parent,
 
            Expression::Unary(expr) => &expr.parent,
 
            Expression::Indexing(expr) => &expr.parent,
 
            Expression::Slicing(expr) => &expr.parent,
 
            Expression::Select(expr) => &expr.parent,
 
            Expression::Literal(expr) => &expr.parent,
 
            Expression::Cast(expr) => &expr.parent,
 
            Expression::Call(expr) => &expr.parent,
 
            Expression::Variable(expr) => &expr.parent,
 
        }
 
    }
 

	
 
    pub fn parent_mut(&mut self) -> &mut ExpressionParent {
 
        match self {
 
            Expression::Assignment(expr) => &mut expr.parent,
 
            Expression::Binding(expr) => &mut expr.parent,
 
            Expression::Conditional(expr) => &mut expr.parent,
 
            Expression::Binary(expr) => &mut expr.parent,
 
            Expression::Unary(expr) => &mut expr.parent,
 
            Expression::Indexing(expr) => &mut expr.parent,
 
            Expression::Slicing(expr) => &mut expr.parent,
 
            Expression::Select(expr) => &mut expr.parent,
 
            Expression::Literal(expr) => &mut expr.parent,
 
            Expression::Cast(expr) => &mut expr.parent,
 
            Expression::Call(expr) => &mut expr.parent,
 
            Expression::Variable(expr) => &mut expr.parent,
 
        }
 
    }
 

	
 
    pub fn parent_expr_id(&self) -> Option<ExpressionId> {
 
        if let ExpressionParent::Expression(id, _) = self.parent() {
 
            Some(*id)
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    pub fn type_index(&self) -> i32 {
 
        match self {
 
            Expression::Assignment(expr) => expr.type_index,
 
            Expression::Binding(expr) => expr.type_index,
 
            Expression::Conditional(expr) => expr.type_index,
 
            Expression::Binary(expr) => expr.type_index,
 
            Expression::Unary(expr) => expr.type_index,
 
            Expression::Indexing(expr) => expr.type_index,
 
            Expression::Slicing(expr) => expr.type_index,
 
            Expression::Select(expr) => expr.type_index,
 
            Expression::Literal(expr) => expr.type_index,
 
            Expression::Cast(expr) => expr.type_index,
 
            Expression::Call(expr) => expr.type_index,
 
            Expression::Variable(expr) => expr.type_index,
 
        }
 
    }
 

	
 
    pub fn type_index_mut(&mut self) -> &mut i32 {
 
        match self {
 
            Expression::Assignment(expr) => &mut expr.type_index,
 
            Expression::Binding(expr) => &mut expr.type_index,
 
            Expression::Conditional(expr) => &mut expr.type_index,
 
            Expression::Binary(expr) => &mut expr.type_index,
 
            Expression::Unary(expr) => &mut expr.type_index,
 
            Expression::Indexing(expr) => &mut expr.type_index,
 
            Expression::Slicing(expr) => &mut expr.type_index,
 
            Expression::Select(expr) => &mut expr.type_index,
 
            Expression::Literal(expr) => &mut expr.type_index,
 
            Expression::Cast(expr) => &mut expr.type_index,
 
            Expression::Call(expr) => &mut expr.type_index,
 
            Expression::Variable(expr) => &mut expr.type_index,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub enum AssignmentOperator {
 
    Set,
 
    Concatenated,
 
    Multiplied,
 
    Divided,
 
    Remained,
 
    Added,
 
    Subtracted,
 
    ShiftedLeft,
 
    ShiftedRight,
 
    BitwiseAnded,
 
    BitwiseXored,
 
    BitwiseOred,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct AssignmentExpression {
 
    pub this: AssignmentExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub left: ExpressionId,
 
    pub operation: AssignmentOperator,
 
    pub right: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BindingExpression {
 
    pub this: BindingExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub bound_to: ExpressionId,
 
    pub bound_from: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ConditionalExpression {
 
    pub this: ConditionalExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub test: ExpressionId,
 
    pub true_expression: ExpressionId,
 
    pub false_expression: ExpressionId,
 
    // Validator/Linking
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 

	
 
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 
pub enum BinaryOperator {
 
    Concatenate,
 
    LogicalOr,
 
    LogicalAnd,
 
    BitwiseOr,
 
    BitwiseXor,
 
    BitwiseAnd,
 
    Equality,
 
    Inequality,
 
    LessThan,
 
    GreaterThan,
 
    LessThanEqual,
 
    GreaterThanEqual,
 
    ShiftLeft,
 
    ShiftRight,
 
    Add,
 
    Subtract,
 
    Multiply,
 
    Divide,
 
    Remainder,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BinaryExpression {
 
    pub this: BinaryExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub left: ExpressionId,
 
    pub operation: BinaryOperator,
 
    pub right: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 

	
 
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 
pub enum UnaryOperator {
 
    Positive,
 
    Negative,
 
    BitwiseNot,
 
    LogicalNot,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct UnaryExpression {
 
    pub this: UnaryExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub operation: UnaryOperator,
 
    pub expression: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct IndexingExpression {
 
    pub this: IndexingExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub subject: ExpressionId,
 
    pub index: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SlicingExpression {
 
    pub this: SlicingExpressionId,
 
    // Parsing
 
    pub slicing_span: InputSpan, // from '[' to ']'
 
    pub full_span: InputSpan, // includes subject
 
    pub subject: ExpressionId,
 
    pub from_index: ExpressionId,
 
    pub to_index: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum SelectKind {
 
    StructField(Identifier),
 
    TupleMember(u64), // u64 is overkill, but space is taken up by `StructField` variant anyway
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SelectExpression {
 
    pub this: SelectExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan, // of the '.'
 
    pub full_span: InputSpan, // includes subject and field
 
    pub subject: ExpressionId,
 
    pub kind: SelectKind,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct CastExpression {
 
    pub this: CastExpressionId,
 
    // Parsing
 
    pub cast_span: InputSpan, // of the "cast" keyword
 
    pub full_span: InputSpan, // includes the cast subject
 
    pub to_type: ParserType,
 
    pub subject: ExpressionId,
 
    // Validator/linker
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct CallExpression {
 
    pub this: CallExpressionId,
 
    // Parsing
 
    pub func_span: InputSpan, // of the function name
 
    pub full_span: InputSpan, // includes the arguments and parentheses
 
    pub parser_type: ParserType, // of the function call, not the return type
 
    pub method: Method,
 
    pub arguments: Vec<ExpressionId>,
 
    pub procedure: ProcedureDefinitionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum Method {
 
    // Builtin, accessible by programmer
 
    Get,
 
    Put,
 
    Fires,
 
    Create,
 
    Length,
 
    Assert,
 
    Print,
 
    // Builtin, not accessible by programmer
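    // (used in the compiler-generated replacement code for `select` statements)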
 
    SelectStart, // SelectStart(total_num_cases, total_num_ports)
 
    SelectRegisterCasePort, // SelectRegisterCasePort(case_index, port_index, port_id)
 
    SelectWait, // SelectWait() -> u32
 
    // User-defined
 
    UserFunction,
 
    UserComponent,
 
}
 

	
 
impl Method {
 
    pub(crate) fn is_public_builtin(&self) -> bool {
 
        use Method::*;
 
        match self {
 
            Get | Put | Fires | Create | Length | Assert | Print => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    pub(crate) fn is_user_defined(&self) -> bool {
 
        use Method::*;
 
        match self {
 
            UserFunction | UserComponent => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralExpression {
 
    pub this: LiteralExpressionId,
 
    // Parsing
 
    pub span: InputSpan,
 
    pub value: Literal,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Literal {
 
    Null, // message
 
    True,
 
    False,
 
    Character(char),
 
    String(StringRef<'static>),
 
    Integer(LiteralInteger),
 
    Struct(LiteralStruct),
 
    Enum(LiteralEnum),
 
    Union(LiteralUnion),
 
    Array(Vec<ExpressionId>),
 
    Tuple(Vec<ExpressionId>),
 
}
 

	
 
impl Literal {
 
    pub(crate) fn as_struct(&self) -> &LiteralStruct {
 
        if let Literal::Struct(literal) = self {
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Struct", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_enum(&self) -> &LiteralEnum {
 
        if let Literal::Enum(literal) = self {
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Enum", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_union(&self) -> &LiteralUnion {
 
        if let Literal::Union(literal) = self {
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Union", self)
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralInteger {
 
    pub(crate) unsigned_value: u64,
 
    pub(crate) negated: bool, // for constant expression evaluation, TODO: @Int
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralStructField {
 
    // Phase 1: parser
 
    pub(crate) identifier: Identifier,
 
    pub(crate) value: ExpressionId,
 
    // Phase 2: linker
 
    pub(crate) field_idx: usize, // in struct definition
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralStruct {
 
    // Phase 1: parser
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) fields: Vec<LiteralStructField>,
 
    pub(crate) definition: DefinitionId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralEnum {
 
    // Phase 1: parser
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) variant: Identifier,
 
    pub(crate) definition: DefinitionId,
 
    // Phase 2: linker
 
    pub(crate) variant_idx: usize, // as present in the type table
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralUnion {
 
    // Phase 1: parser
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) variant: Identifier,
 
    pub(crate) values: Vec<ExpressionId>,
 
    pub(crate) definition: DefinitionId,
 
    // Phase 2: linker
 
    pub(crate) variant_idx: usize, // as present in type table
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct VariableExpression {
 
    pub this: VariableExpressionId,
 
    // Parsing
 
    pub identifier: Identifier,
 
    // Validator/Linker
 
    pub declaration: Option<VariableId>,
 
    pub used_as_binding_target: bool,
 
    pub parent: ExpressionParent,
 
    // Typing
 
    pub type_index: i32,
 
}
 
\ No newline at end of file
src/protocol/ast_printer.rs
Show inline comments
 
#![allow(dead_code)]
 

	
 
use std::fmt::{Debug, Display};
 
use std::io::Write as IOWrite;
 

	
 
use super::ast::*;
 
use super::token_parsing::*;
 

	
 
const INDENT: usize = 2;
 

	
 
const PREFIX_EMPTY: &'static str = "    ";
 
const PREFIX_ROOT_ID: &'static str = "Root";
 
const PREFIX_PRAGMA_ID: &'static str = "Prag";
 
const PREFIX_IMPORT_ID: &'static str = "Imp ";
 
const PREFIX_TYPE_ANNOT_ID: &'static str = "TyAn";
 
const PREFIX_VARIABLE_ID: &'static str = "Var ";
 
const PREFIX_DEFINITION_ID: &'static str = "Def ";
 
const PREFIX_STRUCT_ID: &'static str = "DefS";
 
const PREFIX_ENUM_ID: &'static str = "DefE";
 
const PREFIX_UNION_ID: &'static str = "DefU";
 
const PREFIX_COMPONENT_ID: &'static str = "DefC";
 
const PREFIX_FUNCTION_ID: &'static str = "DefF";
 
const PREFIX_STMT_ID: &'static str = "Stmt";
 
const PREFIX_BLOCK_STMT_ID: &'static str = "SBl ";
 
const PREFIX_ENDBLOCK_STMT_ID: &'static str = "SEBl";
 
const PREFIX_LOCAL_STMT_ID: &'static str = "SLoc";
 
const PREFIX_MEM_STMT_ID: &'static str = "SMem";
 
const PREFIX_CHANNEL_STMT_ID: &'static str = "SCha";
 
const PREFIX_SKIP_STMT_ID: &'static str = "SSki";
 
const PREFIX_LABELED_STMT_ID: &'static str = "SLab";
 
const PREFIX_IF_STMT_ID: &'static str = "SIf ";
 
const PREFIX_ENDIF_STMT_ID: &'static str = "SEIf";
 
const PREFIX_WHILE_STMT_ID: &'static str = "SWhi";
 
const PREFIX_ENDWHILE_STMT_ID: &'static str = "SEWh";
 
const PREFIX_BREAK_STMT_ID: &'static str = "SBre";
 
const PREFIX_CONTINUE_STMT_ID: &'static str = "SCon";
 
const PREFIX_SYNC_STMT_ID: &'static str = "SSyn";
 
const PREFIX_ENDSYNC_STMT_ID: &'static str = "SESy";
 
const PREFIX_FORK_STMT_ID: &'static str = "SFrk";
 
const PREFIX_END_FORK_STMT_ID: &'static str = "SEFk";
 
const PREFIX_SELECT_STMT_ID: &'static str = "SSel";
 
const PREFIX_END_SELECT_STMT_ID: &'static str = "SESl";
 
const PREFIX_RETURN_STMT_ID: &'static str = "SRet";
 
const PREFIX_ASSERT_STMT_ID: &'static str = "SAsr";
 
const PREFIX_GOTO_STMT_ID: &'static str = "SGot";
 
const PREFIX_NEW_STMT_ID: &'static str = "SNew";
 
const PREFIX_PUT_STMT_ID: &'static str = "SPut";
 
const PREFIX_EXPR_STMT_ID: &'static str = "SExp";
 
const PREFIX_ASSIGNMENT_EXPR_ID: &'static str = "EAsi";
 
const PREFIX_BINDING_EXPR_ID: &'static str = "EBnd";
 
const PREFIX_CONDITIONAL_EXPR_ID: &'static str = "ECnd";
 
const PREFIX_BINARY_EXPR_ID: &'static str = "EBin";
 
const PREFIX_UNARY_EXPR_ID: &'static str = "EUna";
 
const PREFIX_INDEXING_EXPR_ID: &'static str = "EIdx";
 
const PREFIX_SLICING_EXPR_ID: &'static str = "ESli";
 
const PREFIX_SELECT_EXPR_ID: &'static str = "ESel";
 
const PREFIX_LITERAL_EXPR_ID: &'static str = "ELit";
 
const PREFIX_CAST_EXPR_ID: &'static str = "ECas";
 
const PREFIX_CALL_EXPR_ID: &'static str = "ECll";
 
const PREFIX_VARIABLE_EXPR_ID: &'static str = "EVar";
 

	
 
struct KV<'a> {
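    // Output buffer shared with the caller; the formatted line is appended to
    // it when this KV is dropped.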
 
    buffer: &'a mut String,
 
    prefix: Option<(&'static str, i32)>,
 
    indent: usize,
 
    temp_key: &'a mut String,
 
    temp_val: &'a mut String,
 
}
 

	
 
impl<'a> KV<'a> {
 
    fn new(buffer: &'a mut String, temp_key: &'a mut String, temp_val: &'a mut String, indent: usize) -> Self {
 
        temp_key.clear();
 
        temp_val.clear();
 
        KV{
 
            buffer,
 
            prefix: None,
 
            indent,
 
            temp_key,
 
            temp_val
 
        }
 
    }
 

	
 
    fn with_id(mut self, prefix: &'static str, id: i32) -> Self {
 
        self.prefix = Some((prefix, id));
 
        self
 
    }
 

	
 
    fn with_s_key(self, key: &str) -> Self {
 
        self.temp_key.push_str(key);
 
        self
 
    }
 

	
 
    fn with_d_key<D: Display>(self, key: &D) -> Self {
 
        self.temp_key.push_str(&key.to_string());
 
        self
 
    }
 

	
 
    fn with_s_val(self, val: &str) -> Self {
 
        self.temp_val.push_str(val);
 
        self
 
    }
 

	
 
    fn with_disp_val<D: Display>(self, val: &D) -> Self {
 
        self.temp_val.push_str(&format!("{}", val));
 
        self
 
    }
 

	
 
    fn with_debug_val<D: Debug>(self, val: &D) -> Self {
 
        self.temp_val.push_str(&format!("{:?}", val));
 
        self
 
    }
 

	
 
    fn with_identifier_val(self, val: &Identifier) -> Self {
 
        self.temp_val.push_str(val.value.as_str());
 
        self
 
    }
 

	
 
    fn with_opt_disp_val<D: Display>(self, val: Option<&D>) -> Self {
 
        match val {
 
            Some(v) => { self.temp_val.push_str(&format!("Some({})", v)); },
 
            None => { self.temp_val.push_str("None"); }
 
        }
 
        self
 
    }
 

	
 
    fn with_opt_identifier_val(self, val: Option<&Identifier>) -> Self {
 
        match val {
 
            Some(v) => {
 
                self.temp_val.push_str("Some(");
 
                self.temp_val.push_str(v.value.as_str());
 
                self.temp_val.push(')');
 
            },
 
            None => {
 
                self.temp_val.push_str("None");
 
            }
 
        }
 
        self
 
    }
 

	
 
    fn with_custom_val<F: Fn(&mut String)>(mut self, val_fn: F) -> Self {
 
        val_fn(&mut self.temp_val);
 
        self
 
    }
 
}
 

	
 
impl<'a> Drop for KV<'a> {
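    // All writing happens on drop: the prefix, indentation, key, and value
    // collected through the builder methods are formatted into a single line
    // and appended to the output buffer.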
 
    fn drop(&mut self) {
 
        // Prefix and indent
 
        if let Some((prefix, id)) = &self.prefix {
 
            self.buffer.push_str(&format!("{}[{:04}]", prefix, id));
 
        } else {
 
            self.buffer.push_str("           ");
 
        }
 

	
 
        for _ in 0..self.indent * INDENT {
 
            self.buffer.push(' ');
 
        }
 

	
 
        // Leading dash
 
        self.buffer.push_str("- ");
 

	
 
        // Key and value
 
        self.buffer.push_str(self.temp_key);
 
        if self.temp_val.is_empty() {
 
            self.buffer.push(':');
 
        } else {
 
            self.buffer.push_str(": ");
 
            self.buffer.push_str(&self.temp_val);
 
        }
 
        self.buffer.push('\n');
 
    }
 
}
 

	
 
pub(crate) struct ASTWriter {
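    // Produces a human-readable, indented key/value dump of the AST heap; the
    // `kv` helper below emits one line at a time using the shared buffers.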
 
    cur_definition: Option<DefinitionId>,
 
    buffer: String,
 
    temp1: String,
 
    temp2: String,
 
}
 

	
 
impl ASTWriter {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            cur_definition: None,
 
            buffer: String::with_capacity(4096),
 
            temp1: String::with_capacity(256),
 
            temp2: String::with_capacity(256),
 
        }
 
    }
 
    pub(crate) fn write_ast<W: IOWrite>(&mut self, w: &mut W, heap: &Heap) {
 
        for root_id in heap.protocol_descriptions.iter().map(|v| v.this) {
 
            self.write_module(heap, root_id);
 
            w.write_all(self.buffer.as_bytes()).expect("flush buffer");
 
            self.buffer.clear();
 
        }
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level module writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_module(&mut self, heap: &Heap, root_id: RootId) {
 
        self.kv(0).with_id(PREFIX_ROOT_ID, root_id.index)
 
            .with_s_key("Module");
 

	
 
        let root = &heap[root_id];
 
        self.kv(1).with_s_key("Pragmas");
 
        for pragma_id in &root.pragmas {
 
            self.write_pragma(heap, *pragma_id, 2);
 
        }
 

	
 
        self.kv(1).with_s_key("Imports");
 
        for import_id in &root.imports {
 
            self.write_import(heap, *import_id, 2);
 
        }
 

	
 
        self.kv(1).with_s_key("Definitions");
 
        for def_id in &root.definitions {
 
            self.write_definition(heap, *def_id, 2);
 
        }
 
    }
 

	
 
    fn write_pragma(&mut self, heap: &Heap, pragma_id: PragmaId, indent: usize) {
 
        match &heap[pragma_id] {
 
            Pragma::Version(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaVersion")
 
                    .with_disp_val(&pragma.version);
 
            },
 
            Pragma::Module(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaModule")
 
                    .with_identifier_val(&pragma.value);
 
            }
 
        }
 
    }
 

	
 
    fn write_import(&mut self, heap: &Heap, import_id: ImportId, indent: usize) {
 
        let import = &heap[import_id];
 
        let indent2 = indent + 1;
 

	
 
        match import {
 
            Import::Module(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportModule");
 

	
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&import.module);
 
                self.kv(indent2).with_s_key("Alias").with_identifier_val(&import.alias);
 
                self.kv(indent2).with_s_key("Target").with_disp_val(&import.module_id.index);
 
            },
 
            Import::Symbols(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportSymbol");
 

	
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&import.module);
 
                self.kv(indent2).with_s_key("Target").with_disp_val(&import.module_id.index);
 

	
 
                self.kv(indent2).with_s_key("Symbols");
 

	
 
                let indent3 = indent2 + 1;
 
                let indent4 = indent3 + 1;
 
                for symbol in &import.symbols {
 
                    self.kv(indent3).with_s_key("AliasedSymbol");
 
                    self.kv(indent4).with_s_key("Name").with_identifier_val(&symbol.name);
 
                    self.kv(indent4).with_s_key("Alias").with_opt_identifier_val(symbol.alias.as_ref());
 
                    self.kv(indent4).with_s_key("Definition").with_disp_val(&symbol.definition_id.index);
 
                }
 
            }
 
        }
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level definition writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_definition(&mut self, heap: &Heap, def_id: DefinitionId, indent: usize) {
 
        self.cur_definition = Some(def_id);
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 
        let indent4 = indent3 + 1;
 

	
 
        match &heap[def_id] {
 
            Definition::Struct(def) => {
 
                self.kv(indent).with_id(PREFIX_STRUCT_ID, def.this.0.index)
 
                    .with_s_key("DefinitionStruct");
 

	
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&def.identifier);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_identifier_val(&poly_var_id);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Fields");
 
                for field in &def.fields {
 
                    self.kv(indent3).with_s_key("Field");
 
                    self.kv(indent4).with_s_key("Name")
 
                        .with_identifier_val(&field.field);
 
                    self.kv(indent4).with_s_key("Type")
 
                        .with_custom_val(|s| write_parser_type(s, heap, &field.parser_type));
 
                }
 
            },
 
            Definition::Enum(def) => {
 
                self.kv(indent).with_id(PREFIX_ENUM_ID, def.this.0.index)
 
                    .with_s_key("DefinitionEnum");
 

	
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&def.identifier);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_identifier_val(&poly_var_id);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Variants");
 
                for variant in &def.variants {
 
                    self.kv(indent3).with_s_key("Variant");
 
                    self.kv(indent4).with_s_key("Name")
 
                        .with_identifier_val(&variant.identifier);
 
                    let variant_value = self.kv(indent4).with_s_key("Value");
 
                    match &variant.value {
 
                        EnumVariantValue::None => variant_value.with_s_val("None"),
 
                        EnumVariantValue::Integer(value) => variant_value.with_disp_val(value),
 
                    };
 
                }
 
            },
 
            Definition::Union(def) => {
 
                self.kv(indent).with_id(PREFIX_UNION_ID, def.this.0.index)
 
                    .with_s_key("DefinitionUnion");
 

	
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&def.identifier);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_identifier_val(&poly_var_id);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Variants");
 
                for variant in &def.variants {
 
                    self.kv(indent3).with_s_key("Variant");
 
                    self.kv(indent4).with_s_key("Name")
 
                        .with_identifier_val(&variant.identifier);
 
                        
 
                    if variant.value.is_empty() {
 
                        self.kv(indent4).with_s_key("Value").with_s_val("None");
 
                    } else {
 
                        self.kv(indent4).with_s_key("Values");
 
                        for embedded in &variant.value {
 
                            self.kv(indent4+1).with_s_key("Value")
 
                                .with_custom_val(|v| write_parser_type(v, heap, embedded));
 
                        }
 
                    }
 
                }
 
            }
 
            Definition::Procedure(def) => {
 
                self.kv(indent).with_id(PREFIX_FUNCTION_ID, def.this.0.index)
 
                    .with_s_key("DefinitionFunction");
 

	
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&def.identifier);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_identifier_val(&poly_var_id);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Kind").with_debug_val(&def.kind);
 
                if let Some(parser_type) = &def.return_type {
 
                    self.kv(indent2).with_s_key("ReturnParserType")
 
                        .with_custom_val(|s| write_parser_type(s, heap, parser_type));
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parameters");
 
                for variable_id in &def.parameters {
 
                    self.write_variable(heap, *variable_id, indent3);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, def.body.upcast(), indent3);
 
            },
 
        }
 
    }
 

	
 
    fn write_stmt(&mut self, heap: &Heap, stmt_id: StatementId, indent: usize) {
 
        let stmt = &heap[stmt_id];
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 

	
 
        match stmt {
 
            Statement::Block(stmt) => {
 
                self.kv(indent).with_id(PREFIX_BLOCK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Block");
 
                self.kv(indent2).with_s_key("EndBlockID").with_disp_val(&stmt.end_block.0.index);
 
                self.kv(indent2).with_s_key("ScopeID").with_disp_val(&stmt.scope.index);
 

	
 
                self.kv(indent2).with_s_key("Statements");
 
                for stmt_id in &stmt.statements {
 
                    self.write_stmt(heap, *stmt_id, indent3);
 
                }
 
            },
 
            Statement::EndBlock(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDBLOCK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndBlock");
 
                self.kv(indent2).with_s_key("StartBlockID").with_disp_val(&stmt.start_block.0.index);
 
            }
 
            Statement::Local(stmt) => {
 
                match stmt {
 
                    LocalStatement::Channel(stmt) => {
 
                        self.kv(indent).with_id(PREFIX_CHANNEL_STMT_ID, stmt.this.0.0.index)
 
                            .with_s_key("LocalChannel");
 

	
 
                        self.kv(indent2).with_s_key("From");
 
                        self.write_variable(heap, stmt.from, indent3);
 
                        self.kv(indent2).with_s_key("To");
 
                        self.write_variable(heap, stmt.to, indent3);
 
                        self.kv(indent2).with_s_key("Next").with_disp_val(&stmt.next.index);
 
                    },
 
                    LocalStatement::Memory(stmt) => {
 
                        self.kv(indent).with_id(PREFIX_MEM_STMT_ID, stmt.this.0.0.index)
 
                            .with_s_key("LocalMemory");
 

	
 
                        self.kv(indent2).with_s_key("Variable");
 
                        self.write_variable(heap, stmt.variable, indent3);
 
                        self.kv(indent2).with_s_key("InitialValue");
 
                        self.write_expr(heap, stmt.initial_expr.upcast(), indent3);
 
                        self.kv(indent2).with_s_key("Next").with_disp_val(&stmt.next.index);
 
                    }
 
                }
 
            },
 
            Statement::Labeled(stmt) => {
 
                self.kv(indent).with_id(PREFIX_LABELED_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Labeled");
 

	
 
                self.kv(indent2).with_s_key("Label").with_identifier_val(&stmt.label);
 
                self.kv(indent2).with_s_key("Statement");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::If(stmt) => {
 
                self.kv(indent).with_id(PREFIX_IF_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("If");
 

	
 
                self.kv(indent2).with_s_key("EndIf").with_disp_val(&stmt.end_if.0.index);
 

	
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, stmt.test, indent3);
 

	
 
                self.kv(indent2).with_s_key("TrueBody");
 
                self.write_stmt(heap, stmt.true_case.body, indent3);
 

	
 
                if let Some(false_body) = stmt.false_case {
 
                    self.kv(indent2).with_s_key("FalseBody");
 
                    self.write_stmt(heap, false_body.body, indent3);
 
                }
 
            },
 
            Statement::EndIf(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDIF_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndIf");
 
                self.kv(indent2).with_s_key("StartIf").with_disp_val(&stmt.start_if.0.index);
 
                self.kv(indent2).with_s_key("Next").with_disp_val(&stmt.next.index);
 
            },
 
            Statement::While(stmt) => {
 
                self.kv(indent).with_id(PREFIX_WHILE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("While");
 

	
 
                self.kv(indent2).with_s_key("EndWhile").with_disp_val(&stmt.end_while.0.index);
 
                self.kv(indent2).with_s_key("InSync")
 
                    .with_disp_val(&stmt.in_sync.0.index);
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, stmt.test, indent3);
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::EndWhile(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDWHILE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndWhile");
 
                self.kv(indent2).with_s_key("StartWhile").with_disp_val(&stmt.start_while.0.index);
 
                self.kv(indent2).with_s_key("Next").with_disp_val(&stmt.next.index);
 
            },
 
            Statement::Break(stmt) => {
 
                self.kv(indent).with_id(PREFIX_BREAK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Break");
 
                self.kv(indent2).with_s_key("Label")
 
                    .with_opt_identifier_val(stmt.label.as_ref());
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_disp_val(&stmt.target.0.index);
 
            },
 
            Statement::Continue(stmt) => {
 
                self.kv(indent).with_id(PREFIX_CONTINUE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Continue");
 
                self.kv(indent2).with_s_key("Label")
 
                    .with_opt_identifier_val(stmt.label.as_ref());
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_disp_val(&stmt.target.0.index);
 
            },
 
            Statement::Synchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_SYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Synchronous");
 
                self.kv(indent2).with_s_key("EndSync").with_disp_val(&stmt.end_sync.0.index);
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::EndSynchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDSYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndSynchronous");
 
                self.kv(indent2).with_s_key("StartSync").with_disp_val(&stmt.start_sync.0.index);
 
                self.kv(indent2).with_s_key("Next").with_disp_val(&stmt.next.index);
 
            },
 
            Statement::Fork(stmt) => {
 
                self.kv(indent).with_id(PREFIX_FORK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Fork");
 
                self.kv(indent2).with_s_key("EndFork").with_disp_val(&stmt.end_fork.0.index);
 
                self.kv(indent2).with_s_key("LeftBody");
 
                self.write_stmt(heap, stmt.left_body, indent3);
 

	
 
                if let Some(right_body_id) = stmt.right_body {
 
                    self.kv(indent2).with_s_key("RightBody");
 
                    self.write_stmt(heap, right_body_id, indent3);
 
                }
 
            },
 
            Statement::EndFork(stmt) => {
 
                self.kv(indent).with_id(PREFIX_END_FORK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndFork");
 
                self.kv(indent2).with_s_key("StartFork").with_disp_val(&stmt.start_fork.0.index);
 
                self.kv(indent2).with_s_key("Next").with_disp_val(&stmt.next.index);
 
            },
 
            Statement::Select(stmt) => {
 
                self.kv(indent).with_id(PREFIX_SELECT_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Select");
 
                self.kv(indent2).with_s_key("EndSelect").with_disp_val(&stmt.end_select.0.index);
 
                self.kv(indent2).with_s_key("Cases");
 
                let indent3 = indent2 + 1;
 
                let indent4 = indent3 + 1;
 
                for case in &stmt.cases {
 
                    self.kv(indent3).with_s_key("Guard");
 
                    self.write_stmt(heap, case.guard, indent4);
 

	
 
                    self.kv(indent3).with_s_key("Block");
 
                    self.write_stmt(heap, case.body, indent4);
 
                }
 
                self.kv(indent2).with_s_key("Replacement");
 
                self.write_stmt(heap, stmt.next, indent3);
 
            },
 
            Statement::EndSelect(stmt) => {
 
                self.kv(indent).with_id(PREFIX_END_SELECT_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndSelect");
 
                self.kv(indent2).with_s_key("StartSelect").with_disp_val(&stmt.start_select.0.index);
 
                self.kv(indent2).with_s_key("Next").with_disp_val(&stmt.next.index);
 
            }
 
            Statement::Return(stmt) => {
 
                self.kv(indent).with_id(PREFIX_RETURN_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Return");
 
                self.kv(indent2).with_s_key("Expressions");
 
                for expr_id in &stmt.expressions {
 
                    self.write_expr(heap, *expr_id, indent3);
 
                }
 
            },
 
            Statement::Goto(stmt) => {
 
                self.kv(indent).with_id(PREFIX_GOTO_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Goto");
 
                self.kv(indent2).with_s_key("Label").with_identifier_val(&stmt.label);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_disp_val(&stmt.target.0.index);
 
            },
 
            Statement::New(stmt) => {
 
                self.kv(indent).with_id(PREFIX_NEW_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("New");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression.upcast(), indent3);
 
                self.kv(indent2).with_s_key("Next").with_disp_val(&stmt.next.index);
 
            },
 
            Statement::Expression(stmt) => {
 
                self.kv(indent).with_id(PREFIX_EXPR_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("ExpressionStatement");
 
                self.write_expr(heap, stmt.expression, indent2);
 
                self.kv(indent2).with_s_key("Next").with_disp_val(&stmt.next.index);
 
            }
 
        }
 
    }
 

	
 
    fn write_expr(&mut self, heap: &Heap, expr_id: ExpressionId, indent: usize) {
 
        let expr = &heap[expr_id];
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 

	
 
        match expr {
 
            Expression::Assignment(expr) => {
 
                self.kv(indent).with_id(PREFIX_ASSIGNMENT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("AssignmentExpr");
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            },
 
            Expression::Binding(expr) => {
 
                self.kv(indent).with_id(PREFIX_BINDING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("BindingExpr");
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                self.kv(indent2).with_s_key("BindToExpression");
 
                self.write_expr(heap, expr.bound_to, indent3);
 
                self.kv(indent2).with_s_key("BindFromExpression");
 
                self.write_expr(heap, expr.bound_from, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            },
 
            Expression::Conditional(expr) => {
 
                self.kv(indent).with_id(PREFIX_CONDITIONAL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ConditionalExpr");
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, expr.test, indent3);
 
                self.kv(indent2).with_s_key("TrueExpression");
 
                self.write_expr(heap, expr.true_expression, indent3);
 
                self.kv(indent2).with_s_key("FalseExpression");
 
                self.write_expr(heap, expr.false_expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            },
 
            Expression::Binary(expr) => {
 
                self.kv(indent).with_id(PREFIX_BINARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("BinaryExpr");
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            },
 
            Expression::Unary(expr) => {
 
                self.kv(indent).with_id(PREFIX_UNARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("UnaryExpr");
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Argument");
 
                self.write_expr(heap, expr.expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            },
 
            Expression::Indexing(expr) => {
 
                self.kv(indent).with_id(PREFIX_INDEXING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("IndexingExpr");
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("Index");
 
                self.write_expr(heap, expr.index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            },
 
            Expression::Slicing(expr) => {
 
                self.kv(indent).with_id(PREFIX_SLICING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SlicingExpr");
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("FromIndex");
 
                self.write_expr(heap, expr.from_index, indent3);
 
                self.kv(indent2).with_s_key("ToIndex");
 
                self.write_expr(heap, expr.to_index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            },
 
            Expression::Select(expr) => {
 
                self.kv(indent).with_id(PREFIX_SELECT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SelectExpr");
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 

	
 
                match &expr.kind {
 
                    SelectKind::StructField(field_name) => {
 
                        self.kv(indent2).with_s_key("StructField").with_identifier_val(field_name);
 
                    },
 
                    SelectKind::TupleMember(member_index) => {
 
                        self.kv(indent2).with_s_key("TupleMember").with_disp_val(member_index);
 
                    },
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            },
 
            Expression::Literal(expr) => {
 
                self.kv(indent).with_id(PREFIX_LITERAL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("LiteralExpr");
 

	
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                let val = self.kv(indent2).with_s_key("Value");
 
                match &expr.value {
 
                    Literal::Null => { val.with_s_val("null"); },
 
                    Literal::True => { val.with_s_val("true"); },
 
                    Literal::False => { val.with_s_val("false"); },
 
                    Literal::Character(data) => { val.with_disp_val(data); },
 
                    Literal::String(data) => {
 
                        // Copy into an owned String so it can be passed on as a Display value
 
                        let string = String::from(data.as_str());
 
                        val.with_disp_val(&string);
 
                    },
 
                    Literal::Integer(data) => { val.with_debug_val(data); },
 
                    Literal::Struct(data) => {
 
                        val.with_s_val("Struct");
 
                        let indent4 = indent3 + 1;
 

	
 
                        self.kv(indent3).with_s_key("ParserType")
 
                            .with_custom_val(|t| write_parser_type(t, heap, &data.parser_type));
 
                        self.kv(indent3).with_s_key("Definition").with_disp_val(&data.definition.index);
 

	
 
                        for field in &data.fields {
 
                            self.kv(indent3).with_s_key("Field");
 
                            self.kv(indent4).with_s_key("Name").with_identifier_val(&field.identifier);
 
                            self.kv(indent4).with_s_key("Index").with_disp_val(&field.field_idx);
 
                            self.kv(indent4).with_s_key("ParserType");
 
                            self.write_expr(heap, field.value, indent4 + 1);
 
                        }
 
                    },
 
                    Literal::Enum(data) => {
 
                        val.with_s_val("Enum");
 

	
 
                        self.kv(indent3).with_s_key("ParserType")
 
                            .with_custom_val(|t| write_parser_type(t, heap, &data.parser_type));
 
                        self.kv(indent3).with_s_key("Definition").with_disp_val(&data.definition.index);
 
                        self.kv(indent3).with_s_key("VariantIdx").with_disp_val(&data.variant_idx);
 
                    },
 
                    Literal::Union(data) => {
 
                        val.with_s_val("Union");
 
                        let indent4 = indent3 + 1;
 

	
 
                        self.kv(indent3).with_s_key("ParserType")
 
                            .with_custom_val(|t| write_parser_type(t, heap, &data.parser_type));
 
                        self.kv(indent3).with_s_key("Definition").with_disp_val(&data.definition.index);
 
                        self.kv(indent3).with_s_key("VariantIdx").with_disp_val(&data.variant_idx);
 

	
 
                        for value in &data.values {
 
                            self.kv(indent3).with_s_key("Value");
 
                            self.write_expr(heap, *value, indent4);
 
                        }
 
                    },
 
                    Literal::Array(data) => {
 
                        val.with_s_val("Array");
 
                        let indent4 = indent3 + 1;
 

	
 
                        self.kv(indent3).with_s_key("Elements");
 
                        for expr_id in data {
 
                            self.write_expr(heap, *expr_id, indent4);
 
                        }
 
                    },
 
                    Literal::Tuple(data) => {
 
                        val.with_s_val("Tuple");
 
                        let indent4 = indent3 + 1;
 
                        self.kv(indent3).with_s_key("Elements");
 
                        for expr_id in data {
 
                            self.write_expr(heap, *expr_id, indent4);
 
                        }
 
                    }
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            },
 
            Expression::Cast(expr) => {
 
                self.kv(indent).with_id(PREFIX_CAST_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("CallExpr");
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                self.kv(indent2).with_s_key("ToType")
 
                    .with_custom_val(|t| write_parser_type(t, heap, &expr.to_type));
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            }
 
            Expression::Call(expr) => {
 
                self.kv(indent).with_id(PREFIX_CALL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("CallExpr");
 

	
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                self.kv(indent2).with_s_key("Method").with_debug_val(&expr.method);
 
                if !expr.procedure.is_invalid() {
 
                    let definition = &heap[expr.procedure];
 
                    self.kv(indent2).with_s_key("BuiltIn").with_disp_val(&definition.builtin);
 
                    self.kv(indent2).with_s_key("Source").with_debug_val(&definition.source);
 
                    self.kv(indent2).with_s_key("Variant").with_debug_val(&definition.kind);
 
                    self.kv(indent2).with_s_key("MethodName").with_identifier_val(&definition.identifier);
 
                    self.kv(indent2).with_s_key("ParserType")
 
                        .with_custom_val(|t| write_parser_type(t, heap, &expr.parser_type));
 
                }
 

	
 
                // Arguments
 
                self.kv(indent2).with_s_key("Arguments");
 
                for arg_id in &expr.arguments {
 
                    self.write_expr(heap, *arg_id, indent3);
 
                }
 

	
 
                // Parent
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            },
 
            Expression::Variable(expr) => {
 
                self.kv(indent).with_id(PREFIX_VARIABLE_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("VariableExpr");
 
                self.kv(indent2).with_s_key("TypeIndex").with_disp_val(&expr.type_index);
 
                self.kv(indent2).with_s_key("Name").with_identifier_val(&expr.identifier);
 
                self.kv(indent2).with_s_key("Definition")
 
                    .with_opt_disp_val(expr.declaration.as_ref().map(|v| &v.index));
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
            }
 
        }
 
    }
 

	
 
    fn write_variable(&mut self, heap: &Heap, variable_id: VariableId, indent: usize) {
 
        let var = &heap[variable_id];
 
        let indent2 = indent + 1;
 

	
 
        self.kv(indent).with_id(PREFIX_VARIABLE_ID, variable_id.index)
 
            .with_s_key("Variable");
 

	
 
        self.kv(indent2).with_s_key("Name").with_identifier_val(&var.identifier);
 
        self.kv(indent2).with_s_key("Kind").with_debug_val(&var.kind);
 
        self.kv(indent2).with_s_key("ParserType")
 
            .with_custom_val(|w| write_parser_type(w, heap, &var.parser_type));
 
        self.kv(indent2).with_s_key("RelativePos").with_disp_val(&var.relative_pos_in_parent);
 
        self.kv(indent2).with_s_key("UniqueScopeID").with_disp_val(&var.unique_id_in_scope);
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Printing Utilities
 
    //--------------------------------------------------------------------------
 

	
 
    fn kv(&mut self, indent: usize) -> KV {
 
        KV::new(&mut self.buffer, &mut self.temp1, &mut self.temp2, indent)
 
    }
 

	
 
    fn flush<W: IOWrite>(&mut self, w: &mut W) {
 
        w.write(self.buffer.as_bytes()).unwrap();
 
        self.buffer.clear()
 
    }
 
}
 

	
 
fn write_option<V: Display>(target: &mut String, value: Option<V>) {
 
    target.clear();
 
    match &value {
 
        Some(v) => target.push_str(&format!("Some({})", v)),
 
        None => target.push_str("None")
 
    };
 
}
 

	
 
fn write_parser_type(target: &mut String, heap: &Heap, t: &ParserType) {
 
    use ParserTypeVariant as PTV;
 

	
 
    if t.elements.is_empty() {
 
        target.push_str("no elements in ParserType (can happen due to compiler-inserted AST nodes)");
 
        return;
 
    }
 

	
 
    fn write_element(target: &mut String, heap: &Heap, t: &ParserType, mut element_idx: usize) -> usize {
 
        let element = &t.elements[element_idx];
 
        match &element.variant {
 
            PTV::Void => target.push_str("void"),
 
            PTV::InputOrOutput => {
 
                target.push_str("portlike<");
 
                element_idx = write_element(target, heap, t, element_idx + 1);
 
                target.push('>');
 
            },
 
            PTV::ArrayLike => {
 
                element_idx = write_element(target, heap, t, element_idx + 1);
 
                target.push_str("[???]");
 
            },
 
            PTV::IntegerLike => target.push_str("integerlike"),
 
            PTV::Message => { target.push_str(KW_TYPE_MESSAGE_STR); },
 
            PTV::Bool => { target.push_str(KW_TYPE_BOOL_STR); },
 
            PTV::UInt8 => { target.push_str(KW_TYPE_UINT8_STR); },
 
            PTV::UInt16 => { target.push_str(KW_TYPE_UINT16_STR); },
 
            PTV::UInt32 => { target.push_str(KW_TYPE_UINT32_STR); },
 
            PTV::UInt64 => { target.push_str(KW_TYPE_UINT64_STR); },
 
            PTV::SInt8 => { target.push_str(KW_TYPE_SINT8_STR); },
 
            PTV::SInt16 => { target.push_str(KW_TYPE_SINT16_STR); },
 
            PTV::SInt32 => { target.push_str(KW_TYPE_SINT32_STR); },
 
            PTV::SInt64 => { target.push_str(KW_TYPE_SINT64_STR); },
 
            PTV::Character => { target.push_str(KW_TYPE_CHAR_STR); },
 
            PTV::String => { target.push_str(KW_TYPE_STRING_STR); },
 
            PTV::IntegerLiteral => { target.push_str("int_literal"); },
 
            PTV::Inferred => { target.push_str(KW_TYPE_INFERRED_STR); },
 
            PTV::Array => {
 
                element_idx = write_element(target, heap, t, element_idx + 1);
 
                target.push_str("[]");
 
            },
 
            PTV::Input => {
 
                target.push_str(KW_TYPE_IN_PORT_STR);
 
                target.push('<');
 
                element_idx = write_element(target, heap, t, element_idx + 1);
 
                target.push('>');
 
            },
 
            PTV::Output => {
 
                target.push_str(KW_TYPE_OUT_PORT_STR);
 
                target.push('<');
 
                element_idx = write_element(target, heap, t, element_idx + 1);
 
                target.push('>');
 
            },
 
            PTV::Tuple(num_embedded) => {
 
                target.push('(');
 
                let num_embedded = *num_embedded;
 
                for embedded_idx in 0..num_embedded {
 
                    if embedded_idx != 0 {
 
                        target.push(',');
 
                    }
 
                    element_idx = write_element(target, heap, t, element_idx + 1);
 
                }
 
                target.push(')');
 
            }
 
            PTV::PolymorphicArgument(definition_id, arg_idx) => {
 
                let definition = &heap[*definition_id];
 
                let poly_var = &definition.poly_vars()[*arg_idx as usize].value;
 
                target.push_str(poly_var.as_str());
 
            },
 
            PTV::Definition(definition_id, num_embedded) => {
 
                let definition = &heap[*definition_id];
 
                let definition_ident = definition.identifier().value.as_str();
 
                target.push_str(definition_ident);
 

	
 
                let num_embedded = *num_embedded;
 
                if num_embedded != 0 {
 
                    target.push('<');
 
                    for embedded_idx in 0..num_embedded {
 
                        if embedded_idx != 0 {
 
                            target.push(',');
 
                        }
 
                        element_idx = write_element(target, heap, t, element_idx + 1);
 
                    }
 
                    target.push('>');
 
                }
 
            }
 
        }
 

	
 
        element_idx
 
    }
 

	
 
    write_element(target, heap, t, 0);
 
}
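
// For reference: a ParserType stores its elements as a flattened, pre-order
// array. A type written as, say, `in<u32[]>` would be stored as the element
// sequence [Input, Array, UInt32]; write_element prints one element, recurses
// at `element_idx + 1` for every embedded type, and returns the index of the
// last element it consumed.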
 

	
 
fn write_concrete_type(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType) {
 
    use ConcreteTypePart as CTP;
 

	
 
    fn write_concrete_part(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType, mut idx: usize) -> usize {
 
        if idx >= t.parts.len() {
 
            return idx;
 
        }
 

	
 
        match &t.parts[idx] {
 
            CTP::Void => target.push_str("void"),
 
            CTP::Message => target.push_str("msg"),
 
            CTP::Bool => target.push_str(KW_TYPE_BOOL_STR),
 
            CTP::UInt8 => target.push_str(KW_TYPE_UINT8_STR),
 
            CTP::UInt16 => target.push_str(KW_TYPE_UINT16_STR),
 
            CTP::UInt32 => target.push_str(KW_TYPE_UINT32_STR),
 
            CTP::UInt64 => target.push_str(KW_TYPE_UINT64_STR),
 
            CTP::SInt8 => target.push_str(KW_TYPE_SINT8_STR),
 
            CTP::SInt16 => target.push_str(KW_TYPE_SINT16_STR),
 
            CTP::SInt32 => target.push_str(KW_TYPE_SINT32_STR),
 
            CTP::SInt64 => target.push_str(KW_TYPE_SINT64_STR),
 
            CTP::Character => target.push_str(KW_TYPE_CHAR_STR),
 
            CTP::String => target.push_str(KW_TYPE_STRING_STR),
 
            CTP::Pointer => target.push('*'),
 
            CTP::Array => {
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[]");
 
            },
 
            CTP::Slice => {
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[..]");
 
            }
 
            CTP::Input => {
 
                target.push_str("in<");
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>');
 
            },
 
            CTP::Output => {
 
                target.push_str("out<");
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>')
 
            },
 
            CTP::Tuple(num_embedded) => {
 
                target.push('(');
 
                for idx_embedded in 0..*num_embedded {
 
                    if idx_embedded != 0 {
 
                        target.push_str(", ");
 
                    }
 
                    idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                }
 
                target.push(')');
 
            },
 
            CTP::Instance(definition_id, num_embedded) => {
 
                let identifier = heap[*definition_id].identifier();
 
                target.push_str(identifier.value.as_str());
 
                target.push('<');
 
                for idx_embedded in 0..*num_embedded {
 
                    if idx_embedded != 0 {
 
                        target.push_str(", ");
 
                    }
 
                    idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                }
 
                target.push('>');
 
            },
 
            CTP::Function(_, _) => todo!("AST printer for ConcreteTypePart::Function"),
 
            CTP::Component(_, _) => todo!("AST printer for ConcreteTypePart::Component"),
 
        }
 

	
 
        idx + 1
 
    }
 

	
 
    write_concrete_part(target, heap, def_id, t, 0);
 
}
 

	
 
fn write_expression_parent(target: &mut String, parent: &ExpressionParent) {
 
    use ExpressionParent as EP;
 

	
 
    *target = match parent {
 
        EP::None => String::from("None"),
 
        EP::Memory(id) => format!("MemStmt({})", id.0.0.index),
 
        EP::If(id) => format!("IfStmt({})", id.0.index),
 
        EP::While(id) => format!("WhileStmt({})", id.0.index),
 
        EP::Return(id) => format!("ReturnStmt({})", id.0.index),
 
        EP::New(id) => format!("NewStmt({})", id.0.index),
 
        EP::ExpressionStmt(id) => format!("ExprStmt({})", id.0.index),
 
        EP::Expression(id, idx) => format!("Expr({}, {})", id.index, idx)
 
    };
 
}
 
\ No newline at end of file
src/protocol/mod.rs
Show inline comments
 
mod arena;
 
pub(crate) mod eval;
 
pub(crate) mod input_source;
 
mod parser;
 
#[cfg(test)] mod tests;
 

	
 
pub(crate) mod ast;
 
pub(crate) mod ast_printer;
 

	
 
use std::sync::Mutex;
 

	
 
use crate::collections::{StringPool, StringRef};
 
use crate::protocol::ast::*;
 
pub use crate::protocol::ast::*;
 
use crate::protocol::eval::*;
 
use crate::protocol::input_source::*;
 
use crate::protocol::parser::*;
 
use crate::protocol::type_table::*;
 

	
 
pub use parser::type_table::TypeId;
 

	
 
/// A protocol description module
 
pub struct Module {
 
    pub(crate) source: InputSource,
 
    pub(crate) root_id: RootId,
 
    pub(crate) name: Option<StringRef<'static>>,
 
}
 
/// Description of a protocol object, used to configure new connectors.
 
#[repr(C)]
 
pub struct ProtocolDescription {
 
    pub(crate) modules: Vec<Module>,
 
    pub(crate) heap: Heap,
 
    pub(crate) types: TypeTable,
 
    pub(crate) pool: Mutex<StringPool>,
 
}
 
#[derive(Debug, Clone)]
 
pub(crate) struct ComponentState {
 
    pub(crate) prompt: Prompt,
 
}
 

	
 
#[derive(Debug)]
 
pub enum ComponentCreationError {
 
    ModuleDoesntExist,
 
    DefinitionDoesntExist,
 
    DefinitionNotComponent,
 
    InvalidNumArguments,
 
    InvalidArgumentType(usize),
 
    UnownedPort,
 
    InSync,
 
}
 

	
 
impl ProtocolDescription {
 
    pub fn parse(buffer: &[u8]) -> Result<Self, String> {
 
        let source = InputSource::new(String::new(), Vec::from(buffer));
 
        let mut parser = Parser::new();
 
        let mut parser = Parser::new()?;
 
        parser.feed(source).expect("failed to feed source");
 
        
 
        if let Err(err) = parser.parse() {
 
            println!("ERROR:\n{}", err);
 
            return Err(format!("{}", err))
 
        }
 

	
 
        debug_assert_eq!(parser.modules.len(), 1, "only supporting one module here for now");
 
        let modules: Vec<Module> = parser.modules.into_iter()
 
            .map(|module| Module{
 
                source: module.source,
 
                root_id: module.root_id,
 
                name: module.name.map(|(_, name)| name)
 
            })
 
            .collect();
 

	
 
        return Ok(ProtocolDescription {
 
            modules,
 
            heap: parser.heap,
 
            types: parser.type_table,
 
            pool: Mutex::new(parser.string_pool),
 
        });
 
    }
 

	
 
    pub(crate) fn new_component(
 
        &self, module_name: &[u8], identifier: &[u8], arguments: ValueGroup
 
    ) -> Result<Prompt, ComponentCreationError> {
 
        // Find the module in which the definition can be found
 
        let module_root = self.lookup_module_root(module_name);
 
        if module_root.is_none() {
 
            return Err(ComponentCreationError::ModuleDoesntExist);
 
        }
 
        let module_root = module_root.unwrap();
 

	
 
        let root = &self.heap[module_root];
 
        let definition_id = root.get_definition_ident(&self.heap, identifier);
 
        if definition_id.is_none() {
 
            return Err(ComponentCreationError::DefinitionDoesntExist);
 
        }
 
        let definition_id = definition_id.unwrap();
 

	
 
        let ast_definition = &self.heap[definition_id];
 
        if !ast_definition.is_procedure() {
 
            return Err(ComponentCreationError::DefinitionNotComponent);
 
        }
 

	
 
        // Make sure that the types in the provided value group match the
 
        // expected types.
 
        let ast_definition = ast_definition.as_procedure();
 
        if !ast_definition.poly_vars.is_empty() || ast_definition.kind == ProcedureKind::Function {
 
            return Err(ComponentCreationError::DefinitionNotComponent);
 
        }
 

	
 
        // - check number of arguments by retrieving the one instantiated
 
        //   monomorph
 
        let concrete_type = ConcreteType{ parts: vec![ConcreteTypePart::Component(ast_definition.this, 0)] };
 
        let procedure_type_id = self.types.get_procedure_monomorph_type_id(&definition_id, &concrete_type.parts).unwrap();
 
        let procedure_monomorph_index = self.types.get_monomorph(procedure_type_id).variant.as_procedure().monomorph_index;
 
        let monomorph_info = &ast_definition.monomorphs[procedure_monomorph_index as usize];
 
        if monomorph_info.argument_types.len() != arguments.values.len() {
 
            return Err(ComponentCreationError::InvalidNumArguments);
 
        }
 

	
 
        // - for each argument try to make sure the types match
 
        for arg_idx in 0..arguments.values.len() {
 
            let expected_type_id = monomorph_info.argument_types[arg_idx];
 
            let expected_type = &self.types.get_monomorph(expected_type_id).concrete_type;
 
            let provided_value = &arguments.values[arg_idx];
 
            if !self.verify_same_type(expected_type, 0, &arguments, provided_value) {
 
                return Err(ComponentCreationError::InvalidArgumentType(arg_idx));
 
            }
 
        }
 

	
 
        // By now we're sure that all of the arguments are correct. So create
 
        // the connector.
 
        return Ok(Prompt::new(&self.types, &self.heap, ast_definition.this, procedure_type_id, arguments));
 
    }
 

	
 
    fn lookup_module_root(&self, module_name: &[u8]) -> Option<RootId> {
 
        for module in self.modules.iter() {
 
            match &module.name {
 
                Some(name) => if name.as_bytes() == module_name {
 
                    return Some(module.root_id);
 
                },
 
                None => if module_name.is_empty() {
 
                    return Some(module.root_id);
 
                }
 
            }
 
        }
 

	
 
        return None;
 
    }
 

	
 
    fn verify_same_type(&self, expected: &ConcreteType, expected_idx: usize, arguments: &ValueGroup, argument: &Value) -> bool {
 
        use ConcreteTypePart as CTP;
 

	
 
        match &expected.parts[expected_idx] {
 
            CTP::Void | CTP::Message | CTP::Slice | CTP::Pointer | CTP::Function(_, _) | CTP::Component(_, _) => unreachable!(),
 
            CTP::Bool => if let Value::Bool(_) = argument { true } else { false },
 
            CTP::UInt8 => if let Value::UInt8(_) = argument { true } else { false },
 
            CTP::UInt16 => if let Value::UInt16(_) = argument { true } else { false },
 
            CTP::UInt32 => if let Value::UInt32(_) = argument { true } else { false },
 
            CTP::UInt64 => if let Value::UInt64(_) = argument { true } else { false },
 
            CTP::SInt8 => if let Value::SInt8(_) = argument { true } else { false },
 
            CTP::SInt16 => if let Value::SInt16(_) = argument { true } else { false },
 
            CTP::SInt32 => if let Value::SInt32(_) = argument { true } else { false },
 
            CTP::SInt64 => if let Value::SInt64(_) = argument { true } else { false },
 
            CTP::Character => if let Value::Char(_) = argument { true } else { false },
 
            CTP::String => {
 
                // Match outer string type and embedded character types
 
                if let Value::String(heap_pos) = argument {
 
                    for element in &arguments.regions[*heap_pos as usize] {
 
                        if let Value::Char(_) = element {} else {
 
                            return false;
 
                        }
 
                    }
 
                } else {
 
                    return false;
 
                }
 

	
 
                return true;
 
            },
 
            CTP::Array => {
 
                if let Value::Array(heap_pos) = argument {
 
                    let heap_pos = *heap_pos;
 
                    for element in &arguments.regions[heap_pos as usize] {
 
                        if !self.verify_same_type(expected, expected_idx + 1, arguments, element) {
 
                            return false;
 
                        }
 
                    }
 
                    return true;
 
                } else {
 
                    return false;
 
                }
 
            },
 
            CTP::Input => if let Value::Input(_) = argument { true } else { false },
 
            CTP::Output => if let Value::Output(_) = argument { true } else { false },
 
            CTP::Tuple(_) => todo!("implement full type checking on user-supplied arguments"),
 
            CTP::Instance(definition_id, _num_embedded) => {
 
                let definition = self.types.get_base_definition(definition_id).unwrap();
 
                match &definition.definition {
 
                    DefinedTypeVariant::Enum(definition) => {
 
                        if let Value::Enum(variant_value) = argument {
 
                            let is_valid = definition.variants.iter()
 
                                .any(|v| v.value == *variant_value);
 
                            return is_valid;
 
                        }
 
                    },
 
                    _ => todo!("implement full type checking on user-supplied arguments"),
 
                }
 

	
 
                return false;
 
            },
 
        }
 
    }
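
    // For illustration: with an expected ConcreteType whose parts are
    // [Array, UInt32] (an array of u32), verify_same_type accepts a
    // Value::Array whose heap region contains only Value::UInt32 values, and
    // rejects any other value shape.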
 
}
 

	
 
pub trait RunContext {
 
    fn performed_put(&mut self, port: PortId) -> bool;
 
    fn performed_get(&mut self, port: PortId) -> Option<ValueGroup>; // None if still waiting on message
 
    fn fires(&mut self, port: PortId) -> Option<Value>; // None if not yet branched
 
    fn performed_fork(&mut self) -> Option<bool>; // None if not yet forked
 
    fn created_channel(&mut self) -> Option<(Value, Value)>; // None if not yet prepared
 
    fn performed_select_wait(&mut self) -> Option<u32>; // None if not yet notified runtime of select blocker
 
}
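
// A minimal sketch (not part of this changeset) of how a runtime might
// implement RunContext. The type below is hypothetical; it simply reports
// that nothing has happened yet, which is the "still waiting" answer for
// every callback.
pub struct IdleRunContext;

impl RunContext for IdleRunContext {
    fn performed_put(&mut self, _port: PortId) -> bool { false }
    fn performed_get(&mut self, _port: PortId) -> Option<ValueGroup> { None }
    fn fires(&mut self, _port: PortId) -> Option<Value> { None }
    fn performed_fork(&mut self) -> Option<bool> { None }
    fn created_channel(&mut self) -> Option<(Value, Value)> { None }
    fn performed_select_wait(&mut self) -> Option<u32> { None }
}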
 

	
 
pub struct ProtocolDescriptionBuilder {
 
    parser: Parser,
 
}
 

	
 
impl ProtocolDescriptionBuilder {
 
    pub fn new() -> Self {
 
        return Self{
 
            parser: Parser::new(),
 
        }
 
    pub fn new() -> Result<Self, String> {
 
        return Ok(Self{
 
            parser: Parser::new()?,
 
        })
 
    }
 

	
 
    pub fn add(&mut self, filename: String, buffer: Vec<u8>) -> Result<(), ParseError> {
 
        let input = InputSource::new(filename, buffer);
 
        self.parser.feed(input)?;
 

	
 
        return Ok(())
 
    }
 

	
 
    pub fn compile(mut self) -> Result<ProtocolDescription, ParseError> {
 
        self.parser.parse()?;
 

	
 
        let modules: Vec<Module> = self.parser.modules.into_iter()
 
            .map(|module| Module{
 
                source: module.source,
 
                root_id: module.root_id,
 
                name: module.name.map(|(_, name)| name)
 
            })
 
            .collect();
 

	
 
        return Ok(ProtocolDescription {
 
            modules,
 
            heap: self.parser.heap,
 
            types: self.parser.type_table,
 
            pool: Mutex::new(self.parser.string_pool),
 
        });
 
    }
 
}
src/protocol/parser/mod.rs
Show inline comments
 
#[macro_use] mod visitor;
 
pub(crate) mod symbol_table;
 
pub(crate) mod type_table;
 
pub(crate) mod tokens;
 
pub(crate) mod token_parsing;
 
pub(crate) mod pass_tokenizer;
 
pub(crate) mod pass_symbols;
 
pub(crate) mod pass_imports;
 
pub(crate) mod pass_definitions;
 
pub(crate) mod pass_definitions_types;
 
pub(crate) mod pass_validation_linking;
 
pub(crate) mod pass_rewriting;
 
pub(crate) mod pass_typing;
 
pub(crate) mod pass_stack_size;
 

	
 
use tokens::*;
 
use crate::collections::*;
 
use visitor::Visitor;
 
use pass_tokenizer::PassTokenizer;
 
use pass_symbols::PassSymbols;
 
use pass_imports::PassImport;
 
use pass_definitions::PassDefinitions;
 
use pass_validation_linking::PassValidationLinking;
 
use pass_typing::{PassTyping, ResolveQueue};
 
use pass_rewriting::PassRewriting;
 
use pass_stack_size::PassStackSize;
 
use symbol_table::*;
 
use type_table::*;
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::input_source::*;
 

	
 
use crate::protocol::ast_printer::ASTWriter;
 
use crate::protocol::parser::type_table::PolymorphicVariable;
 

	
 
const REOWOLF_PATH_ENV: &'static str = "REOWOLF_ROOT"; // first lookup reowolf path
 
const REOWOLF_PATH_DIR: &'static str = "std"; // then try folder in current working directory
 

	
 
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
 
pub enum ModuleCompilationPhase {
 
    Tokenized,              // source is tokenized
 
    SymbolsScanned,         // all definitions are linked to their type class
 
    ImportsResolved,        // all imports are added to the symbol table
 
    DefinitionsParsed,      // produced the AST for the entire module
 
    TypesAddedToTable,      // added all definitions to the type table
 
    ValidatedAndLinked,     // AST is traversed and has linked the required AST nodes
 
    Typed,                  // Type inference and checking has been performed
 
    Rewritten,              // Special AST nodes are rewritten into regular AST nodes
 
    // When we continue with the compiler:
 
    // StackSize
 
}
 

	
 
pub struct Module {
 
    // Buffers
 
    pub source: InputSource,
 
    pub tokens: TokenBuffer,
 
    // Identifiers
 
    pub root_id: RootId,
 
    pub name: Option<(PragmaId, StringRef<'static>)>,
 
    pub version: Option<(PragmaId, i64)>,
 
    pub phase: ModuleCompilationPhase,
 
}
 

	
 
pub struct TargetArch {
 
    pub void_type_id: TypeId,
 
    pub message_type_id: TypeId,
 
    pub bool_type_id: TypeId,
 
    pub uint8_type_id: TypeId,
 
    pub uint16_type_id: TypeId,
 
    pub uint32_type_id: TypeId,
 
    pub uint64_type_id: TypeId,
 
    pub sint8_type_id: TypeId,
 
    pub sint16_type_id: TypeId,
 
    pub sint32_type_id: TypeId,
 
    pub sint64_type_id: TypeId,
 
    pub char_type_id: TypeId,
 
    pub string_type_id: TypeId,
 
    pub array_type_id: TypeId,
 
    pub slice_type_id: TypeId,
 
    pub input_type_id: TypeId,
 
    pub output_type_id: TypeId,
 
    pub pointer_type_id: TypeId,
 
}
 

	
 
impl TargetArch {
 
    fn new() -> Self {
 
        return Self{
 
            void_type_id: TypeId::new_invalid(),
 
            bool_type_id: TypeId::new_invalid(),
 
            message_type_id: TypeId::new_invalid(),
 
            uint8_type_id: TypeId::new_invalid(),
 
            uint16_type_id: TypeId::new_invalid(),
 
            uint32_type_id: TypeId::new_invalid(),
 
            uint64_type_id: TypeId::new_invalid(),
 
            sint8_type_id: TypeId::new_invalid(),
 
            sint16_type_id: TypeId::new_invalid(),
 
            sint32_type_id: TypeId::new_invalid(),
 
            sint64_type_id: TypeId::new_invalid(),
 
            char_type_id: TypeId::new_invalid(),
 
            string_type_id: TypeId::new_invalid(),
 
            array_type_id: TypeId::new_invalid(),
 
            slice_type_id: TypeId::new_invalid(),
 
            input_type_id: TypeId::new_invalid(),
 
            output_type_id: TypeId::new_invalid(),
 
            pointer_type_id: TypeId::new_invalid(),
 
        }
 
    }
 
}
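
// Note: the TypeIds in TargetArch start out invalid; Parser::new() fills them
// in via insert_builtin_type() when it registers the builtin types.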
 

	
 
pub struct PassCtx<'a> {
 
    heap: &'a mut Heap,
 
    symbols: &'a mut SymbolTable,
 
    pool: &'a mut StringPool,
 
    arch: &'a TargetArch,
 
}
 

	
 
pub struct Parser {
 
    // Storage of all information created/gathered during compilation.
 
    pub(crate) heap: Heap,
 
    pub(crate) string_pool: StringPool, // Do not deallocate, holds all strings
 
    pub(crate) modules: Vec<Module>,
 
    pub(crate) symbol_table: SymbolTable,
 
    pub(crate) type_table: TypeTable,
 
    // Compiler passes, used as little state machines that keep their memory
 
    // around.
 
    pass_tokenizer: PassTokenizer,
 
    pass_symbols: PassSymbols,
 
    pass_import: PassImport,
 
    pass_definitions: PassDefinitions,
 
    pass_validation: PassValidationLinking,
 
    pass_typing: PassTyping,
 
    pass_rewriting: PassRewriting,
 
    pass_stack_size: PassStackSize,
 
    // Compiler options
 
    pub write_ast_to: Option<String>,
 
    pub(crate) arch: TargetArch,
 
}
 

	
 
impl Parser {
 
    pub fn new() -> Self {
 
    pub fn new() -> Result<Self, String> {
 
        let mut parser = Parser{
 
            heap: Heap::new(),
 
            string_pool: StringPool::new(),
 
            modules: Vec::new(),
 
            symbol_table: SymbolTable::new(),
 
            type_table: TypeTable::new(),
 
            pass_tokenizer: PassTokenizer::new(),
 
            pass_symbols: PassSymbols::new(),
 
            pass_import: PassImport::new(),
 
            pass_definitions: PassDefinitions::new(),
 
            pass_validation: PassValidationLinking::new(),
 
            pass_typing: PassTyping::new(),
 
            pass_rewriting: PassRewriting::new(),
 
            pass_stack_size: PassStackSize::new(),
 
            write_ast_to: None,
 
            arch: TargetArch::new(),
 
        };
 

	
 
        parser.symbol_table.insert_scope(None, SymbolScope::Global);
 

	
 
        // Insert builtin types
 
        // TODO: At some point use correct values for size/alignment
 
        parser.arch.void_type_id    = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::Void], false, 0, 1);
 
        parser.arch.message_type_id = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::Message], false, 24, 8);
 
        parser.arch.bool_type_id    = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::Bool], false, 1, 1);
 
        parser.arch.uint8_type_id   = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::UInt8], false, 1, 1);
 
        parser.arch.uint16_type_id  = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::UInt16], false, 2, 2);
 
        parser.arch.uint32_type_id  = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::UInt32], false, 4, 4);
 
        parser.arch.uint64_type_id  = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::UInt64], false, 8, 8);
 
        parser.arch.sint8_type_id   = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::SInt8], false, 1, 1);
 
        parser.arch.sint16_type_id  = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::SInt16], false, 2, 2);
 
        parser.arch.sint32_type_id  = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::SInt32], false, 4, 4);
 
        parser.arch.sint64_type_id  = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::SInt64], false, 8, 8);
 
        parser.arch.char_type_id    = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::Character], false, 4, 4);
 
        parser.arch.string_type_id  = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::String], false, 24, 8);
 
        parser.arch.array_type_id   = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::Array, ConcreteTypePart::Void], true, 24, 8);
 
        parser.arch.slice_type_id   = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::Slice, ConcreteTypePart::Void], true, 16, 4);
 
        parser.arch.input_type_id   = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::Input, ConcreteTypePart::Void], true, 8, 8);
 
        parser.arch.output_type_id  = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::Output, ConcreteTypePart::Void], true, 8, 8);
 
        parser.arch.pointer_type_id = insert_builtin_type(&mut parser.type_table, vec![ConcreteTypePart::Pointer, ConcreteTypePart::Void], true, 8, 8);
 

	
 
        // Insert builtin functions
 
        fn quick_type(variants: &[ParserTypeVariant]) -> ParserType {
 
            let mut t = ParserType{ elements: Vec::with_capacity(variants.len()), full_span: InputSpan::new() };
 
            for variant in variants {
 
                t.elements.push(ParserTypeElement{ element_span: InputSpan::new(), variant: variant.clone() });
 
            }
 
            t
 
        }
 
        parser.feed_standard_library()?;
 

	
 
        use ParserTypeVariant as PTV;
 
        insert_builtin_function(&mut parser, "get", &["T"], |id| (
 
            vec![
 
                ("input", quick_type(&[PTV::Input, PTV::PolymorphicArgument(id.upcast(), 0)]))
 
            ],
 
            quick_type(&[PTV::PolymorphicArgument(id.upcast(), 0)])
 
        ));
 
        insert_builtin_function(&mut parser, "put", &["T"], |id| (
 
            vec![
 
                ("output", quick_type(&[PTV::Output, PTV::PolymorphicArgument(id.upcast(), 0)])),
 
                ("value", quick_type(&[PTV::PolymorphicArgument(id.upcast(), 0)])),
 
            ],
 
            quick_type(&[PTV::Void])
 
        ));
 
        insert_builtin_function(&mut parser, "fires", &["T"], |id| (
 
            vec![
 
                ("port", quick_type(&[PTV::InputOrOutput, PTV::PolymorphicArgument(id.upcast(), 0)]))
 
            ],
 
            quick_type(&[PTV::Bool])
 
        ));
 
        insert_builtin_function(&mut parser, "create", &["T"], |id| (
 
            vec![
 
                ("length", quick_type(&[PTV::IntegerLike]))
 
            ],
 
            quick_type(&[PTV::ArrayLike, PTV::PolymorphicArgument(id.upcast(), 0)])
 
        ));
 
        insert_builtin_function(&mut parser, "length", &["T"], |id| (
 
            vec![
 
                ("array", quick_type(&[PTV::ArrayLike, PTV::PolymorphicArgument(id.upcast(), 0)]))
 
            ],
 
            quick_type(&[PTV::UInt32]) // TODO: @PtrInt
 
        ));
 
        insert_builtin_function(&mut parser, "assert", &[], |_id| (
 
            vec![
 
                ("condition", quick_type(&[PTV::Bool])),
 
            ],
 
            quick_type(&[PTV::Void])
 
        ));
 
        insert_builtin_function(&mut parser, "print", &[], |_id| (
 
            vec![
 
                ("message", quick_type(&[PTV::String])),
 
            ],
 
            quick_type(&[PTV::Void])
 
        ));
 

	
 
        parser
 
        return Ok(parser)
 
    }
 

	
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<(), ParseError> {
 
        let mut token_buffer = TokenBuffer::new();
 
        self.pass_tokenizer.tokenize(&mut source, &mut token_buffer)?;
 

	
 
        let module = Module{
 
            source,
 
            tokens: token_buffer,
 
            root_id: RootId::new_invalid(),
 
            name: None,
 
            version: None,
 
            phase: ModuleCompilationPhase::Tokenized,
 
        };
 
        self.modules.push(module);
 

	
 
        Ok(())
 
    }
 

	
 
    pub fn parse(&mut self) -> Result<(), ParseError> {
 
        let mut pass_ctx = PassCtx{
 
            heap: &mut self.heap,
 
            symbols: &mut self.symbol_table,
 
            pool: &mut self.string_pool,
 
            arch: &self.arch,
 
        };
 

	
 
        // Advance all modules to the phase where all symbols are scanned
 
        for module_idx in 0..self.modules.len() {
 
            self.pass_symbols.parse(&mut self.modules, module_idx, &mut pass_ctx)?;
 
        }
 

	
 
        // With all symbols scanned, perform further compilation until we can
 
        // add all base types to the type table.
 
        for module_idx in 0..self.modules.len() {
 
            self.pass_import.parse(&mut self.modules, module_idx, &mut pass_ctx)?;
 
            self.pass_definitions.parse(&mut self.modules, module_idx, &mut pass_ctx)?;
 
        }
 

	
 
        // Add every known type to the type table
 
        self.type_table.build_base_types(&mut self.modules, &mut pass_ctx)?;
 

	
 
        // Continue compilation with the remaining phases now that the types
 
        // are all in the type table
 
        for module_idx in 0..self.modules.len() {
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                modules: &mut self.modules,
 
                module_idx,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
                arch: &self.arch,
 
            };
 
            self.pass_validation.visit_module(&mut ctx)?;
 
        }
 

	
 
        // Perform typechecking on all modules
 
        let mut queue = ResolveQueue::new();
 
        for module_idx in 0..self.modules.len() {
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                modules: &mut self.modules,
 
                module_idx,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
                arch: &self.arch,
 
            };
 
            self.pass_typing.queue_module_definitions(&mut ctx, &mut queue);
 
        };
 
        while !queue.is_empty() {
 
            let top = queue.pop_front().unwrap();
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                modules: &mut self.modules,
 
                module_idx: top.root_id.index as usize,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
                arch: &self.arch,
 
            };
 
            self.pass_typing.handle_module_definition(&mut ctx, &mut queue, top)?;
 
        }
 

	
 
        // Rewrite nodes in tree, then prepare for execution of code
 
        for module_idx in 0..self.modules.len() {
 
            self.modules[module_idx].phase = ModuleCompilationPhase::Typed;
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                modules: &mut self.modules,
 
                module_idx,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
                arch: &self.arch,
 
            };
 
            self.pass_rewriting.visit_module(&mut ctx)?;
 
            self.pass_stack_size.visit_module(&mut ctx)?;
 
        }
 

	
 
        // Write out desired information
 
        if let Some(filename) = &self.write_ast_to {
 
            let mut writer = ASTWriter::new();
 
            let mut file = std::fs::File::create(std::path::Path::new(filename)).unwrap();
 
            writer.write_ast(&mut file, &self.heap);
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    /// Tries to find the standard library and feed its files to the parser.
 
    fn feed_standard_library(&mut self) -> Result<(), String> {
 
        use std::env;
 
        use std::path::{Path, PathBuf};
 
        use std::fs;
 

	
 
        const FILES: [&'static str; 1] = [
 
            "std.global.pdl",
 
        ];
 

	
 
        // Determine base directory
 
        let (base_path, from_env) = if let Ok(path) = env::var(REOWOLF_PATH_ENV) {
 
            // Path variable is set
 
            (path, true)
 
        } else {
 
            let mut path = String::with_capacity(REOWOLF_PATH_DIR.len() + 2);
 
            path.push_str("./");
 
            path.push_str(REOWOLF_PATH_DIR);
 
            (path, false)
 
        };
 

	
 
        // Make sure directory exists
 
        let path = Path::new(&base_path);
 
        if !path.exists() {
 
            return Err(format!("std lib root directory '{}' does not exist", base_path));
 
        }
 

	
 
        // Try to load all standard library files. We might need a more unified
 
        // way to do this in the future (i.e. a "std" package, containing all
 
        // of the modules)
 
        let mut file_path = PathBuf::new();
 
        for file in FILES {
 
            file_path.push(path);
 
            file_path.push(file);
 

	
 
            let source = fs::read(file_path.as_path());
 
            if let Err(err) = source {
 
                return Err(format!(
 
                    "failed to read std lib file '{}' in root directory '{}', because: {}",
 
                    file, base_path, err
 
                ));
 
            }
 

	
 
            let source = source.unwrap();
 
            let input_source = InputSource::new(file.to_string(), source);
 

	
 
            if let Err(err) = self.feed(input_source) {
 
                // A bit of a hack, but shouldn't really happen anyway: the
 
                // compiler should ship with a decent standard library (at some
 
                // point)
 
                return Err(format!("{}", err));
 
            }
 
        }
 

	
 
        return Ok(())
 
    }
 
}
 

	
 
fn insert_builtin_type(type_table: &mut TypeTable, parts: Vec<ConcreteTypePart>, has_poly_var: bool, size: usize, alignment: usize) -> TypeId {
 
    const POLY_VARS: [PolymorphicVariable; 1] = [PolymorphicVariable{
 
        identifier: Identifier::new_empty(InputSpan::new()),
 
        is_in_use: false,
 
    }];
 

	
 
    let concrete_type = ConcreteType{ parts };
 
    let poly_var = if has_poly_var {
 
        POLY_VARS.as_slice()
 
    } else {
 
        &[]
 
    };
 

	
 
    return type_table.add_builtin_data_type(concrete_type, poly_var, size, alignment);
 
}
 

	
 
// Note: the argument and return types are produced by a closure, because building them requires the procedure's ID.
 
fn insert_builtin_function<T: Fn(ProcedureDefinitionId) -> (Vec<(&'static str, ParserType)>, ParserType)> (
 
    p: &mut Parser, func_name: &str, polymorphic: &[&str], arg_and_return_fn: T
 
) {
 
    // Insert into AST (to get an ID), also prepare the polymorphic variables
 
    // we need later for the type table
 
    let mut ast_poly_vars = Vec::with_capacity(polymorphic.len());
 
    let mut type_poly_vars = Vec::with_capacity(polymorphic.len());
 
    for poly_var in polymorphic {
 
        let identifier = Identifier{ span: InputSpan::new(), value: p.string_pool.intern(poly_var.as_bytes()) } ;
 
        ast_poly_vars.push(identifier.clone());
 
        type_poly_vars.push(PolymorphicVariable{ identifier, is_in_use: false });
 
    }
 

	
 
    let func_ident_ref = p.string_pool.intern(func_name.as_bytes());
 
    let procedure_id = p.heap.alloc_procedure_definition(|this| ProcedureDefinition {
 
        this,
 
        defined_in: RootId::new_invalid(),
 
        builtin: true,
 
        kind: ProcedureKind::Function,
 
        span: InputSpan::new(),
 
        identifier: Identifier{ span: InputSpan::new(), value: func_ident_ref.clone() },
 
        poly_vars: ast_poly_vars,
 
        return_type: None,
 
        parameters: Vec::new(),
 
        scope: ScopeId::new_invalid(),
 
        body: BlockStatementId::new_invalid(),
 
        monomorphs: Vec::new(),
 
    });
 

	
 
    // Modify AST with more information about the procedure
 
    let (arguments, return_type) = arg_and_return_fn(procedure_id);
 

	
 
    let mut parameters = Vec::with_capacity(arguments.len());
 
    for (arg_name, arg_type) in arguments {
 
        let identifier = Identifier{ span: InputSpan::new(), value: p.string_pool.intern(arg_name.as_bytes()) };
 
        let param_id = p.heap.alloc_variable(|this| Variable{
 
            this,
 
            kind: VariableKind::Parameter,
 
            parser_type: arg_type.clone(),
 
            identifier,
 
            relative_pos_in_parent: 0,
 
            unique_id_in_scope: 0
 
        });
 
        parameters.push(param_id);
 
    }
 

	
 
    let func = &mut p.heap[procedure_id];
 
    func.parameters = parameters;
 
    func.return_type = Some(return_type);
 

	
 
    // Insert into symbol table
 
    p.symbol_table.insert_symbol(SymbolScope::Global, Symbol{
 
        name: func_ident_ref,
 
        variant: SymbolVariant::Definition(SymbolDefinition{
 
            defined_in_module: RootId::new_invalid(),
 
            defined_in_scope: SymbolScope::Global,
 
            definition_span: InputSpan::new(),
 
            identifier_span: InputSpan::new(),
 
            imported_at: None,
 
            class: DefinitionClass::Function,
 
            definition_id: procedure_id.upcast(),
 
        })
 
    }).unwrap();
 

	
 
    // Insert into type table
 
    // let mut concrete_type = ConcreteType::default();
 
    // concrete_type.parts.push(ConcreteTypePart::Function(procedure_id, type_poly_vars.len() as u32));
 
    //
 
    // for _ in 0..type_poly_vars.len() {
 
    //     concrete_type.parts.push(ConcreteTypePart::Void); // doesn't matter (I hope...)
 
    // }
 
    // p.type_table.add_builtin_procedure_type(concrete_type, &type_poly_vars);
 
}
 
\ No newline at end of file
src/protocol/parser/pass_definitions.rs
Show inline comments
 
use crate::protocol::ast::*;
 
use super::symbol_table::*;
 
use super::{Module, ModuleCompilationPhase, PassCtx};
 
use super::tokens::*;
 
use super::token_parsing::*;
 
use super::pass_definitions_types::*;
 

	
 
use crate::protocol::input_source::{InputSource, InputPosition, InputSpan, ParseError};
 
use crate::collections::*;
 

	
 
/// Parses all the tokenized definitions into actual AST nodes.
 
pub(crate) struct PassDefinitions {
 
    // State associated with the definition currently being processed
 
    cur_definition: DefinitionId,
 
    // Itty bitty parsing machines
 
    type_parser: ParserTypeParser,
 
    // Temporary buffers of various kinds
 
    buffer: String,
 
    struct_fields: ScopedBuffer<StructFieldDefinition>,
 
    enum_variants: ScopedBuffer<EnumVariantDefinition>,
 
    union_variants: ScopedBuffer<UnionVariantDefinition>,
 
    variables: ScopedBuffer<VariableId>,
 
    expressions: ScopedBuffer<ExpressionId>,
 
    statements: ScopedBuffer<StatementId>,
 
    parser_types: ScopedBuffer<ParserType>,
 
}
 

	
 
impl PassDefinitions {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            cur_definition: DefinitionId::new_invalid(),
 
            type_parser: ParserTypeParser::new(),
 
            buffer: String::with_capacity(128),
 
            struct_fields: ScopedBuffer::with_capacity(128),
 
            enum_variants: ScopedBuffer::with_capacity(128),
 
            union_variants: ScopedBuffer::with_capacity(128),
 
            variables: ScopedBuffer::with_capacity(128),
 
            expressions: ScopedBuffer::with_capacity(128),
 
            statements: ScopedBuffer::with_capacity(128),
 
            parser_types: ScopedBuffer::with_capacity(128),
 
        }
 
    }
 

	
 
    pub(crate) fn parse(&mut self, modules: &mut [Module], module_idx: usize, ctx: &mut PassCtx) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let module_range = &module.tokens.ranges[0];
 
        debug_assert_eq!(module.phase, ModuleCompilationPhase::ImportsResolved);
 
        debug_assert_eq!(module_range.range_kind, TokenRangeKind::Module);
 

	
 
        // Although we only need to parse the definitions, we want to go through
 
        // code ranges as well such that we can throw errors if we get
 
        // unexpected tokens at the module level of the source.
 
        let mut range_idx = module_range.first_child_idx;
 
        loop {
 
            let range_idx_usize = range_idx as usize;
 
            let cur_range = &module.tokens.ranges[range_idx_usize];
 

	
 
            match cur_range.range_kind {
 
                TokenRangeKind::Module => unreachable!(), // should not be reachable
 
                TokenRangeKind::Pragma | TokenRangeKind::Import => {
 
                    // Already fully parsed, fall through and go to next range
 
                },
 
                TokenRangeKind::Definition | TokenRangeKind::Code => {
 
                    // Visit range even if it is a "code" range to provide
 
                    // proper error messages.
 
                    self.visit_range(modules, module_idx, ctx, range_idx_usize)?;
 
                },
 
            }
 

	
 
            if cur_range.next_sibling_idx == NO_SIBLING {
 
                break;
 
            } else {
 
                range_idx = cur_range.next_sibling_idx;
 
            }
 
        }
 

	
 
        modules[module_idx].phase = ModuleCompilationPhase::DefinitionsParsed;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_range(
 
        &mut self, modules: &[Module], module_idx: usize, ctx: &mut PassCtx, range_idx: usize
 
    ) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let cur_range = &module.tokens.ranges[range_idx];
 
        debug_assert!(cur_range.range_kind == TokenRangeKind::Definition || cur_range.range_kind == TokenRangeKind::Code);
 

	
 
        // Detect which definition we're parsing
 
        let mut iter = module.tokens.iter_range(cur_range);
 
        loop {
 
            let next = iter.next();
 
            if next.is_none() {
 
                return Ok(())
 
            }
 

	
 
            // Token was not None, so peek_ident returns None if not an ident
 
            let ident = peek_ident(&module.source, &mut iter);
 
            match ident {
 
                Some(KW_STRUCT) => self.visit_struct_definition(module, &mut iter, ctx)?,
 
                Some(KW_ENUM) => self.visit_enum_definition(module, &mut iter, ctx)?,
 
                Some(KW_UNION) => self.visit_union_definition(module, &mut iter, ctx)?,
 
                Some(KW_FUNCTION) => self.visit_function_definition(module, &mut iter, ctx)?,
 
                Some(KW_PRIMITIVE) | Some(KW_COMPOSITE) => self.visit_component_definition(module, &mut iter, ctx)?,
 
                _ => return Err(ParseError::new_error_str_at_pos(
 
                    &module.source, iter.last_valid_pos(),
 
                    "unexpected symbol, expected a keyword marking the start of a definition"
 
                )),
 
            }
 
        }
 
    }
 

	
 
    fn visit_struct_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_STRUCT)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        // Parse struct definition
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut fields_section = self.struct_fields.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
                let start_pos = iter.last_valid_pos();
 
                let parser_type = self.type_parser.consume_parser_type(
 
                    iter, &ctx.heap, source, &ctx.symbols, poly_vars, definition_id,
 
                    module_scope, false, None
 
                )?;
 
                let field = consume_ident_interned(source, iter, ctx)?;
 
                Ok(StructFieldDefinition{
 
                    span: InputSpan::from_positions(start_pos, field.span.end),
 
                    field, parser_type
 
                })
 
            },
 
            &mut fields_section, "a struct field", "a list of struct fields", None
 
        )?;
 

	
 
        // Transfer to preallocated definition
 
        let struct_def = ctx.heap[definition_id].as_struct_mut();
 
        struct_def.fields = fields_section.into_vec();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_enum_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_ENUM)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        // Parse enum definition
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut enum_section = self.enum_variants.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let identifier = consume_ident_interned(source, iter, ctx)?;
 
                let value = if iter.next() == Some(TokenKind::Equal) {
 
                    iter.consume();
 
                    let (variant_number, _) = consume_integer_literal(source, iter, &mut self.buffer)?;
 
                    EnumVariantValue::Integer(variant_number as i64) // TODO: @int
 
                } else {
 
                    EnumVariantValue::None
 
                };
 
                Ok(EnumVariantDefinition{ identifier, value })
 
            },
 
            &mut enum_section, "an enum variant", "a list of enum variants", None
 
        )?;
 

	
 
        // Transfer to definition
 
        let enum_def = ctx.heap[definition_id].as_enum_mut();
 
        enum_def.variants = enum_section.into_vec();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_union_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_UNION)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        // Parse union definition
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut variants_section = self.union_variants.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let identifier = consume_ident_interned(source, iter, ctx)?;
 
                let mut close_pos = identifier.span.end;
 

	
 
                let mut types_section = self.parser_types.start_section();
 

	
 
                let has_embedded = maybe_consume_comma_separated(
 
                    TokenKind::OpenParen, TokenKind::CloseParen, source, iter, ctx,
 
                    |source, iter, ctx| {
 
                        let poly_vars = ctx.heap[definition_id].poly_vars();
 
                        self.type_parser.consume_parser_type(
 
                            iter, &ctx.heap, source, &ctx.symbols, poly_vars, definition_id,
 
                            module_scope, false, None
 
                        )
 
                    },
 
                    &mut types_section, "an embedded type", Some(&mut close_pos)
 
                )?;
 
                let value = if has_embedded {
 
                    types_section.into_vec()
 
                } else {
 
                    types_section.forget();
 
                    Vec::new()
 
                };
 

	
 
                Ok(UnionVariantDefinition{
 
                    span: InputSpan::from_positions(identifier.span.begin, close_pos),
 
                    identifier,
 
                    value
 
                })
 
            },
 
            &mut variants_section, "a union variant", "a list of union variants", None
 
        )?;
 

	
 
        // Transfer to AST
 
        let union_def = ctx.heap[definition_id].as_union_mut();
 
        union_def.variants = variants_section.into_vec();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_function_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        // Retrieve function name
 
        consume_exact_ident(&module.source, iter, KW_FUNCTION)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        // Parse function's argument list
 
        let mut parameter_section = self.variables.start_section();
 
        consume_parameter_list(
 
            &mut self.type_parser, &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
 
        )?;
 
        let parameters = parameter_section.into_vec();
 

	
 
        // Consume return types
 
        consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 
        let parser_type = self.type_parser.consume_parser_type(
 
            iter, &ctx.heap, &module.source, &ctx.symbols, poly_vars, definition_id,
 
            module_scope, false, None
 
        )?;
 

	
 
        // Consume block and the definition's scope
 
        let body_id = self.consume_block_statement(module, iter, ctx)?;
 
        // Consume body
 
        let (body_id, source) = self.consume_procedure_body(module, iter, ctx, definition_id, ProcedureKind::Function)?;
 
        let scope_id = ctx.heap.alloc_scope(|this| Scope::new(this, ScopeAssociation::Definition(definition_id)));
 

	
 
        // Assign everything in the preallocated AST node
 
        let function = ctx.heap[definition_id].as_procedure_mut();
 
        function.source = source;
 
        function.return_type = Some(parser_type);
 
        function.parameters = parameters;
 
        function.scope = scope_id;
 
        function.body = body_id;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_component_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        // Consume component variant and name
 
        let (_variant_text, _) = consume_any_ident(&module.source, iter)?;
 
        debug_assert!(_variant_text == KW_PRIMITIVE || _variant_text == KW_COMPOSITE);
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated definition
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        // Parse component's argument list
 
        let mut parameter_section = self.variables.start_section();
 
        consume_parameter_list(
 
            &mut self.type_parser, &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
 
        )?;
 
        let parameters = parameter_section.into_vec();
 

	
 
        // Consume block
 
        let body_id = self.consume_block_statement(module, iter, ctx)?;
 
        // Consume body
 
        let procedure_kind = ctx.heap[definition_id].as_procedure().kind;
 
        let (body_id, source) = self.consume_procedure_body(module, iter, ctx, definition_id, procedure_kind)?;
 
        let scope_id = ctx.heap.alloc_scope(|this| Scope::new(this, ScopeAssociation::Definition(definition_id)));
 

	
 
        // Assign everything in the AST node
 
        let component = ctx.heap[definition_id].as_procedure_mut();
 
        debug_assert!(component.return_type.is_none());
 
        component.source = source;
 
        component.parameters = parameters;
 
        component.scope = scope_id;
 
        component.body = body_id;
 

	
 
        Ok(())
 
    }
 

	
 
    /// Consumes a procedure's body: either a regular user-defined body, which
 
    /// we parse as normal, or a '#builtin' pragma, in which case we check that
 
    /// the procedure is one of the known builtins.
 
    ///
 
    /// We expect that the procedure's name is already stored in the
 
    /// preallocated AST node.
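    ///
 
    /// Illustrative sketch (not the literal std-lib source): a builtin
 
    /// declaration carries no `{ ... }` block, only the `#builtin` pragma in
 
    /// its place. Just the hard-coded (module, procedure) pairs in the match
 
    /// below (all in `std.global`) are accepted; anything else is treated as
 
    /// a compiler error.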
 
    fn consume_procedure_body(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, definition_id: DefinitionId, kind: ProcedureKind
 
    ) -> Result<(BlockStatementId, ProcedureSource), ParseError> {
 
        if iter.peek() == Some(TokenKind::Pragma) {
 
            let (pragma, pragma_start, pragma_end) = consume_pragma(&module.source, iter)?;
 
            if pragma != b"#builtin" {
 
                return Err(ParseError::new_error_str_at_span(
 
                    &module.source, InputSpan::from_positions(pragma_start, pragma_end),
 
                    "expected a '#builtin' pragma, or a function body"
 
                ));
 
            }
 

	
 
            // Retrieve module and procedure name
 
            assert!(module.name.is_some(), "compiler error: builtin procedure body in unnamed module");
 
            let (_, module_name) = module.name.as_ref().unwrap();
 
            let module_name = module_name.as_str();
 

	
 
            let definition = ctx.heap[definition_id].as_procedure();
 
            let procedure_name = definition.identifier.value.as_str();
 

	
 
            let source = match (module_name, procedure_name) {
 
                ("std.global", "get") => ProcedureSource::FuncGet,
 
                ("std.global", "put") => ProcedureSource::FuncPut,
 
                ("std.global", "fires") => ProcedureSource::FuncFires,
 
                ("std.global", "create") => ProcedureSource::FuncCreate,
 
                ("std.global", "length") => ProcedureSource::FuncLength,
 
                ("std.global", "assert") => ProcedureSource::FuncAssert,
 
                ("std.global", "print") => ProcedureSource::FuncPrint,
 
                _ => panic!(
 
                    "compiler error: unknown builtin procedure '{}' in module '{}'",
 
                    procedure_name, module_name
 
                ),
 
            };
 

	
 
            return Ok((BlockStatementId::new_invalid(), source));
 
        } else {
 
            let body_id = self.consume_block_statement(module, iter, ctx)?;
 
            let source = match kind {
 
                ProcedureKind::Function =>
 
                    ProcedureSource::FuncUserDefined,
 
                ProcedureKind::Primitive | ProcedureKind::Composite =>
 
                    ProcedureSource::CompUserDefined,
 
            };
 

	
 
            return Ok((body_id, source))
 
        }
 
    }
 

	
 
    /// Consumes a single statement and returns its ID, dispatching on the
 
    /// leading token to the appropriate `consume_*_statement` method.
 
    fn consume_statement(&mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx) -> Result<StatementId, ParseError> {
 
        let next = iter.next().expect("consume_statement has a next token");
 

	
 
        if next == TokenKind::OpenCurly {
 
            let id = self.consume_block_statement(module, iter, ctx)?;
 
            return Ok(id.upcast());
 
        } else if next == TokenKind::Ident {
 
            let ident = peek_ident(&module.source, iter).unwrap();
 
            if ident == KW_STMT_IF {
 
                // Consume if statement and place end-if statement directly
 
                // after it.
 
                let id = self.consume_if_statement(module, iter, ctx)?;
 
                return Ok(id.upcast());
 
            } else if ident == KW_STMT_WHILE {
 
                let id = self.consume_while_statement(module, iter, ctx)?;
 
                return Ok(id.upcast());
 
            } else if ident == KW_STMT_BREAK {
 
                let id = self.consume_break_statement(module, iter, ctx)?;
 
                return Ok(id.upcast());
 
            } else if ident == KW_STMT_CONTINUE {
 
                let id = self.consume_continue_statement(module, iter, ctx)?;
 
                return Ok(id.upcast());
 
            } else if ident == KW_STMT_SYNC {
 
                let id = self.consume_synchronous_statement(module, iter, ctx)?;
 
                return Ok(id.upcast());
 
            } else if ident == KW_STMT_FORK {
 
                let id = self.consume_fork_statement(module, iter, ctx)?;
 

	
 
                let end_fork = ctx.heap.alloc_end_fork_statement(|this| EndForkStatement {
 
                    this,
 
                    start_fork: id,
 
                    next: StatementId::new_invalid(),
 
                });
 

	
 
                let fork_stmt = &mut ctx.heap[id];
 
                fork_stmt.end_fork = end_fork;
 

	
 
                return Ok(id.upcast());
 
            } else if ident == KW_STMT_SELECT {
 
                let id = self.consume_select_statement(module, iter, ctx)?;
 
                return Ok(id.upcast());
 
            } else if ident == KW_STMT_RETURN {
 
                let id = self.consume_return_statement(module, iter, ctx)?;
 
                return Ok(id.upcast());
 
            } else if ident == KW_STMT_GOTO {
 
                let id = self.consume_goto_statement(module, iter, ctx)?;
 
                return Ok(id.upcast());
 
            } else if ident == KW_STMT_NEW {
 
                let id = self.consume_new_statement(module, iter, ctx)?;
 
                return Ok(id.upcast());
 
            } else if ident == KW_STMT_CHANNEL {
 
                let id = self.consume_channel_statement(module, iter, ctx)?;
 
                return Ok(id.upcast().upcast());
 
            } else if iter.peek() == Some(TokenKind::Colon) {
 
                let id = self.consume_labeled_statement(module, iter, ctx)?;
 
                return Ok(id.upcast());
 
            } else {
 
                // Two fallback possibilities: the first one is a memory
 
                // declaration, the other one is to parse it as a normal
 
                // expression. This is a bit ugly.
 
                if let Some(memory_stmt_id) = self.maybe_consume_memory_statement_without_semicolon(module, iter, ctx)? {
 
                    consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
                    return Ok(memory_stmt_id.upcast().upcast());
 
                } else {
 
                    let id = self.consume_expression_statement(module, iter, ctx)?;
 
                    return Ok(id.upcast());
 
                }
 
            }
 
        } else if next == TokenKind::OpenParen {
 
            // Same as above: memory statement or normal expression
 
            if let Some(memory_stmt_id) = self.maybe_consume_memory_statement_without_semicolon(module, iter, ctx)? {
 
                consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
                return Ok(memory_stmt_id.upcast().upcast());
 
            } else {
 
                let id = self.consume_expression_statement(module, iter, ctx)?;
 
                return Ok(id.upcast());
 
            }
 
        } else {
 
            let id = self.consume_expression_statement(module, iter, ctx)?;
 
            return Ok(id.upcast());
 
        }
 
    }
 

	
 
    fn consume_block_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<BlockStatementId, ParseError> {
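        // A block is `{ stmt* }`. Besides the block node we allocate its scope
 
        // and an end-block sentinel with an invalid `next`, presumably linked
 
        // into the statement chain by a later pass.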
 
        let open_curly_span = consume_token(&module.source, iter, TokenKind::OpenCurly)?;
 

	
 
        let mut stmt_section = self.statements.start_section();
 
        let mut next = iter.next();
 
        while next != Some(TokenKind::CloseCurly) {
 
            if next.is_none() {
 
                return Err(ParseError::new_error_str_at_pos(
 
                    &module.source, iter.last_valid_pos(), "expected a statement or '}'"
 
                ));
 
            }
 
            let stmt_id = self.consume_statement(module, iter, ctx)?;
 
            stmt_section.push(stmt_id);
 
            next = iter.next();
 
        }
 

	
 
        let statements = stmt_section.into_vec();
 
        let mut block_span = consume_token(&module.source, iter, TokenKind::CloseCurly)?;
 
        block_span.begin = open_curly_span.begin;
 

	
 
        let block_id = ctx.heap.alloc_block_statement(|this| BlockStatement{
 
            this,
 
            span: block_span,
 
            statements,
 
            end_block: EndBlockStatementId::new_invalid(),
 
            scope: ScopeId::new_invalid(),
 
            next: StatementId::new_invalid(),
 
        });
 
        let scope_id = ctx.heap.alloc_scope(|this| Scope::new(this, ScopeAssociation::Block(block_id)));
 

	
 
        let end_block_id = ctx.heap.alloc_end_block_statement(|this| EndBlockStatement{
 
            this, start_block: block_id, next: StatementId::new_invalid()
 
        });
 

	
 
        let block_stmt = &mut ctx.heap[block_id];
 
        block_stmt.end_block = end_block_id;
 
        block_stmt.scope = scope_id;
 

	
 
        Ok(block_id)
 
    }
 

	
 
    fn consume_if_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<IfStatementId, ParseError> {
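        // Shape (sketch): `if (test) body`, optionally followed by
 
        // `else body`. Each branch gets its own scope, and an end-if sentinel
 
        // is allocated with an invalid `next`, presumably to be linked up by a
 
        // later pass.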
 
        let if_span = consume_exact_ident(&module.source, iter, KW_STMT_IF)?;
 
        consume_token(&module.source, iter, TokenKind::OpenParen)?;
 
        let test = self.consume_expression(module, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::CloseParen)?;
 

	
 
        // Consume bodies of if-statement
 
        let true_body = IfStatementCase{
 
            body: self.consume_statement(module, iter, ctx)?,
 
            scope: ScopeId::new_invalid(),
 
        };
 

	
 
        let false_body = if has_ident(&module.source, iter, KW_STMT_ELSE) {
 
            iter.consume();
 
            let false_body = IfStatementCase{
 
                body: self.consume_statement(module, iter, ctx)?,
 
                scope: ScopeId::new_invalid(),
 
            };
 

	
 
            Some(false_body)
 
        } else {
 
            None
 
        };
 

	
 
        // Construct AST elements
 
        let if_stmt_id = ctx.heap.alloc_if_statement(|this| IfStatement{
 
            this,
 
            span: if_span,
 
            test,
 
            true_case: true_body,
 
            false_case: false_body,
 
            end_if: EndIfStatementId::new_invalid(),
 
        });
 
        let end_if_stmt_id = ctx.heap.alloc_end_if_statement(|this| EndIfStatement{
 
            this,
 
            start_if: if_stmt_id,
 
            next: StatementId::new_invalid(),
 
        });
 
        let true_scope_id = ctx.heap.alloc_scope(|this| Scope::new(this, ScopeAssociation::If(if_stmt_id, true)));
 
        let false_scope_id = if false_body.is_some() {
 
            Some(ctx.heap.alloc_scope(|this| Scope::new(this, ScopeAssociation::If(if_stmt_id, false))))
 
        } else {
 
            None
 
        };
 

	
 
        let if_stmt = &mut ctx.heap[if_stmt_id];
 
        if_stmt.end_if = end_if_stmt_id;
 
        if_stmt.true_case.scope = true_scope_id;
 
        if let Some(false_case) = &mut if_stmt.false_case {
 
            false_case.scope = false_scope_id.unwrap();
 
        }
 

	
 
        return Ok(if_stmt_id);
 
    }
 

	
 
    fn consume_while_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<WhileStatementId, ParseError> {
 
        let while_span = consume_exact_ident(&module.source, iter, KW_STMT_WHILE)?;
 
        consume_token(&module.source, iter, TokenKind::OpenParen)?;
 
        let test = self.consume_expression(module, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::CloseParen)?;
 
        let body = self.consume_statement(module, iter, ctx)?;
 

	
 
        let while_stmt_id = ctx.heap.alloc_while_statement(|this| WhileStatement{
 
            this,
 
            span: while_span,
 
            test,
 
            scope: ScopeId::new_invalid(),
 
            body,
 
            end_while: EndWhileStatementId::new_invalid(),
 
            in_sync: SynchronousStatementId::new_invalid(),
 
        });
 
        let end_while_stmt_id = ctx.heap.alloc_end_while_statement(|this| EndWhileStatement{
 
            this,
 
            start_while: while_stmt_id,
 
            next: StatementId::new_invalid(),
 
        });
 
        let scope_id = ctx.heap.alloc_scope(|this| Scope::new(this, ScopeAssociation::While(while_stmt_id)));
 

	
 
        let while_stmt = &mut ctx.heap[while_stmt_id];
 
        while_stmt.scope = scope_id;
 
        while_stmt.end_while = end_while_stmt_id;
 

	
 
        Ok(while_stmt_id)
 
    }
 

	
 
    fn consume_break_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<BreakStatementId, ParseError> {
 
        let break_span = consume_exact_ident(&module.source, iter, KW_STMT_BREAK)?;
 
        let label = if Some(TokenKind::Ident) == iter.next() {
 
            let label = consume_ident_interned(&module.source, iter, ctx)?;
 
            Some(label)
 
        } else {
 
            None
 
        };
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
        Ok(ctx.heap.alloc_break_statement(|this| BreakStatement{
 
            this,
 
            span: break_span,
 
            label,
 
            target: EndWhileStatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_continue_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ContinueStatementId, ParseError> {
 
        let continue_span = consume_exact_ident(&module.source, iter, KW_STMT_CONTINUE)?;
 
        let label = if Some(TokenKind::Ident) == iter.next() {
 
            let label = consume_ident_interned(&module.source, iter, ctx)?;
 
            Some(label)
 
        } else {
 
            None
 
        };
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
        Ok(ctx.heap.alloc_continue_statement(|this| ContinueStatement{
 
            this,
 
            span: continue_span,
 
            label,
 
            target: WhileStatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_synchronous_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<SynchronousStatementId, ParseError> {
 
        let synchronous_span = consume_exact_ident(&module.source, iter, KW_STMT_SYNC)?;
 
        let body = self.consume_statement(module, iter, ctx)?;
 

	
 
        let sync_stmt_id = ctx.heap.alloc_synchronous_statement(|this| SynchronousStatement{
 
            this,
 
            span: synchronous_span,
 
            scope: ScopeId::new_invalid(),
 
            body,
 
            end_sync: EndSynchronousStatementId::new_invalid(),
 
        });
 
        let end_sync_stmt_id = ctx.heap.alloc_end_synchronous_statement(|this| EndSynchronousStatement{
 
            this,
 
            start_sync: sync_stmt_id,
 
            next: StatementId::new_invalid(),
 
        });
 
        let scope_id = ctx.heap.alloc_scope(|this| Scope::new(this, ScopeAssociation::Synchronous(sync_stmt_id)));
 

	
 
        let sync_stmt = &mut ctx.heap[sync_stmt_id];
 
        sync_stmt.scope = scope_id;
 
        sync_stmt.end_sync = end_sync_stmt_id;
 

	
 
        return Ok(sync_stmt_id);
 
    }
 

	
 
    fn consume_fork_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ForkStatementId, ParseError> {
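        // Shape (sketch): `fork body`, optionally followed by `or body`. The
 
        // matching end-fork sentinel is allocated by the caller in
 
        // consume_statement.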
 
        let fork_span = consume_exact_ident(&module.source, iter, KW_STMT_FORK)?;
 
        let left_body = self.consume_statement(module, iter, ctx)?;
 

	
 
        let right_body = if has_ident(&module.source, iter, KW_STMT_OR) {
 
            iter.consume();
 
            let right_body = self.consume_statement(module, iter, ctx)?;
 
            Some(right_body)
 
        } else {
 
            None
 
        };
 

	
 
        Ok(ctx.heap.alloc_fork_statement(|this| ForkStatement{
 
            this,
 
            span: fork_span,
 
            left_body,
 
            right_body,
 
            end_fork: EndForkStatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_select_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<SelectStatementId, ParseError> {
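        // Shape (sketch): `select { guard -> body ... }`, where each guard is
 
        // either a memory statement or a plain expression, and the cases
 
        // simply follow one another until the closing '}'.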
 
        let select_span = consume_exact_ident(&module.source, iter, KW_STMT_SELECT)?;
 
        consume_token(&module.source, iter, TokenKind::OpenCurly)?;
 

	
 
        let mut cases = Vec::new();
 
        let mut next = iter.next();
 

	
 
        while Some(TokenKind::CloseCurly) != next {
 
            let guard = match self.maybe_consume_memory_statement_without_semicolon(module, iter, ctx)? {
 
                Some(guard_mem_stmt) => guard_mem_stmt.upcast().upcast(),
 
                None => {
 
                    let start_pos = iter.last_valid_pos();
 
                    let expr = self.consume_expression(module, iter, ctx)?;
 
                    let end_pos = iter.last_valid_pos();
 

	
 
                    let guard_expr_stmt = ctx.heap.alloc_expression_statement(|this| ExpressionStatement{
 
                        this,
 
                        span: InputSpan::from_positions(start_pos, end_pos),
 
                        expression: expr,
 
                        next: StatementId::new_invalid(),
 
                    });
 

	
 
                    guard_expr_stmt.upcast()
 
                },
 
            };
 
            consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
            let block = self.consume_statement(module, iter, ctx)?;
 
            cases.push(SelectCase{
 
                guard,
 
                body: block,
 
                scope: ScopeId::new_invalid(),
 
                involved_ports: Vec::with_capacity(1)
 
            });
 

	
 
            next = iter.next();
 
        }
 

	
 
        consume_token(&module.source, iter, TokenKind::CloseCurly)?;
 

	
 
        let num_cases = cases.len();
 
        let select_stmt_id = ctx.heap.alloc_select_statement(|this| SelectStatement{
 
            this,
 
            span: select_span,
 
            cases,
 
            end_select: EndSelectStatementId::new_invalid(),
 
            relative_pos_in_parent: -1,
 
            next: StatementId::new_invalid(),
 
        });
 

	
 
        let end_select_stmt_id = ctx.heap.alloc_end_select_statement(|this| EndSelectStatement{
 
            this,
 
            start_select: select_stmt_id,
 
            next: StatementId::new_invalid(),
 
        });
 

	
 
        let select_stmt = &mut ctx.heap[select_stmt_id];
 
        select_stmt.end_select = end_select_stmt_id;
 

	
 
        for case_index in 0..num_cases {
 
            let scope_id = ctx.heap.alloc_scope(|this| Scope::new(this, ScopeAssociation::SelectCase(select_stmt_id, case_index as u32)));
 
            let select_stmt = &mut ctx.heap[select_stmt_id];
 
            let select_case = &mut select_stmt.cases[case_index];
 
            select_case.scope = scope_id;
 
        }
 

	
 
        return Ok(select_stmt_id)
 
    }
 

	
 
    fn consume_return_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ReturnStatementId, ParseError> {
 
        let return_span = consume_exact_ident(&module.source, iter, KW_STMT_RETURN)?;
 
        let mut scoped_section = self.expressions.start_section();
 

	
 
        consume_comma_separated_until(
 
            TokenKind::SemiColon, &module.source, iter, ctx,
 
            |_source, iter, ctx| self.consume_expression(module, iter, ctx),
 
            &mut scoped_section, "an expression", None
 
        )?;
 
        let expressions = scoped_section.into_vec();
 

	
 
        if expressions.is_empty() {
 
            return Err(ParseError::new_error_str_at_span(&module.source, return_span, "expected at least one return value"));
 
        } else if expressions.len() > 1 {
 
            return Err(ParseError::new_error_str_at_span(&module.source, return_span, "multiple return values are not (yet) supported"));
 
        }
 

	
 
        Ok(ctx.heap.alloc_return_statement(|this| ReturnStatement{
 
            this,
 
            span: return_span,
 
            expressions
 
        }))
 
    }
 

	
 
    fn consume_goto_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<GotoStatementId, ParseError> {
 
        let goto_span = consume_exact_ident(&module.source, iter, KW_STMT_GOTO)?;
 
        let label = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
        Ok(ctx.heap.alloc_goto_statement(|this| GotoStatement{
 
            this,
 
            span: goto_span,
 
            label,
 
            target: LabeledStatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_new_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<NewStatementId, ParseError> {
 
        let new_span = consume_exact_ident(&module.source, iter, KW_STMT_NEW)?;
 

	
 
        let start_pos = iter.last_valid_pos();
 
        let expression_id = self.consume_primary_expression(module, iter, ctx)?;
 
        let expression = &ctx.heap[expression_id];
 
        let mut valid = false;
 

	
 
        let mut call_id = CallExpressionId::new_invalid();
 
        if let Expression::Call(expression) = expression {
 
            // Allow both components and functions, as it makes more sense to
 
            // check their correct use in the validation and linking pass
 
            if expression.method == Method::UserComponent || expression.method == Method::UserFunction {
 
                call_id = expression.this;
 
                valid = true;
 
            }
 
        }
 

	
 
        if !valid {
 
            return Err(ParseError::new_error_str_at_span(
 
                &module.source, InputSpan::from_positions(start_pos, iter.last_valid_pos()), "expected a call expression"
 
            ));
 
        }
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        debug_assert!(!call_id.is_invalid());
 
        Ok(ctx.heap.alloc_new_statement(|this| NewStatement{
 
            this,
 
            span: new_span,
 
            expression: call_id,
 
            next: StatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_channel_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ChannelStatementId, ParseError> {
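        // Shape (sketch): `channel<T> from_port -> to_port;`, with the `<T>`
 
        // payload type optional (inferred when absent). This yields an
 
        // output-typed `from` variable and an input-typed `to` variable.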
 
        // Consume channel specification
 
        let channel_span = consume_exact_ident(&module.source, iter, KW_STMT_CHANNEL)?;
 
        let (inner_port_type, end_pos) = if Some(TokenKind::OpenAngle) == iter.next() {
 
            // Retrieve the type of the channel. We're cheating a bit here by
 
            // consuming the first '<' and setting the initial angle depth to
 
            // 1, such that our final '>' will be consumed as well.
 
            let angle_start_pos = iter.next_start_position();
 
            iter.consume();
 
            let definition_id = self.cur_definition;
 
            let poly_vars = ctx.heap[definition_id].poly_vars();
 
            let parser_type = self.type_parser.consume_parser_type(
 
                iter, &ctx.heap, &module.source, &ctx.symbols, poly_vars,
 
                definition_id, SymbolScope::Module(module.root_id),
 
                true, Some(angle_start_pos)
 
            )?;
 

	
 
            (parser_type.elements, parser_type.full_span.end)
 
        } else {
 
            // Assume inferred
 
            (
 
                vec![ParserTypeElement{
 
                    element_span: channel_span,
 
                    variant: ParserTypeVariant::Inferred
 
                }],
 
                channel_span.end
 
            )
 
        };
 

	
 
        let from_identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
        let to_identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        // Construct ports
 
        let port_type_span = InputSpan::from_positions(channel_span.begin, end_pos);
 
        let port_type_len = inner_port_type.len() + 1;
 
        let mut from_port_type = ParserType{ elements: Vec::with_capacity(port_type_len), full_span: port_type_span };
 
        from_port_type.elements.push(ParserTypeElement{
 
            element_span: channel_span,
 
            variant: ParserTypeVariant::Output,
 
        });
 
        from_port_type.elements.extend_from_slice(&inner_port_type);
 
        let from = ctx.heap.alloc_variable(|this| Variable{
 
            this,
 
            kind: VariableKind::Local,
 
            identifier: from_identifier,
 
            parser_type: from_port_type,
 
            relative_pos_in_parent: 0,
 
            unique_id_in_scope: -1,
 
        });
 

	
 
        let mut to_port_type = ParserType{ elements: Vec::with_capacity(port_type_len), full_span: port_type_span };
 
        to_port_type.elements.push(ParserTypeElement{
 
            element_span: channel_span,
 
            variant: ParserTypeVariant::Input
 
        });
 
        to_port_type.elements.extend_from_slice(&inner_port_type);
 
        let to = ctx.heap.alloc_variable(|this|Variable{
 
            this,
 
            kind: VariableKind::Local,
 
            identifier: to_identifier,
 
            parser_type: to_port_type,
 
            relative_pos_in_parent: 0,
 
            unique_id_in_scope: -1,
 
        });
 

	
 
        // Construct the channel
 
        Ok(ctx.heap.alloc_channel_statement(|this| ChannelStatement{
 
            this,
 
            span: channel_span,
 
            from, to,
 
            relative_pos_in_parent: 0,
 
            next: StatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_labeled_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<LabeledStatementId, ParseError> {
 
        let label = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::Colon)?;
 

	
 
        let inner_stmt_id = self.consume_statement(module, iter, ctx)?;
 
        let stmt_id = ctx.heap.alloc_labeled_statement(|this| LabeledStatement {
 
            this,
 
            label,
 
            body: inner_stmt_id,
 
            relative_pos_in_parent: 0,
 
            in_sync: SynchronousStatementId::new_invalid(),
 
        });
 

	
 
        return Ok(stmt_id);
 
    }
 

	
 
    /// Attempts to consume a memory statement (a statement along the lines of
 
    /// `type var_name = initial_expr`). Will return `Ok(None)` if it didn't
 
    /// seem like there was a memory statement, `Ok(Some(...))` if there was
 
    /// one, and `Err(...)` if it's reasonable to assume that there was a memory
 
    /// statement, but we failed to parse it.
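    ///
 
    /// Sketch of an accepted statement (the type name is illustrative):
 
    /// something along the lines of `u32 counter = 0`, with the trailing
 
    /// semicolon consumed by the caller.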
 
    fn maybe_consume_memory_statement_without_semicolon(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<Option<MemoryStatementId>, ParseError> {
 
        // This is a bit ugly. It would be nicer if, when we do parse a valid
 
        // type that is not followed by an identifier, we could reuse it as a
 
        // type hint while consuming the expression.
 
        let iter_state = iter.save();
 
        let definition_id = self.cur_definition;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        let parser_type = self.type_parser.consume_parser_type(
 
            iter, &ctx.heap, &module.source, &ctx.symbols, poly_vars,
 
            definition_id, SymbolScope::Definition(definition_id), true, None
 
        );
 

	
 
        if let Ok(parser_type) = parser_type {
 
            if Some(TokenKind::Ident) == iter.next() {
 
                // Assume this is a proper memory statement
 
                let identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
                let memory_span = InputSpan::from_positions(parser_type.full_span.begin, identifier.span.end);
 
                let assign_span = consume_token(&module.source, iter, TokenKind::Equal)?;
 

	
 
                let initial_expr_id = self.consume_expression(module, iter, ctx)?;
 
                let initial_expr_end_pos = iter.last_valid_pos();
 

	
 
                // Create the AST variable
 
                let local_id = ctx.heap.alloc_variable(|this| Variable{
 
                    this,
 
                    kind: VariableKind::Local,
 
                    identifier: identifier.clone(),
 
                    parser_type,
 
                    relative_pos_in_parent: 0,
 
                    unique_id_in_scope: -1,
 
                });
 

	
 
                // Create the initial assignment expression
 
                // Note: we set the initial variable declaration here
 
                let variable_expr_id = ctx.heap.alloc_variable_expression(|this| VariableExpression{
 
                    this,
 
                    identifier,
 
                    declaration: Some(local_id),
 
                    used_as_binding_target: false,
 
                    parent: ExpressionParent::None,
 
                    type_index: -1,
 
                });
 
                let assignment_expr_id = ctx.heap.alloc_assignment_expression(|this| AssignmentExpression{
 
                    this,
 
                    operator_span: assign_span,
 
                    full_span: InputSpan::from_positions(memory_span.begin, initial_expr_end_pos),
 
                    left: variable_expr_id.upcast(),
 
                    operation: AssignmentOperator::Set,
 
                    right: initial_expr_id,
 
                    parent: ExpressionParent::None,
 
                    type_index: -1,
 
                });
 

	
 
                // Put both together in the memory statement
 
                let memory_stmt_id = ctx.heap.alloc_memory_statement(|this| MemoryStatement{
 
                    this,
 
                    span: memory_span,
 
                    variable: local_id,
 
                    initial_expr: assignment_expr_id,
 
                    next: StatementId::new_invalid()
 
                });
 

	
 
                return Ok(Some(memory_stmt_id));
 
            }
 
        }
 

	
 
        // If here then one of the preconditions for a memory statement was not
 
        // met. So recover the iterator and return
 
        iter.load(iter_state);
 
        Ok(None)
 
    }
 

	
 
    fn consume_expression_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionStatementId, ParseError> {
 
        let start_pos = iter.last_valid_pos();
 
        let expression = self.consume_expression(module, iter, ctx)?;
 
        let end_pos = iter.last_valid_pos();
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        Ok(ctx.heap.alloc_expression_statement(|this| ExpressionStatement{
 
            this,
 
            span: InputSpan::from_positions(start_pos, end_pos),
 
            expression,
 
            next: StatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Expression Parsing
 
    //--------------------------------------------------------------------------
 

	
 
    // TODO: @Cleanup This is fine for now. But I prefer my stacktraces not to
 
    //  look like enterprise Java code...
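    // For reference, the precedence implemented by the chain below, from
 
    // loosest to tightest binding: assignment, conditional (?:), concatenation
 
    // (@), logical ||, logical &&, bitwise |, ^ and &, equality, relational,
 
    // shifts, + and -, then *, / and %, followed by prefix and postfix
 
    // expressions.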
 
    fn consume_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_assignment_expression(module, iter, ctx)
 
    }
 

	
 
    fn consume_assignment_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        // Utility to convert token into assignment operator
 
        fn parse_assignment_operator(token: Option<TokenKind>) -> Option<AssignmentOperator> {
 
            use TokenKind as TK;
 
            use AssignmentOperator as AO;
 

	
 
            if token.is_none() {
 
                return None
 
            }
 

	
 
            match token.unwrap() {
 
                TK::Equal               => Some(AO::Set),
 
                TK::AtEquals            => Some(AO::Concatenated),
 
                TK::StarEquals          => Some(AO::Multiplied),
 
                TK::SlashEquals         => Some(AO::Divided),
 
                TK::PercentEquals       => Some(AO::Remained),
 
                TK::PlusEquals          => Some(AO::Added),
 
                TK::MinusEquals         => Some(AO::Subtracted),
 
                TK::ShiftLeftEquals     => Some(AO::ShiftedLeft),
 
                TK::ShiftRightEquals    => Some(AO::ShiftedRight),
 
                TK::AndEquals           => Some(AO::BitwiseAnded),
 
                TK::CaretEquals         => Some(AO::BitwiseXored),
 
                TK::OrEquals            => Some(AO::BitwiseOred),
 
                _                       => None
 
            }
 
        }
 

	
 
        let expr = self.consume_conditional_expression(module, iter, ctx)?;
 
        if let Some(operation) = parse_assignment_operator(iter.next()) {
 
            let operator_span = iter.next_span();
 
            iter.consume();
 

	
 
            let left = expr;
 
            let right = self.consume_expression(module, iter, ctx)?;
 

	
 
            let full_span = InputSpan::from_positions(
 
                ctx.heap[left].full_span().begin,
 
                ctx.heap[right].full_span().end,
 
            );
 

	
 
            Ok(ctx.heap.alloc_assignment_expression(|this| AssignmentExpression{
 
                this, operator_span, full_span, left, operation, right,
 
                parent: ExpressionParent::None,
 
                type_index: -1,
 
            }).upcast())
 
        } else {
 
            Ok(expr)
 
        }
 
    }
 

	
 
    fn consume_conditional_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        let result = self.consume_concat_expression(module, iter, ctx)?;
 
        if let Some(TokenKind::Question) = iter.next() {
 
            let operator_span = iter.next_span();
 
            iter.consume();
 

	
 
            let test = result;
 
            let true_expression = self.consume_expression(module, iter, ctx)?;
 
            consume_token(&module.source, iter, TokenKind::Colon)?;
 
            let false_expression = self.consume_expression(module, iter, ctx)?;
 

	
 
            let full_span = InputSpan::from_positions(
 
                ctx.heap[test].full_span().begin,
 
                ctx.heap[false_expression].full_span().end,
 
            );
 

	
 
            Ok(ctx.heap.alloc_conditional_expression(|this| ConditionalExpression{
 
                this, operator_span, full_span, test, true_expression, false_expression,
 
                parent: ExpressionParent::None,
 
                type_index: -1,
 
            }).upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 

	
 
    fn consume_concat_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::At) => Some(BinaryOperator::Concatenate),
 
                _ => None
 
            },
 
            Self::consume_logical_or_expression
 
        )
 
    }
 

	
 
    fn consume_logical_or_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::OrOr) => Some(BinaryOperator::LogicalOr),
 
                _ => None
 
            },
 
            Self::consume_logical_and_expression
 
        )
 
    }
 

	
 
    fn consume_logical_and_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::AndAnd) => Some(BinaryOperator::LogicalAnd),
 
                _ => None
 
            },
 
            Self::consume_bitwise_or_expression
 
        )
 
    }
 

	
 
    fn consume_bitwise_or_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Or) => Some(BinaryOperator::BitwiseOr),
 
                _ => None
 
            },
 
            Self::consume_bitwise_xor_expression
 
        )
 
    }
 

	
 
    fn consume_bitwise_xor_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Caret) => Some(BinaryOperator::BitwiseXor),
 
                _ => None
 
            },
 
            Self::consume_bitwise_and_expression
 
        )
 
    }
 

	
 
    fn consume_bitwise_and_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::And) => Some(BinaryOperator::BitwiseAnd),
 
                _ => None
 
            },
 
            Self::consume_equality_expression
 
        )
 
    }
 

	
 
    fn consume_equality_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::EqualEqual) => Some(BinaryOperator::Equality),
 
                Some(TokenKind::NotEqual) => Some(BinaryOperator::Inequality),
 
                _ => None
 
            },
 
            Self::consume_relational_expression
 
        )
 
    }
 

	
 
    fn consume_relational_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::OpenAngle) => Some(BinaryOperator::LessThan),
 
                Some(TokenKind::CloseAngle) => Some(BinaryOperator::GreaterThan),
 
                Some(TokenKind::LessEquals) => Some(BinaryOperator::LessThanEqual),
 
                Some(TokenKind::GreaterEquals) => Some(BinaryOperator::GreaterThanEqual),
 
                _ => None
 
            },
 
            Self::consume_shift_expression
 
        )
 
    }
 

	
 
    fn consume_shift_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::ShiftLeft) => Some(BinaryOperator::ShiftLeft),
 
                Some(TokenKind::ShiftRight) => Some(BinaryOperator::ShiftRight),
 
                _ => None
 
            },
 
            Self::consume_add_or_subtract_expression
 
        )
 
    }
 

	
 
    fn consume_add_or_subtract_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Plus) => Some(BinaryOperator::Add),
 
                Some(TokenKind::Minus) => Some(BinaryOperator::Subtract),
 
                _ => None,
 
            },
 
            Self::consume_multiply_divide_or_modulus_expression
 
        )
 
    }
 

	
 
    fn consume_multiply_divide_or_modulus_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Star) => Some(BinaryOperator::Multiply),
 
                Some(TokenKind::Slash) => Some(BinaryOperator::Divide),
 
                Some(TokenKind::Percent) => Some(BinaryOperator::Remainder),
 
                _ => None
 
            },
 
            Self::consume_prefix_expression
 
        )
 
    }
 

	
 
    fn consume_prefix_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        fn parse_prefix_token(token: Option<TokenKind>) -> Option<UnaryOperator> {
 
            use TokenKind as TK;
 
            use UnaryOperator as UO;
 
            match token {
 
                Some(TK::Plus) => Some(UO::Positive),
 
                Some(TK::Minus) => Some(UO::Negative),
 
                Some(TK::Tilde) => Some(UO::BitwiseNot),
 
                Some(TK::Exclamation) => Some(UO::LogicalNot),
 
                _ => None
 
            }
 
        }
 

	
 
        let next = iter.next();
 
        if let Some(operation) = parse_prefix_token(next) {
 
            let operator_span = iter.next_span();
 
            iter.consume();
 

	
 
            let expression = self.consume_prefix_expression(module, iter, ctx)?;
 
            let full_span = InputSpan::from_positions(
 
                operator_span.begin, ctx.heap[expression].full_span().end,
 
            );
 
            Ok(ctx.heap.alloc_unary_expression(|this| UnaryExpression {
 
                this, operator_span, full_span, operation, expression,
 
                parent: ExpressionParent::None,
 
                type_index: -1,
 
            }).upcast())
 
        } else if next == Some(TokenKind::PlusPlus) {
 
            return Err(ParseError::new_error_str_at_span(
 
                &module.source, iter.next_span(), "prefix increment is not supported in this language"
 
            ));
 
        } else if next == Some(TokenKind::MinusMinus) {
 
            return Err(ParseError::new_error_str_at_span(
 
                &module.source, iter.next_span(), "prefix decrement is not supported in this language"
 
            ));
 
        } else {
 
            self.consume_postfix_expression(module, iter, ctx)
 
        }
 
    }
 

	
 
    fn consume_postfix_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        fn has_matching_postfix_token(token: Option<TokenKind>) -> bool {
 
            use TokenKind as TK;
 

	
 
            if token.is_none() { return false; }
 
            match token.unwrap() {
 
                TK::PlusPlus | TK::MinusMinus | TK::OpenSquare | TK::Dot => true,
 
                _ => false
 
            }
 
        }
 

	
 
        let mut result = self.consume_primary_expression(module, iter, ctx)?;
 
        let mut next = iter.next();
 
        while has_matching_postfix_token(next) {
 
            let token = next.unwrap();
 
            let mut operator_span = iter.next_span();
 
            iter.consume();
 

	
 
            if token == TokenKind::PlusPlus {
 
                return Err(ParseError::new_error_str_at_span(
 
                    &module.source, operator_span, "postfix increment is not supported in this language"
 
                ));
 
            } else if token == TokenKind::MinusMinus {
 
                return Err(ParseError::new_error_str_at_span(
 
                    &module.source, operator_span, "postfix decrement is not supported in this language"
 
                ));
 
            } else if token == TokenKind::OpenSquare {
 
                let subject = result;
 
                let from_index = self.consume_expression(module, iter, ctx)?;
 

	
 
                // Check if we have an indexing or slicing operation
 
                next = iter.next();
 
                if Some(TokenKind::DotDot) == next {
 
                    iter.consume();
 

	
 
                    let to_index = self.consume_expression(module, iter, ctx)?;
 
                    let end_span = consume_token(&module.source, iter, TokenKind::CloseSquare)?;
 
                    operator_span.end = end_span.end;
 
                    let full_span = InputSpan::from_positions(
 
                        ctx.heap[subject].full_span().begin, operator_span.end
 
                    );
 

	
 
                    result = ctx.heap.alloc_slicing_expression(|this| SlicingExpression{
 
                        this,
 
                        slicing_span: operator_span,
 
                        full_span, subject, from_index, to_index,
 
                        parent: ExpressionParent::None,
 
                        type_index: -1,
 
                    }).upcast();
 
                } else if Some(TokenKind::CloseSquare) == next {
 
                    let end_span = consume_token(&module.source, iter, TokenKind::CloseSquare)?;
 
                    operator_span.end = end_span.end;
 

	
 
                    let full_span = InputSpan::from_positions(
 
                        ctx.heap[subject].full_span().begin, operator_span.end
 
                    );
 

	
 
                    result = ctx.heap.alloc_indexing_expression(|this| IndexingExpression{
 
                        this, operator_span, full_span, subject,
 
                        index: from_index,
 
                        parent: ExpressionParent::None,
 
                        type_index: -1,
 
                    }).upcast();
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        &module.source, iter.last_valid_pos(), "unexpected token: expected ']' or '..'"
 
                    ));
 
                }
 
            } else {
 
                // Can be a select expression for struct fields, or a select
 
                // for a tuple element.
 
                debug_assert_eq!(token, TokenKind::Dot);
 
                let subject = result;
 

	
 
                let next = iter.next();
 
                let (select_kind, full_span) = if Some(TokenKind::Integer) == next {
 
                    // Tuple member
 
                    let (index, index_span) = consume_integer_literal(&module.source, iter, &mut self.buffer)?;
 
                    let full_span = InputSpan::from_positions(
 
                        ctx.heap[subject].full_span().begin, index_span.end
 
                    );
 

	
 
                    (SelectKind::TupleMember(index), full_span)
 
                } else if Some(TokenKind::Ident) == next {
 
                    // Struct field
 
                    let field_name = consume_ident_interned(&module.source, iter, ctx)?;
 

	
 
                    let full_span = InputSpan::from_positions(
 
                        ctx.heap[subject].full_span().begin, field_name.span.end
 
                    );
 

	
 
                    (SelectKind::StructField(field_name), full_span)
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        &module.source, iter.last_valid_pos(), "unexpected token: expected integer or identifier"
 
                    ));
 
                };
 

	
 
                result = ctx.heap.alloc_select_expression(|this| SelectExpression{
 
                    this, operator_span, full_span, subject,
 
                    kind: select_kind,
 
                    parent: ExpressionParent::None,
 
                    type_index: -1,
 
                }).upcast();
 
            }
 

	
 
            next = iter.next();
 
        }
 

	
 
        Ok(result)
 
    }
 

	
 
    fn consume_primary_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        let next = iter.next();
 

	
 
        let result = if next == Some(TokenKind::OpenParen) {
 
            // Something parenthesized. This can mean several things: we have
 
            // a parenthesized expression or we have a tuple literal. They are
 
            // ambiguous when the tuple has one member. But like the tuple type
 
            // parsing we interpret all one-tuples as parenthesized expressions.
 
            //
 
            // Practically (to prevent unnecessary `consume_expression` calls)
 
            // we distinguish the zero-tuple, the parenthesized expression, and
 
            // the N-tuple (for N > 1).
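            //
 
            // Concretely: `()` parses as the empty tuple literal, `(a)` as
 
            // just the expression `a`, and `(a, b)` as a two-tuple literal.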
 
            let open_paren_pos = iter.next_start_position();
 
            iter.consume();
 
            let result = if Some(TokenKind::CloseParen) == iter.next() {
 
                // Zero-tuple
 
                let (_, close_paren_pos) = iter.next_positions();
 
                iter.consume();
 

	
 
                let literal_id = ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                    this,
 
                    span: InputSpan::from_positions(open_paren_pos, close_paren_pos),
 
                    value: Literal::Tuple(Vec::new()),
 
                    parent: ExpressionParent::None,
 
                    type_index: -1,
 
                });
 

	
 
                literal_id.upcast()
 
            } else {
 
                // Start by consuming one expression, then check for a comma
 
                let expr_id = self.consume_expression(module, iter, ctx)?;
 
                if Some(TokenKind::Comma) == iter.next() && Some(TokenKind::CloseParen) != iter.peek() {
 
                    // Must be an N-tuple
 
                    iter.consume(); // the comma
 
                    let mut scoped_section = self.expressions.start_section();
 
                    scoped_section.push(expr_id);
 

	
 
                    let mut close_paren_pos = open_paren_pos;
 
                    consume_comma_separated_until(
 
                        TokenKind::CloseParen, &module.source, iter, ctx,
 
                        |_source, iter, ctx| self.consume_expression(module, iter, ctx),
 
                        &mut scoped_section, "an expression", Some(&mut close_paren_pos)
 
                    )?;
 
                    debug_assert!(scoped_section.len() > 1); // peeked token wasn't CloseParen, must be expression
 

	
 
                    let literal_id = ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                        this,
 
                        span: InputSpan::from_positions(open_paren_pos, close_paren_pos),
 
                        value: Literal::Tuple(scoped_section.into_vec()),
 
                        parent: ExpressionParent::None,
 
                        type_index: -1,
 
                    });
 

	
 
                    literal_id.upcast()
 
                } else {
 
                    // Assume we're dealing with a normal expression
 
                    consume_token(&module.source, iter, TokenKind::CloseParen)?;
 

	
 
                    expr_id
 
                }
 
            };
 

	
 
            result
 
        } else if next == Some(TokenKind::OpenCurly) {
 
            // Array literal
 
            let (start_pos, mut end_pos) = iter.next_positions();
 
            let mut scoped_section = self.expressions.start_section();
 
            consume_comma_separated(
 
                TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
                |_source, iter, ctx| self.consume_expression(module, iter, ctx),
 
                &mut scoped_section, "an expression", "a list of expressions", Some(&mut end_pos)
 
            )?;
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this,
 
                span: InputSpan::from_positions(start_pos, end_pos),
 
                value: Literal::Array(scoped_section.into_vec()),
 
                parent: ExpressionParent::None,
 
                type_index: -1,
 
            }).upcast()
 
        } else if next == Some(TokenKind::Integer) {
 
            let (literal, span) = consume_integer_literal(&module.source, iter, &mut self.buffer)?;
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this, span,
 
                value: Literal::Integer(LiteralInteger{ unsigned_value: literal, negated: false }),
 
                parent: ExpressionParent::None,
 
                type_index: -1,
 
            }).upcast()
 
        } else if next == Some(TokenKind::String) {
 
            let span = consume_string_literal(&module.source, iter, &mut self.buffer)?;
 
            let interned = ctx.pool.intern(self.buffer.as_bytes());
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this, span,
 
                value: Literal::String(interned),
 
                parent: ExpressionParent::None,
 
                type_index: -1,
 
            }).upcast()
 
        } else if next == Some(TokenKind::Character) {
 
            let (character, span) = consume_character_literal(&module.source, iter)?;
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this, span,
 
                value: Literal::Character(character),
 
                parent: ExpressionParent::None,
 
                type_index: -1,
 
            }).upcast()
 
        } else if next == Some(TokenKind::Ident) {
 
            // May be a variable, a type instantiation or a function call. If we
 
            // have a single identifier that we cannot find in the type table
 
            // then we're going to assume that we're dealing with a variable.
 

	
 
            let ident_span = iter.next_span();
 
            let ident_text = module.source.section_at_span(ident_span);
 
            let symbol = ctx.symbols.get_symbol_by_name(SymbolScope::Module(module.root_id), ident_text);
 

	
 
            if symbol.is_some() {
 
                // The first bit looked like a symbol, so we're going to follow
 
                // that all the way through and assume we arrive at some kind
 
                // of function call or type instantiation.
 
                use ParserTypeVariant as PTV;
 

	
 
                let symbol_scope = SymbolScope::Definition(self.cur_definition);
 
                let poly_vars = ctx.heap[self.cur_definition].poly_vars();
 
                let parser_type = self.type_parser.consume_parser_type(
 
                    iter, &ctx.heap, &module.source, &ctx.symbols, poly_vars, self.cur_definition,
 
                    symbol_scope, true, None
 
                )?;
 
                debug_assert!(!parser_type.elements.is_empty());
 
                match parser_type.elements[0].variant {
 
                    PTV::Definition(target_definition_id, _) => {
 
                        let definition = &ctx.heap[target_definition_id];
 
                        match definition {
 
                            Definition::Struct(_) => {
 
                                // Struct literal
 
                                let mut last_token = iter.last_valid_pos();
 
                                let mut struct_fields = Vec::new();
 
                                consume_comma_separated(
 
                                    TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
                                    |source, iter, ctx| {
 
                                        let identifier = consume_ident_interned(source, iter, ctx)?;
 
                                        consume_token(source, iter, TokenKind::Colon)?;
 
                                        let value = self.consume_expression(module, iter, ctx)?;
 
                                        Ok(LiteralStructField{ identifier, value, field_idx: 0 })
 
                                    },
 
                                    &mut struct_fields, "a struct field", "a list of struct fields", Some(&mut last_token)
 
                                )?;
 

	
 
                                ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                                    this,
 
                                    span: InputSpan::from_positions(ident_span.begin, last_token),
 
                                    value: Literal::Struct(LiteralStruct{
 
                                        parser_type,
 
                                        fields: struct_fields,
 
                                        definition: target_definition_id,
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    type_index: -1,
 
                                }).upcast()
 
                            },
 
                            Definition::Enum(_) => {
 
                                // Enum literal: consume the variant
 
                                consume_token(&module.source, iter, TokenKind::ColonColon)?;
 
                                let variant = consume_ident_interned(&module.source, iter, ctx)?;
 

	
 
                                ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                                    this,
 
                                    span: InputSpan::from_positions(ident_span.begin, variant.span.end),
 
                                    value: Literal::Enum(LiteralEnum{
 
                                        parser_type,
 
                                        variant,
 
                                        definition: target_definition_id,
 
                                        variant_idx: 0
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    type_index: -1,
 
                                }).upcast()
 
                            },
 
                            Definition::Union(_) => {
 
                                // Union literal: consume the variant
 
                                consume_token(&module.source, iter, TokenKind::ColonColon)?;
 
                                let variant = consume_ident_interned(&module.source, iter, ctx)?;
 

	
 
                                // Consume any possible embedded values
 
                                let mut end_pos = variant.span.end;
 
                                let values = if Some(TokenKind::OpenParen) == iter.next() {
 
                                    self.consume_expression_list(module, iter, ctx, Some(&mut end_pos))?
 
                                } else {
 
                                    Vec::new()
 
                                };
 

	
 
                                ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                                    this,
 
                                    span: InputSpan::from_positions(ident_span.begin, end_pos),
 
                                    value: Literal::Union(LiteralUnion{
 
                                        parser_type, variant, values,
 
                                        definition: target_definition_id,
 
                                        variant_idx: 0,
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    type_index: -1,
 
                                }).upcast()
 
                            },
 
                            Definition::Procedure(proc_def) => {
 
                                // Check whether it is a builtin function
 
                                // TODO: Once we start generating bytecode this is unnecessary
 
                                let procedure_id = proc_def.this;
 
                                let method = match proc_def.source {
 
                                    ProcedureSource::FuncUserDefined => Method::UserFunction,
 
                                    ProcedureSource::CompUserDefined => Method::UserComponent,
 
                                    ProcedureSource::FuncGet => Method::Get,
 
                                    ProcedureSource::FuncPut => Method::Put,
 
                                    ProcedureSource::FuncFires => Method::Fires,
 
                                    ProcedureSource::FuncCreate => Method::Create,
 
                                    ProcedureSource::FuncLength => Method::Length,
 
                                    ProcedureSource::FuncAssert => Method::Assert,
 
                                    ProcedureSource::FuncPrint => Method::Print,
 
                                    _ => todo!("other proc sources")
 
                                };
 

	
 
                                // Function call: consume the arguments
 
                                let func_span = parser_type.full_span;
 
                                let mut full_span = func_span;
 
                                let arguments = self.consume_expression_list(
 
                                    module, iter, ctx, Some(&mut full_span.end)
 
                                )?;
 

	
 
                                ctx.heap.alloc_call_expression(|this| CallExpression{
 
                                    this, func_span, full_span, parser_type, method, arguments,
 
                                    procedure: procedure_id,
 
                                    parent: ExpressionParent::None,
 
                                    type_index: -1,
 
                                }).upcast()
 
                            }
 
                        }
 
                    },
 
                    _ => {
 
                        return Err(ParseError::new_error_str_at_span(
 
                            &module.source, parser_type.full_span, "unexpected type in expression"
 
                        ))
 
                    }
 
                }
 
            } else {
 
                // Check for builtin literals and keywords
 
                if ident_text == KW_LIT_NULL || ident_text == KW_LIT_TRUE || ident_text == KW_LIT_FALSE {
 
                    iter.consume();
 

	
 
                    // Parse builtin literal
 
                    let value = match ident_text {
 
                        KW_LIT_NULL => Literal::Null,
 
                        KW_LIT_TRUE => Literal::True,
 
                        KW_LIT_FALSE => Literal::False,
 
                        _ => unreachable!(),
 
                    };
 

	
 
                    ctx.heap.alloc_literal_expression(|this| LiteralExpression {
 
                        this,
 
                        span: ident_span,
 
                        value,
 
                        parent: ExpressionParent::None,
 
                        type_index: -1,
 
                    }).upcast()
 
                } else if ident_text == KW_LET {
 
                    // Binding expression
 
                    let operator_span = iter.next_span();
 
                    iter.consume();
 

	
 
                    let bound_to = self.consume_prefix_expression(module, iter, ctx)?;
 
                    consume_token(&module.source, iter, TokenKind::Equal)?;
 
                    let bound_from = self.consume_prefix_expression(module, iter, ctx)?;
 

	
 
                    let full_span = InputSpan::from_positions(
 
                        operator_span.begin, ctx.heap[bound_from].full_span().end,
 
                    );
 

	
 
                    ctx.heap.alloc_binding_expression(|this| BindingExpression{
 
                        this, operator_span, full_span, bound_to, bound_from,
 
                        parent: ExpressionParent::None,
 
                        type_index: -1,
 
                    }).upcast()
 
                } else if ident_text == KW_CAST {
 
                    // Casting expression
 
                    iter.consume();
 
                    let to_type = if Some(TokenKind::OpenAngle) == iter.next() {
 
                        let angle_start_pos = iter.next_start_position();
 
                        iter.consume();
 
                        let definition_id = self.cur_definition;
 
                        let poly_vars = ctx.heap[definition_id].poly_vars();
 
                        self.type_parser.consume_parser_type(
 
                            iter, &ctx.heap, &module.source, &ctx.symbols,
 
                            poly_vars, definition_id, SymbolScope::Module(module.root_id),
 
                            true, Some(angle_start_pos)
 
                        )?
 
                    } else {
 
                        // Automatic casting with inferred target type
 
                        ParserType{
 
                            elements: vec![ParserTypeElement{
 
                                element_span: ident_span,
 
                                variant: ParserTypeVariant::Inferred,
 
                            }],
 
                            full_span: ident_span
 
                        }
 
                    };
 

	
 
                    consume_token(&module.source, iter, TokenKind::OpenParen)?;
 
                    let subject = self.consume_expression(module, iter, ctx)?;
 
                    let mut full_span = iter.next_span();
 
                    full_span.begin = to_type.full_span.begin;
 
                    consume_token(&module.source, iter, TokenKind::CloseParen)?;
 

	
 
                    ctx.heap.alloc_cast_expression(|this| CastExpression{
 
                        this,
 
                        cast_span: to_type.full_span,
 
                        full_span, to_type, subject,
 
                        parent: ExpressionParent::None,
 
                        type_index: -1,
 
                    }).upcast()
 
                } else {
 
                    // Not a builtin literal, but also not a known type, so we
 
                    // assume it is a variable expression. However, if a
 
                    // programmer mistyped a struct/function name, the resulting
 
                    // error messages will be rather cryptic. For polymorphic
 
                    // arguments we can't really do anything at all (because
 
                    // they use the '<' token). In the other cases we try to
 
                    // provide a better error message.
 
                    iter.consume();
 
                    let next = iter.next();
 
                    if Some(TokenKind::ColonColon) == next {
 
                        return Err(ParseError::new_error_str_at_span(&module.source, ident_span, "unknown identifier"));
 
                    } else if Some(TokenKind::OpenParen) == next {
 
                        return Err(ParseError::new_error_str_at_span(
 
                            &module.source, ident_span,
 
                            "unknown identifier, did you mistype a union variant's, component's, or function's name?"
 
                        ));
 
                    } else if Some(TokenKind::OpenCurly) == next {
 
                        return Err(ParseError::new_error_str_at_span(
 
                            &module.source, ident_span,
 
                            "unknown identifier, did you mistype a struct type's name?"
 
                        ))
 
                    }
 

	
 
                    let ident_text = ctx.pool.intern(ident_text);
 
                    let identifier = Identifier { span: ident_span, value: ident_text };
 

	
 
                    ctx.heap.alloc_variable_expression(|this| VariableExpression {
 
                        this,
 
                        identifier,
 
                        declaration: None,
 
                        used_as_binding_target: false,
 
                        parent: ExpressionParent::None,
 
                        type_index: -1,
 
                    }).upcast()
 
                }
 
            }
 
        } else {
 
            return Err(ParseError::new_error_str_at_pos(
 
                &module.source, iter.last_valid_pos(), "expected an expression"
 
            ));
 
        };
 

	
 
        Ok(result)
 
    }
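    // Illustrative surface forms handled by the function above (a sketch; the
    // exact keyword spelling is an assumption, not taken from this file):
    // struct literals `Point{ x: 1, y: 2 }`, enum literals `Color::Red`, union
    // literals `Option::Some(5)`, calls such as `length(values)`, casts
    // `cast<u32>(x)`, bindings `let x = y`, and plain variable reads for
    // identifiers that resolve to nothing else.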
 

	
 
    //--------------------------------------------------------------------------
 
    // Expression Utilities
 
    //--------------------------------------------------------------------------
 

	
 
    #[inline]
 
    fn consume_generic_binary_expression<
 
        M: Fn(Option<TokenKind>) -> Option<BinaryOperator>,
 
        F: Fn(&mut PassDefinitions, &Module, &mut TokenIter, &mut PassCtx) -> Result<ExpressionId, ParseError>
 
    >(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, match_fn: M, higher_precedence_fn: F
 
    ) -> Result<ExpressionId, ParseError> {
 
        let mut result = higher_precedence_fn(self, module, iter, ctx)?;
 
        while let Some(operation) = match_fn(iter.next()) {
 
            let operator_span = iter.next_span();
 
            iter.consume();
 

	
 
            let left = result;
 
            let right = higher_precedence_fn(self, module, iter, ctx)?;
 

	
 
            let full_span = InputSpan::from_positions(
 
                ctx.heap[left].full_span().begin,
 
                ctx.heap[right].full_span().end,
 
            );
 

	
 
            result = ctx.heap.alloc_binary_expression(|this| BinaryExpression{
 
                this, operator_span, full_span, left, operation, right,
 
                parent: ExpressionParent::None,
 
                type_index: -1,
 
            }).upcast();
 
        }
 

	
 
        Ok(result)
 
    }
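    // Usage sketch (hypothetical caller; the specific TokenKind variants and the
    // name of the next-higher precedence parser are assumptions): one precedence
    // level matches its own operator tokens and delegates the operands upwards,
    // which yields left-associative parses, e.g. `a - b - c` as `(a - b) - c`.
    //
    //     self.consume_generic_binary_expression(
    //         module, iter, ctx,
    //         |token| match token {
    //             Some(TokenKind::Plus) => Some(BinaryOperator::Add),
    //             Some(TokenKind::Minus) => Some(BinaryOperator::Subtract),
    //             _ => None,
    //         },
    //         Self::consume_next_precedence_level,
    //     )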
 

	
 
    #[inline]
 
    fn consume_expression_list(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, end_pos: Option<&mut InputPosition>
 
    ) -> Result<Vec<ExpressionId>, ParseError> {
 
        let mut section = self.expressions.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenParen, TokenKind::CloseParen, &module.source, iter, ctx,
 
            |_source, iter, ctx| self.consume_expression(module, iter, ctx),
 
            &mut section, "an expression", "a list of expressions", end_pos
 
        )?;
 
        Ok(section.into_vec())
 
    }
 
}
 

	
 
/// Consumes polymorphic variables and throws them on the floor.
 
fn consume_polymorphic_vars_spilled(source: &InputSource, iter: &mut TokenIter, _ctx: &mut PassCtx) -> Result<(), ParseError> {
 
    maybe_consume_comma_separated_spilled(
 
        TokenKind::OpenAngle, TokenKind::CloseAngle, source, iter, _ctx,
 
        |source, iter, _ctx| {
 
            consume_ident(source, iter)?;
 
            Ok(())
 
        }, "a polymorphic variable"
 
    )?;
 
    Ok(())
 
}
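// Example (sketch): for a definition header containing `<T1, T2>` this consumes
// the angle-bracketed section, requiring each element to be a single identifier,
// and discards the parsed names entirely. Absence of the section is also fine,
// because the `maybe_` variant of the comma-separated consumer is used.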
 

	
 
/// Consumes the parameter list to functions/components
 
fn consume_parameter_list(
 
    parser: &mut ParserTypeParser, source: &InputSource, iter: &mut TokenIter,
 
    ctx: &mut PassCtx, target: &mut ScopedSection<VariableId>,
 
    scope: SymbolScope, definition_id: DefinitionId
 
) -> Result<(), ParseError> {
 
    consume_comma_separated(
 
        TokenKind::OpenParen, TokenKind::CloseParen, source, iter, ctx,
 
        |source, iter, ctx| {
 
            let poly_vars = ctx.heap[definition_id].poly_vars(); // Rust being rust, multiple lookups
 
            let parser_type = parser.consume_parser_type(
 
                iter, &ctx.heap, source, &ctx.symbols, poly_vars, definition_id,
 
                scope, false, None
 
            )?;
 
            let identifier = consume_ident_interned(source, iter, ctx)?;
 
            let parameter_id = ctx.heap.alloc_variable(|this| Variable{
 
                this,
 
                kind: VariableKind::Parameter,
 
                parser_type,
 
                identifier,
 
                relative_pos_in_parent: 0,
 
                unique_id_in_scope: -1,
 
            });
 
            Ok(parameter_id)
 
        },
 
        target, "a parameter", "a parameter list", None
 
    )
 
}
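// Example (sketch, hypothetical surface syntax): for a parameter list such as
// `(u32 length, in<u8> source)` each entry is parsed as a type followed by an
// identifier, allocated as a `Variable` with `VariableKind::Parameter`, and the
// resulting ids are pushed into `target`.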
 
\ No newline at end of file
src/protocol/parser/pass_typing.rs
Show inline comments
 
@@ -541,3073 +541,3073 @@ impl InferenceType {
 
                type_parts_a, &mut idx_a, type_parts_b, &mut idx_b
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                type_parts_b, &mut idx_b, type_parts_a, &mut idx_a
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 

	
 
            return false;
 
        }
 

	
 
        true
 
    }
 

	
 
    /// Performs the conversion of the inference type into a concrete type.
 
    /// Before calling this function you must make sure that no unspecified
 
    /// types (e.g. Unknown or IntegerLike) remain in the type. It will not
 
    /// clear or check whether the supplied `ConcreteType` is empty; it simply
 
    /// appends to the parts vector.
 
    fn write_concrete_type(&self, concrete_type: &mut ConcreteType) {
 
        use InferenceTypePart as ITP;
 
        use ConcreteTypePart as CTP;
 

	
 
        // Make sure inference type is specified but concrete type is not yet specified
 
        debug_assert!(!self.parts.is_empty());
 
        concrete_type.parts.reserve(self.parts.len());
 

	
 
        let mut idx = 0;
 
        while idx < self.parts.len() {
 
            let part = &self.parts[idx];
 
            let converted_part = match part {
 
                ITP::Marker(_) => {
 
                    // Markers are removed when writing to the concrete type.
 
                    idx += 1;
 
                    continue;
 
                },
 
                ITP::Unknown | ITP::NumberLike |
 
                ITP::IntegerLike | ITP::ArrayLike | ITP::PortLike => {
 
                    // Should not happen if type inferencing works correctly: we
 
                    // should have returned a programmer-readable error or have
 
                    // inferred all types.
 
                    unreachable!("attempted to convert inference type part {:?} into concrete type", part);
 
                },
 
                ITP::Void => CTP::Void,
 
                ITP::Message => CTP::Message,
 
                ITP::Bool => CTP::Bool,
 
                ITP::UInt8 => CTP::UInt8,
 
                ITP::UInt16 => CTP::UInt16,
 
                ITP::UInt32 => CTP::UInt32,
 
                ITP::UInt64 => CTP::UInt64,
 
                ITP::SInt8 => CTP::SInt8,
 
                ITP::SInt16 => CTP::SInt16,
 
                ITP::SInt32 => CTP::SInt32,
 
                ITP::SInt64 => CTP::SInt64,
 
                ITP::Character => CTP::Character,
 
                ITP::String => {
 
                    // Inferred type has a 'char' subtype to simplify array
 
                    // checking, we remove it here.
 
                    debug_assert_eq!(self.parts[idx + 1], InferenceTypePart::Character);
 
                    idx += 1;
 
                    CTP::String
 
                },
 
                ITP::Array => CTP::Array,
 
                ITP::Slice => CTP::Slice,
 
                ITP::Input => CTP::Input,
 
                ITP::Output => CTP::Output,
 
                ITP::Tuple(num) => CTP::Tuple(*num),
 
                ITP::Instance(id, num) => CTP::Instance(*id, *num),
 
            };
 

	
 
            concrete_type.parts.push(converted_part);
 
            idx += 1;
 
        }
 
    }
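    // Worked example (hypothetical part sequences): inference parts
    // [Marker(0), Array, UInt8] are written as concrete parts [Array, UInt8],
    // since markers are dropped, while [String, Character] becomes just
    // [String], because the helper 'char' subtype is stripped here.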
 

	
 
    /// Writes a human-readable version of the type to a string. This is used
 
    /// to display error messages
 
    fn write_display_name(
 
        buffer: &mut String, heap: &Heap, parts: &[InferenceTypePart], mut idx: usize
 
    ) -> usize {
 
        use InferenceTypePart as ITP;
 

	
 
        match &parts[idx] {
 
            ITP::Marker(_marker_idx) => {
 
                if debug_log_enabled!() {
 
                    buffer.push_str(&format!("{{Marker:{}}}", *_marker_idx));
 
                }
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
            },
 
            ITP::Unknown => buffer.push_str("?"),
 
            ITP::NumberLike => buffer.push_str("numberlike"),
 
            ITP::IntegerLike => buffer.push_str("integerlike"),
 
            ITP::ArrayLike => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[?]");
 
            },
 
            ITP::PortLike => {
 
                buffer.push_str("portlike<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            }
 
            ITP::Void => buffer.push_str("void"),
 
            ITP::Bool => buffer.push_str(KW_TYPE_BOOL_STR),
 
            ITP::UInt8 => buffer.push_str(KW_TYPE_UINT8_STR),
 
            ITP::UInt16 => buffer.push_str(KW_TYPE_UINT16_STR),
 
            ITP::UInt32 => buffer.push_str(KW_TYPE_UINT32_STR),
 
            ITP::UInt64 => buffer.push_str(KW_TYPE_UINT64_STR),
 
            ITP::SInt8 => buffer.push_str(KW_TYPE_SINT8_STR),
 
            ITP::SInt16 => buffer.push_str(KW_TYPE_SINT16_STR),
 
            ITP::SInt32 => buffer.push_str(KW_TYPE_SINT32_STR),
 
            ITP::SInt64 => buffer.push_str(KW_TYPE_SINT64_STR),
 
            ITP::Character => buffer.push_str(KW_TYPE_CHAR_STR),
 
            ITP::String => {
 
                buffer.push_str(KW_TYPE_STRING_STR);
 
                idx += 1; // skip the 'char' subtype
 
            },
 
            ITP::Message => {
 
                buffer.push_str(KW_TYPE_MESSAGE_STR);
 
                buffer.push('<');
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Array => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[]");
 
            },
 
            ITP::Slice => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[..]");
 
            },
 
            ITP::Input => {
 
                buffer.push_str(KW_TYPE_IN_PORT_STR);
 
                buffer.push('<');
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Output => {
 
                buffer.push_str(KW_TYPE_OUT_PORT_STR);
 
                buffer.push('<');
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Tuple(num_sub) => {
 
                buffer.push('(');
 
                if *num_sub > 0 {
 
                    idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    for _sub_idx in 1..*num_sub {
 
                        buffer.push_str(", ");
 
                        idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    }
 
                }
 
                buffer.push(')');
 
            }
 
            ITP::Instance(definition_id, num_sub) => {
 
                let definition = &heap[*definition_id];
 
                buffer.push_str(definition.identifier().value.as_str());
 
                if *num_sub > 0 {
 
                    buffer.push('<');
 
                    idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    for _sub_idx in 1..*num_sub {
 
                        buffer.push_str(", ");
 
                        idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    }
 
                    buffer.push('>');
 
                }
 
            },
 
        }
 

	
 
        idx
 
    }
 

	
 
    /// Returns the display name of a (part of) the type tree. Will allocate a
 
    /// string.
 
    fn partial_display_name(heap: &Heap, parts: &[InferenceTypePart]) -> String {
 
        let mut buffer = String::with_capacity(parts.len() * 6);
 
        Self::write_display_name(&mut buffer, heap, parts, 0);
 
        buffer
 
    }
 

	
 
    /// Returns the display name of the full type tree. Will allocate a string.
 
    fn display_name(&self, heap: &Heap) -> String {
 
        Self::partial_display_name(heap, &self.parts)
 
    }
 
}
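// Display example (sketch; assumes the keyword strings resolve to `in` and `u8`):
// the inference parts for an input port carrying a byte array would be rendered
// by the helpers above as "in<u8[]>", with unresolved parts shown as "?" and
// numeric placeholders as "numberlike"/"integerlike".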
 

	
 
impl Default for InferenceType {
 
    fn default() -> Self {
 
        Self{
 
            has_marker: false,
 
            is_done: false,
 
            parts: Vec::new(),
 
        }
 
    }
 
}
 

	
 
/// Iterator over the subtrees that follow a marker in an `InferenceType`
 
/// instance. Returns immutable slices over the internal parts
 
struct InferenceTypeMarkerIter<'a> {
 
    parts: &'a [InferenceTypePart],
 
    idx: usize,
 
}
 

	
 
impl<'a> InferenceTypeMarkerIter<'a> {
 
    fn new(parts: &'a [InferenceTypePart]) -> Self {
 
        Self{ parts, idx: 0 }
 
    }
 
}
 

	
 
impl<'a> Iterator for InferenceTypeMarkerIter<'a> {
 
    type Item = (u32, &'a [InferenceTypePart]);
 

	
 
    fn next(&mut self) -> Option<Self::Item> {
 
        // Iterate until we find a marker
 
        while self.idx < self.parts.len() {
 
            if let InferenceTypePart::Marker(marker) = self.parts[self.idx] {
 
                // Found a marker, find the subtree end
 
                let start_idx = self.idx + 1;
 
                let end_idx = InferenceType::find_subtree_end_idx(self.parts, start_idx);
 

	
 
                // Modify internal index, then return items
 
                self.idx = end_idx;
 
                return Some((marker, &self.parts[start_idx..end_idx]));
 
            }
 

	
 
            self.idx += 1;
 
        }
 

	
 
        None
 
    }
 
}
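// Minimal usage sketch (assuming `parts` holds a fully constructed list of
// `InferenceTypePart`s):
//
//     for (marker, subtree) in InferenceTypeMarkerIter::new(&parts) {
//         // `marker` is the marker's value, `subtree` is the slice of parts
//         // spanning the type subtree that immediately follows it.
//     }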
 

	
 
#[derive(Debug, PartialEq, Eq)]
 
enum DualInferenceResult {
 
    Neither,        // neither argument is clarified
 
    First,          // first argument is clarified using the second one
 
    Second,         // second argument is clarified using the first one
 
    Both,           // both arguments are clarified
 
    Incompatible,   // types are incompatible: programmer error
 
}
 

	
 
impl DualInferenceResult {
 
    fn modified_lhs(&self) -> bool {
 
        match self {
 
            DualInferenceResult::First | DualInferenceResult::Both => true,
 
            _ => false
 
        }
 
    }
 
    fn modified_rhs(&self) -> bool {
 
        match self {
 
            DualInferenceResult::Second | DualInferenceResult::Both => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, PartialEq, Eq)]
 
enum SingleInferenceResult {
 
    Unmodified,
 
    Modified,
 
    Incompatible
 
}
 

	
 
// -----------------------------------------------------------------------------
 
// PassTyping - Public Interface
 
// -----------------------------------------------------------------------------
 

	
 
type InferNodeIndex = usize;
 
type PolyDataIndex = isize;
 
type VarDataIndex = usize;
 

	
 
pub(crate) struct ResolveQueueElement {
 
    // Note that using the `definition_id` and the `reserved_monomorph_index` one may
 
    // query the type table for the full procedure type, thereby retrieving
 
    // the polymorphic arguments to the procedure.
 
    pub(crate) root_id: RootId,
 
    pub(crate) definition_id: DefinitionId,
 
    pub(crate) reserved_type_id: TypeId,
 
    pub(crate) reserved_monomorph_index: u32,
 
}
 

	
 
pub(crate) type ResolveQueue = VecDeque<ResolveQueueElement>;
 

	
 
struct InferenceNode {
 
    // filled in during type inference
 
    expr_type: InferenceType,               // result type from expression
 
    expr_id: ExpressionId,                  // expression that is evaluated
 
    inference_rule: InferenceRule,          // rule used to infer node type
 
    parent_index: Option<InferNodeIndex>,   // parent of inference node
 
    field_index: i32,                       // index of struct field or tuple member
 
    poly_data_index: PolyDataIndex,         // index to inference data for polymorphic types
 
    // filled in once type inference is done
 
    info_type_id: TypeId,
 
    info_variant: ExpressionInfoVariant,
 
}
 

	
 
impl InferenceNode {
 
    #[inline]
 
    fn as_expression_info(&self) -> ExpressionInfo {
 
        return ExpressionInfo {
 
            type_id: self.info_type_id,
 
            variant: self.info_variant
 
        }
 
    }
 
}
 

	
 
/// Inferencing rule to apply. Some of these are reasonably generic. Other ones
 
/// require so much custom logic that we'll not try to come up with an
 
/// abstraction.
 
enum InferenceRule {
 
    Noop,
 
    MonoTemplate(InferenceRuleTemplate),
 
    BiEqual(InferenceRuleBiEqual),
 
    TriEqualArgs(InferenceRuleTriEqualArgs),
 
    TriEqualAll(InferenceRuleTriEqualAll),
 
    Concatenate(InferenceRuleTwoArgs),
 
    IndexingExpr(InferenceRuleIndexingExpr),
 
    SlicingExpr(InferenceRuleSlicingExpr),
 
    SelectStructField(InferenceRuleSelectStructField),
 
    SelectTupleMember(InferenceRuleSelectTupleMember),
 
    LiteralStruct(InferenceRuleLiteralStruct),
 
    LiteralEnum,
 
    LiteralUnion(InferenceRuleLiteralUnion),
 
    LiteralArray(InferenceRuleLiteralArray),
 
    LiteralTuple(InferenceRuleLiteralTuple),
 
    CastExpr(InferenceRuleCastExpr),
 
    CallExpr(InferenceRuleCallExpr),
 
    VariableExpr(InferenceRuleVariableExpr),
 
}
 

	
 
impl InferenceRule {
 
    union_cast_to_ref_method_impl!(as_mono_template, InferenceRuleTemplate, InferenceRule::MonoTemplate);
 
    union_cast_to_ref_method_impl!(as_bi_equal, InferenceRuleBiEqual, InferenceRule::BiEqual);
 
    union_cast_to_ref_method_impl!(as_tri_equal_args, InferenceRuleTriEqualArgs, InferenceRule::TriEqualArgs);
 
    union_cast_to_ref_method_impl!(as_tri_equal_all, InferenceRuleTriEqualAll, InferenceRule::TriEqualAll);
 
    union_cast_to_ref_method_impl!(as_concatenate, InferenceRuleTwoArgs, InferenceRule::Concatenate);
 
    union_cast_to_ref_method_impl!(as_indexing_expr, InferenceRuleIndexingExpr, InferenceRule::IndexingExpr);
 
    union_cast_to_ref_method_impl!(as_slicing_expr, InferenceRuleSlicingExpr, InferenceRule::SlicingExpr);
 
    union_cast_to_ref_method_impl!(as_select_struct_field, InferenceRuleSelectStructField, InferenceRule::SelectStructField);
 
    union_cast_to_ref_method_impl!(as_select_tuple_member, InferenceRuleSelectTupleMember, InferenceRule::SelectTupleMember);
 
    union_cast_to_ref_method_impl!(as_literal_struct, InferenceRuleLiteralStruct, InferenceRule::LiteralStruct);
 
    union_cast_to_ref_method_impl!(as_literal_union, InferenceRuleLiteralUnion, InferenceRule::LiteralUnion);
 
    union_cast_to_ref_method_impl!(as_literal_array, InferenceRuleLiteralArray, InferenceRule::LiteralArray);
 
    union_cast_to_ref_method_impl!(as_literal_tuple, InferenceRuleLiteralTuple, InferenceRule::LiteralTuple);
 
    union_cast_to_ref_method_impl!(as_cast_expr, InferenceRuleCastExpr, InferenceRule::CastExpr);
 
    union_cast_to_ref_method_impl!(as_call_expr, InferenceRuleCallExpr, InferenceRule::CallExpr);
 
    union_cast_to_ref_method_impl!(as_variable_expr, InferenceRuleVariableExpr, InferenceRule::VariableExpr);
 
}
 

	
 
// Note: InferenceRuleTemplate is `Copy`, so don't add dynamically allocated
 
// members in the future (or review places where this struct is copied)
 
#[derive(Clone, Copy)]
 
struct InferenceRuleTemplate {
 
    template: &'static [InferenceTypePart],
 
    application: InferenceRuleTemplateApplication,
 
}
 

	
 
impl InferenceRuleTemplate {
 
    fn new_none() -> Self {
 
        return Self{
 
            template: &[],
 
            application: InferenceRuleTemplateApplication::None,
 
        };
 
    }
 

	
 
    fn new_forced(template: &'static [InferenceTypePart]) -> Self {
 
        return Self{
 
            template,
 
            application: InferenceRuleTemplateApplication::Forced,
 
        };
 
    }
 

	
 
    fn new_template(template: &'static [InferenceTypePart]) -> Self {
 
        return Self{
 
            template,
 
            application: InferenceRuleTemplateApplication::Template,
 
        }
 
    }
 
}
 

	
 
#[derive(Clone, Copy)]
 
enum InferenceRuleTemplateApplication {
 
    None, // do not apply template, silly, but saves some bytes
 
    Forced,
 
    Template,
 
}
 

	
 
/// Type equality applied to 'self' and the argument. An optional template will
 
/// be applied to 'self' first. Example: "bitwise not"
 
struct InferenceRuleBiEqual {
 
    template: InferenceRuleTemplate,
 
    argument_index: InferNodeIndex,
 
}
 

	
 
/// Type equality applied to two arguments. Template can be applied to 'self'
 
/// (generally forced, since this rule does not apply a type equality constraint
 
/// to 'self') and the two arguments. Example: "equality operator"
 
struct InferenceRuleTriEqualArgs {
 
    argument_template: InferenceRuleTemplate,
 
    result_template: InferenceRuleTemplate,
 
    argument1_index: InferNodeIndex,
 
    argument2_index: InferNodeIndex,
 
}
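// Worked example for the rule above (equality operator `a == b`): the result
// template forces the expression's own type to `bool`, while the equality
// constraint relates only `argument1_index` and `argument2_index`, so the types
// of `a` and `b` must end up identical but need not equal the result type.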
 

	
 
/// Type equality applied to 'self' and two arguments. Template may be
 
/// optionally applied to 'self'. Example: "addition operator"
 
struct InferenceRuleTriEqualAll {
 
    template: InferenceRuleTemplate,
 
    argument1_index: InferNodeIndex,
 
    argument2_index: InferNodeIndex,
 
}
 

	
 
/// Information for an inference rule that is applied to 'self' and two
 
/// arguments, see `InferenceRule` for its meaning.
 
struct InferenceRuleTwoArgs {
 
    argument1_index: InferNodeIndex,
 
    argument2_index: InferNodeIndex,
 
}
 

	
 
struct InferenceRuleIndexingExpr {
 
    subject_index: InferNodeIndex,
 
    index_index: InferNodeIndex,
 
}
 

	
 
struct InferenceRuleSlicingExpr {
 
    subject_index: InferNodeIndex,
 
    from_index: InferNodeIndex,
 
    to_index: InferNodeIndex,
 
}
 

	
 
struct InferenceRuleSelectStructField {
 
    subject_index: InferNodeIndex,
 
    selected_field: Identifier,
 
}
 

	
 
struct InferenceRuleSelectTupleMember {
 
    subject_index: InferNodeIndex,
 
    selected_index: u64,
 
}
 

	
 
struct InferenceRuleLiteralStruct {
 
    element_indices: Vec<InferNodeIndex>,
 
}
 

	
 
struct InferenceRuleLiteralUnion {
 
    element_indices: Vec<InferNodeIndex>
 
}
 

	
 
struct InferenceRuleLiteralArray {
 
    element_indices: Vec<InferNodeIndex>
 
}
 

	
 
struct InferenceRuleLiteralTuple {
 
    element_indices: Vec<InferNodeIndex>
 
}
 

	
 
struct InferenceRuleCastExpr {
 
    subject_index: InferNodeIndex,
 
}
 

	
 
struct InferenceRuleCallExpr {
 
    argument_indices: Vec<InferNodeIndex>
 
}
 

	
 
/// Data associated with a variable expression: an expression that reads the
 
/// value from a variable.
 
struct InferenceRuleVariableExpr {
 
    var_data_index: VarDataIndex, // shared variable information
 
}
 

	
 
/// This particular visitor will recurse depth-first into the AST and ensures
 
/// that all expressions have the appropriate types.
 
pub(crate) struct PassTyping {
 
    // Current definition we're typechecking.
 
    reserved_type_id: TypeId,
 
    reserved_monomorph_index: u32,
 
    procedure_id: ProcedureDefinitionId,
 
    procedure_kind: ProcedureKind,
 
    poly_vars: Vec<ConcreteType>,
 
    // Temporary variables during construction of inference rules
 
    parent_index: Option<InferNodeIndex>,
 
    // Buffers for iteration over various types
 
    var_buffer: ScopedBuffer<VariableId>,
 
    expr_buffer: ScopedBuffer<ExpressionId>,
 
    stmt_buffer: ScopedBuffer<StatementId>,
 
    bool_buffer: ScopedBuffer<bool>,
 
    index_buffer: ScopedBuffer<usize>,
 
    definition_buffer: ScopedBuffer<DefinitionId>,
 
    poly_progress_buffer: ScopedBuffer<u32>,
 
    // Mapping from parser type to inferred type. We attempt to continue to
 
    // specify these types until we're stuck or we've fully determined the type.
 
    infer_nodes: Vec<InferenceNode>,                     // will be transferred to type table at end
 
    poly_data: Vec<PolyData>,       // data for polymorph inference
 
    var_data: Vec<VarData>,
 
    // Keeping track of which expressions need to be reinferred because the
 
    // expressions they're linked to made progress on an associated type
 
    node_queued: DequeSet<InferNodeIndex>,
 
}
 

	
 
/// Generic struct that is used to store inferred types associated with
 
/// polymorphic types.
 
struct PolyData {
 
    first_rule_application: bool,
 
    definition_id: DefinitionId, // the definition, only used for user feedback
 
    /// Inferred types of the polymorphic variables as they are written down
 
    /// at the type's definition.
 
    poly_vars: Vec<InferenceType>,
 
    expr_types: PolyDataTypes,
 
}
 

	
 
// silly structure, just so we can use `PolyDataTypeIndex` ergonomically while
 
// making sure we're still capable of borrowing from `poly_vars`.
 
struct PolyDataTypes {
 
    /// Inferred types of associated types (e.g. struct fields, tuple members,
 
    /// function arguments). These types may depend on the polymorphic variables
 
    /// defined above.
 
    associated: Vec<InferenceType>,
 
    /// Inferred "returned" type (e.g. if a struct field is selected, then this
 
    /// contains the type of the selected field; for a function call it contains
 
    /// the return type). May depend on the polymorphic variables defined above.
 
    returned: InferenceType,
 
}
 

	
 
#[derive(Clone, Copy)]
 
enum PolyDataTypeIndex {
 
    Associated(usize), // indexes into `PolyData.associated`
 
    Returned,
 
}
 

	
 
impl PolyDataTypes {
 
    fn get_type(&self, index: PolyDataTypeIndex) -> &InferenceType {
 
        match index {
 
            PolyDataTypeIndex::Associated(index) => return &self.associated[index],
 
            PolyDataTypeIndex::Returned => return &self.returned,
 
        }
 
    }
 

	
 
    fn get_type_mut(&mut self, index: PolyDataTypeIndex) -> &mut InferenceType {
 
        match index {
 
            PolyDataTypeIndex::Associated(index) => return &mut self.associated[index],
 
            PolyDataTypeIndex::Returned => return &mut self.returned,
 
        }
 
    }
 
}
 

	
 
struct VarData {
 
    var_id: VariableId,
 
    var_type: InferenceType,
 
    used_at: Vec<InferNodeIndex>, // of variable expressions
 
    linked_var: Option<VarDataIndex>,
 
}
 

	
 
impl PassTyping {
 
    pub(crate) fn new() -> Self {
 
        PassTyping {
 
            reserved_type_id: TypeId::new_invalid(),
 
            reserved_monomorph_index: u32::MAX,
 
            procedure_id: ProcedureDefinitionId::new_invalid(),
 
            procedure_kind: ProcedureKind::Function,
 
            poly_vars: Vec::new(),
 
            parent_index: None,
 
            var_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_LARGE),
 
            expr_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_LARGE),
 
            stmt_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_LARGE),
 
            bool_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_SMALL),
 
            index_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_SMALL),
 
            definition_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_LARGE),
 
            poly_progress_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_SMALL),
 
            infer_nodes: Vec::with_capacity(BUFFER_INIT_CAP_LARGE),
 
            poly_data: Vec::with_capacity(BUFFER_INIT_CAP_SMALL),
 
            var_data: Vec::with_capacity(BUFFER_INIT_CAP_SMALL),
 
            node_queued: DequeSet::new(),
 
        }
 
    }
 

	
 
    pub(crate) fn queue_module_definitions(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) {
 
        debug_assert_eq!(ctx.module().phase, ModuleCompilationPhase::ValidatedAndLinked);
 
        let root_id = ctx.module().root_id;
 
        let root = &ctx.heap.protocol_descriptions[root_id];
 
        let definitions_section = self.definition_buffer.start_section_initialized(&root.definitions);
 

	
 
        for definition_id in definitions_section.iter_copied() {
 
            let definition = &ctx.heap[definition_id];
 

	
 
            let first_concrete_part_and_procedure_id = match definition {
 
                Definition::Procedure(definition) => {
 
                    if definition.poly_vars.is_empty() {
 
                        if definition.kind == ProcedureKind::Function {
 
                            Some((ConcreteTypePart::Function(definition.this, 0), definition.this))
 
                        } else {
 
                            Some((ConcreteTypePart::Component(definition.this, 0), definition.this))
 
                        }
 
                    } else {
 
                        None
 
                    }
 
                }
 
                Definition::Enum(_) | Definition::Struct(_) | Definition::Union(_) => None,
 
            };
 

	
 
            if let Some((first_concrete_part, procedure_id)) = first_concrete_part_and_procedure_id {
 
                let procedure = &mut ctx.heap[procedure_id];
 
                let monomorph_index = procedure.monomorphs.len() as u32;
 
                procedure.monomorphs.push(ProcedureDefinitionMonomorph::new_invalid());
 

	
 
                let concrete_type = ConcreteType{ parts: vec![first_concrete_part] };
 
                let type_id = ctx.types.reserve_procedure_monomorph_type_id(&definition_id, concrete_type, monomorph_index);
 
                queue.push_back(ResolveQueueElement{
 
                    root_id,
 
                    definition_id,
 
                    reserved_type_id: type_id,
 
                    reserved_monomorph_index: monomorph_index,
 
                })
 
            }
 
        }
 

	
 
        definitions_section.forget();
 
    }
 

	
 
    pub(crate) fn handle_module_definition(
 
        &mut self, ctx: &mut Ctx, queue: &mut ResolveQueue, element: ResolveQueueElement
 
    ) -> VisitorResult {
 
        self.reset();
 
        debug_assert_eq!(ctx.module().root_id, element.root_id);
 
        debug_assert!(self.poly_vars.is_empty());
 

	
 
        // Prepare for visiting the definition
 
        self.reserved_type_id = element.reserved_type_id;
 
        self.reserved_monomorph_index = element.reserved_monomorph_index;
 

	
 
        let proc_base = ctx.types.get_base_definition(&element.definition_id).unwrap();
 
        if proc_base.is_polymorph {
 
            let monomorph = ctx.types.get_monomorph(element.reserved_type_id);
 
            for poly_arg in monomorph.concrete_type.embedded_iter(0) {
 
                self.poly_vars.push(ConcreteType{ parts: Vec::from(poly_arg) });
 
            }
 
        }
 

	
 
        // Visit the definition, setting up the type resolving process, then
 
        // (attempt to) resolve all types
 
        self.visit_definition(ctx, element.definition_id)?;
 
        self.resolve_types(ctx, queue)?;
 
        Ok(())
 
    }
 

	
 
    fn reset(&mut self) {
 
        self.reserved_type_id = TypeId::new_invalid();
 
        self.procedure_id = ProcedureDefinitionId::new_invalid();
 
        self.procedure_kind = ProcedureKind::Function;
 
        self.poly_vars.clear();
 
        self.parent_index = None;
 

	
 
        self.infer_nodes.clear();
 
        self.poly_data.clear();
 
        self.var_data.clear();
 
        self.node_queued.clear();
 
    }
 
}
 

	
 
// -----------------------------------------------------------------------------
 
// PassTyping - Visitor-like implementation
 
// -----------------------------------------------------------------------------
 

	
 
type VisitorResult = Result<(), ParseError>;
 
type VisitExprResult = Result<InferNodeIndex, ParseError>;
 

	
 
impl PassTyping {
 
    // Definitions
 

	
 
    fn visit_definition(&mut self, ctx: &mut Ctx, id: DefinitionId) -> VisitorResult {
 
        return visitor_recursive_definition_impl!(self, &ctx.heap[id], ctx);
 
    }
 

	
 
    fn visit_enum_definition(&mut self, _: &mut Ctx, _: EnumDefinitionId) -> VisitorResult { return Ok(()) }
 
    fn visit_struct_definition(&mut self, _: &mut Ctx, _: StructDefinitionId) -> VisitorResult { return Ok(()) }
 
    fn visit_union_definition(&mut self, _: &mut Ctx, _: UnionDefinitionId) -> VisitorResult { return Ok(()) }
 

	
 
    fn visit_procedure_definition(&mut self, ctx: &mut Ctx, id: ProcedureDefinitionId) -> VisitorResult {
 
        let procedure_def = &ctx.heap[id];
 

	
 
        self.procedure_id = id;
 
        self.procedure_kind = procedure_def.kind;
 
        let body_id = procedure_def.body;
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting procedure: '{}' (id: {}, kind: {:?})", procedure_def.identifier.value.as_str(), id.0.index, procedure_def.kind);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        // Visit parameters
 
        let section = self.var_buffer.start_section_initialized(procedure_def.parameters.as_slice());
 
        for param_id in section.iter_copied() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, true);
 
            debug_assert!(var_type.is_done, "expected function arguments to be concrete types");
 
            self.var_data.push(VarData{
 
                var_id: param_id,
 
                var_type,
 
                used_at: Vec::new(),
 
                linked_var: None
 
            })
 
        }
 
        section.forget();
 

	
 
        // Visit all of the expressions within the body
 
        self.parent_index = None;
 
        return self.visit_block_stmt(ctx, body_id);
 
    }
 

	
 
    // Statements
 

	
 
    fn visit_stmt(&mut self, ctx: &mut Ctx, id: StatementId) -> VisitorResult {
 
        return visitor_recursive_statement_impl!(self, &ctx.heap[id], ctx, Ok(()));
 
    }
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        // Transfer statements for traversal
 
        let block = &ctx.heap[id];
 

	
 
        let section = self.stmt_buffer.start_section_initialized(block.statements.as_slice());
 
        for stmt_id in section.iter_copied() {
 
            self.visit_stmt(ctx, stmt_id)?;
 
        }
 
        section.forget();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_stmt(&mut self, ctx: &mut Ctx, id: LocalStatementId) -> VisitorResult {
 
        return visitor_recursive_local_impl!(self, &ctx.heap[id], ctx);
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, ctx: &mut Ctx, id: MemoryStatementId) -> VisitorResult {
 
        let memory_stmt = &ctx.heap[id];
 
        let initial_expr_id = memory_stmt.initial_expr;
 

	
 
        let local = &ctx.heap[memory_stmt.variable];
 
        let var_type = self.determine_inference_type_from_parser_type_elements(&local.parser_type.elements, true);
 
        self.var_data.push(VarData{
 
            var_id: memory_stmt.variable,
 
            var_type,
 
            used_at: Vec::new(),
 
            linked_var: None,
 
        });
 

	
 
        // Process the initial value
 
        self.visit_assignment_expr(ctx, initial_expr_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, ctx: &mut Ctx, id: ChannelStatementId) -> VisitorResult {
 
        let channel_stmt = &ctx.heap[id];
 

	
 
        let from_var_index = self.var_data.len() as VarDataIndex;
 
        let to_var_index = from_var_index + 1;
 

	
 
        let from_local = &ctx.heap[channel_stmt.from];
 
        let from_var_type = self.determine_inference_type_from_parser_type_elements(&from_local.parser_type.elements, true);
 
        self.var_data.push(VarData{
 
            var_id: channel_stmt.from,
 
            var_type: from_var_type,
 
            used_at: Vec::new(),
 
            linked_var: Some(to_var_index),
 
        });
 

	
 
        let to_local = &ctx.heap[channel_stmt.to];
 
        let to_var_type = self.determine_inference_type_from_parser_type_elements(&to_local.parser_type.elements, true);
 
        self.var_data.push(VarData{
 
            var_id: channel_stmt.to,
 
            var_type: to_var_type,
 
            used_at: Vec::new(),
 
            linked_var: Some(from_var_index),
 
        });
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        let labeled_stmt = &ctx.heap[id];
 
        let substmt_id = labeled_stmt.body;
 
        self.visit_stmt(ctx, substmt_id)
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        let if_stmt = &ctx.heap[id];
 

	
 
        let true_body_case = if_stmt.true_case;
 
        let false_body_case = if_stmt.false_case;
 
        let test_expr_id = if_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_stmt(ctx, true_body_case.body)?;
 
        if let Some(false_body_case) = false_body_case {
 
            self.visit_stmt(ctx, false_body_case.body)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_while_stmt(&mut self, ctx: &mut Ctx, id: WhileStatementId) -> VisitorResult {
 
        let while_stmt = &ctx.heap[id];
 

	
 
        let body_id = while_stmt.body;
 
        let test_expr_id = while_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_break_stmt(&mut self, _: &mut Ctx, _: BreakStatementId) -> VisitorResult { return Ok(()) }
 
    fn visit_continue_stmt(&mut self, _: &mut Ctx, _: ContinueStatementId) -> VisitorResult { return Ok(()) }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        let sync_stmt = &ctx.heap[id];
 
        let body_id = sync_stmt.body;
 

	
 
        self.visit_stmt(ctx, body_id)
 
    }
 

	
 
    fn visit_fork_stmt(&mut self, ctx: &mut Ctx, id: ForkStatementId) -> VisitorResult {
 
        let fork_stmt = &ctx.heap[id];
 
        let left_body_id = fork_stmt.left_body;
 
        let right_body_id = fork_stmt.right_body;
 

	
 
        self.visit_stmt(ctx, left_body_id)?;
 
        if let Some(right_body_id) = right_body_id {
 
            self.visit_stmt(ctx, right_body_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_select_stmt(&mut self, ctx: &mut Ctx, id: SelectStatementId) -> VisitorResult {
 
        let select_stmt = &ctx.heap[id];
 

	
 
        let mut section = self.stmt_buffer.start_section();
 
        let num_cases = select_stmt.cases.len();
 

	
 
        for case in &select_stmt.cases {
 
            section.push(case.guard);
 
            section.push(case.body);
 
        }
 

	
 
        for case_index in 0..num_cases {
 
            let base_index = 2 * case_index;
 
            let guard_stmt_id = section[base_index    ];
 
            let block_stmt_id = section[base_index + 1];
 

	
 
            self.visit_stmt(ctx, guard_stmt_id)?;
 
            self.visit_stmt(ctx, block_stmt_id)?;
 
        }
 
        section.forget();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        let return_stmt = &ctx.heap[id];
 
        debug_assert_eq!(return_stmt.expressions.len(), 1);
 
        let expr_id = return_stmt.expressions[0];
 

	
 
        self.visit_expr(ctx, expr_id)?;
 
        return Ok(());
 
    }
 

	
 
    fn visit_goto_stmt(&mut self, _: &mut Ctx, _: GotoStatementId) -> VisitorResult { return Ok(()) }
 

	
 
    fn visit_new_stmt(&mut self, ctx: &mut Ctx, id: NewStatementId) -> VisitorResult {
 
        let new_stmt = &ctx.heap[id];
 
        let call_expr_id = new_stmt.expression;
 

	
 
        self.visit_call_expr(ctx, call_expr_id)?;
 
        return Ok(());
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        let expr_stmt = &ctx.heap[id];
 
        let subexpr_id = expr_stmt.expression;
 

	
 
        self.visit_expr(ctx, subexpr_id)?;
 
        return Ok(());
 
    }
 

	
 
    // Expressions
 

	
 
    fn visit_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> VisitExprResult {
 
        return visitor_recursive_expression_impl!(self, &ctx.heap[id], ctx);
 
    }
 

	
 
    fn visit_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> VisitExprResult {
 
        use AssignmentOperator as AO;
 

	
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let assign_expr = &ctx.heap[id];
 
        let assign_op = assign_expr.operation;
 
        let left_expr_id = assign_expr.left;
 
        let right_expr_id = assign_expr.right;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let left_index = self.visit_expr(ctx, left_expr_id)?;
 
        let right_index = self.visit_expr(ctx, right_expr_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        let argument_template = match assign_op {
 
            AO::Set =>
 
                InferenceRuleTemplate::new_none(),
 
            AO::Concatenated =>
 
                InferenceRuleTemplate::new_template(&ARRAYLIKE_TEMPLATE),
 
            AO::Multiplied | AO::Divided | AO::Added | AO::Subtracted =>
 
                InferenceRuleTemplate::new_template(&NUMBERLIKE_TEMPLATE),
 
            AO::Remained | AO::ShiftedLeft | AO::ShiftedRight |
 
            AO::BitwiseAnded | AO::BitwiseXored | AO::BitwiseOred =>
 
                InferenceRuleTemplate::new_template(&INTEGERLIKE_TEMPLATE),
 
        };
 

	
 
        node.inference_rule = InferenceRule::TriEqualArgs(InferenceRuleTriEqualArgs{
 
            argument_template,
 
            result_template: InferenceRuleTemplate::new_forced(&VOID_TEMPLATE),
 
            argument1_index: left_index,
 
            argument2_index: right_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule_tri_equal_args(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
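    // Worked example (sketch, assuming `+=` maps to AO::Added): for `x += y` the
    // arguments receive the number-like template, so `x` and `y` must infer to
    // the same numeric type, while the assignment expression itself is forced to
    // the void type.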
 

	
 
    fn visit_binding_expr(&mut self, ctx: &mut Ctx, id: BindingExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let binding_expr = &ctx.heap[id];
 
        let bound_to_id = binding_expr.bound_to;
 
        let bound_from_id = binding_expr.bound_from;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let arg_to_index = self.visit_expr(ctx, bound_to_id)?;
 
        let arg_from_index = self.visit_expr(ctx, bound_from_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::TriEqualArgs(InferenceRuleTriEqualArgs{
 
            argument_template: InferenceRuleTemplate::new_none(),
 
            result_template: InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE),
 
            argument1_index: arg_to_index,
 
            argument2_index: arg_from_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule_tri_equal_args(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let conditional_expr = &ctx.heap[id];
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        self.visit_expr(ctx, test_expr_id)?;
 
        let true_index = self.visit_expr(ctx, true_expr_id)?;
 
        let false_index = self.visit_expr(ctx, false_expr_id)?;
 

	
 
        // Note: the test of the conditional expression has already been forced
 
        // to the boolean type. So the only thing we need to do while progressing
 
        // is to apply an equal3 constraint to the arguments and the result of
 
        // the expression.
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::TriEqualAll(InferenceRuleTriEqualAll{
 
            template: InferenceRuleTemplate::new_none(),
 
            argument1_index: true_index,
 
            argument2_index: false_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule_tri_equal_all(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> VisitExprResult {
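        // Each binary operator maps onto one of the generic inference rules:
 
        // concatenation gets its own rule, logical operators force booleans,
 
        // bitwise/shift/remainder require integerlike operands, comparisons
 
        // produce booleans, and arithmetic requires numberlike operands.
 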
 
        use BinaryOperator as BO;
 

	
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let binary_expr = &ctx.heap[id];
 
        let binary_op = binary_expr.operation;
 
        let lhs_expr_id = binary_expr.left;
 
        let rhs_expr_id = binary_expr.right;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let left_index = self.visit_expr(ctx, lhs_expr_id)?;
 
        let right_index = self.visit_expr(ctx, rhs_expr_id)?;
 

	
 
        let inference_rule = match binary_op {
 
            BO::Concatenate =>
 
                InferenceRule::Concatenate(InferenceRuleTwoArgs{
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
            BO::LogicalAnd | BO::LogicalOr =>
 
                InferenceRule::TriEqualAll(InferenceRuleTriEqualAll{
 
                    template: InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE),
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
            BO::BitwiseOr | BO::BitwiseXor | BO::BitwiseAnd | BO::Remainder | BO::ShiftLeft | BO::ShiftRight =>
 
                InferenceRule::TriEqualAll(InferenceRuleTriEqualAll{
 
                    template: InferenceRuleTemplate::new_template(&INTEGERLIKE_TEMPLATE),
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
            BO::Equality | BO::Inequality =>
 
                InferenceRule::TriEqualArgs(InferenceRuleTriEqualArgs{
 
                    argument_template: InferenceRuleTemplate::new_none(),
 
                    result_template: InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE),
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
            BO::LessThan | BO::GreaterThan | BO::LessThanEqual | BO::GreaterThanEqual =>
 
                InferenceRule::TriEqualArgs(InferenceRuleTriEqualArgs{
 
                    argument_template: InferenceRuleTemplate::new_template(&NUMBERLIKE_TEMPLATE),
 
                    result_template: InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE),
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
            BO::Add | BO::Subtract | BO::Multiply | BO::Divide =>
 
                InferenceRule::TriEqualAll(InferenceRuleTriEqualAll{
 
                    template: InferenceRuleTemplate::new_template(&NUMBERLIKE_TEMPLATE),
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
        };
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = inference_rule;
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> VisitExprResult {
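        // A unary expression has the same type as its argument; the operator
 
        // only restricts that type (numberlike, integerlike or boolean).
 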
 
        use UnaryOperator as UO;
 

	
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let unary_expr = &ctx.heap[id];
 
        let operation = unary_expr.operation;
 
        let arg_expr_id = unary_expr.expression;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let argument_index = self.visit_expr(ctx, arg_expr_id)?;
 

	
 
        let template = match operation {
 
            UO::Positive | UO::Negative =>
 
                InferenceRuleTemplate::new_template(&NUMBERLIKE_TEMPLATE),
 
            UO::BitwiseNot =>
 
                InferenceRuleTemplate::new_template(&INTEGERLIKE_TEMPLATE),
 
            UO::LogicalNot =>
 
                InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE),
 
        };
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::BiEqual(InferenceRuleBiEqual{
 
            template, argument_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule_bi_equal(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitExprResult {
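        // Indexing constrains the subject to an arraylike type and the index
 
        // to an integerlike type; the expression yields the element type.
 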
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let indexing_expr = &ctx.heap[id];
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 
        let index_index = self.visit_expr(ctx, index_expr_id)?; // node index of the index expression
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::IndexingExpr(InferenceRuleIndexingExpr{
 
            subject_index, index_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule_indexing_expr(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitExprResult {
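        // Slicing constrains the subject to an arraylike type and both bounds
 
        // to integerlike types; the result is a string or a slice, depending
 
        // on the subject.
 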
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let slicing_expr = &ctx.heap[id];
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 
        let from_index = self.visit_expr(ctx, from_expr_id)?;
 
        let to_index = self.visit_expr(ctx, to_expr_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::SlicingExpr(InferenceRuleSlicingExpr{
 
            subject_index, from_index, to_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule_slicing_expr(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitExprResult {
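        // A select expression either accesses a struct field by name or a
 
        // tuple member by index; each case gets its own inference rule.
 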
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let select_expr = &ctx.heap[id];
 
        let subject_expr_id = select_expr.subject;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        let inference_rule = match &ctx.heap[id].kind {
 
            SelectKind::StructField(field_identifier) =>
 
                InferenceRule::SelectStructField(InferenceRuleSelectStructField{
 
                    subject_index,
 
                    selected_field: field_identifier.clone(),
 
                }),
 
            SelectKind::TupleMember(member_index) =>
 
                InferenceRule::SelectTupleMember(InferenceRuleSelectTupleMember{
 
                    subject_index,
 
                    selected_index: *member_index,
 
                }),
 
        };
 
        node.inference_rule = inference_rule;
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitExprResult {
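        // Most literals directly constrain their own type. Compound literals
 
        // (structs, unions, arrays, tuples) first visit their element
 
        // expressions; struct, enum and union literals also set up the data
 
        // needed to infer their polymorphic arguments.
 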
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 

	
 
        let literal_expr = &ctx.heap[id];
 
        match &literal_expr.value {
 
            Literal::Null => {
 
                let node = &mut self.infer_nodes[self_index];
 
                node.inference_rule = InferenceRule::MonoTemplate(InferenceRuleTemplate::new_template(&MESSAGE_TEMPLATE));
 
            },
 
            Literal::Integer(_) => {
 
                let node = &mut self.infer_nodes[self_index];
 
                node.inference_rule = InferenceRule::MonoTemplate(InferenceRuleTemplate::new_template(&INTEGERLIKE_TEMPLATE));
 
            },
 
            Literal::True | Literal::False => {
 
                let node = &mut self.infer_nodes[self_index];
 
                node.inference_rule = InferenceRule::MonoTemplate(InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE));
 
            },
 
            Literal::Character(_) => {
 
                let node = &mut self.infer_nodes[self_index];
 
                node.inference_rule = InferenceRule::MonoTemplate(InferenceRuleTemplate::new_forced(&CHARACTER_TEMPLATE));
 
            },
 
            Literal::String(_) => {
 
                let node = &mut self.infer_nodes[self_index];
 
                node.inference_rule = InferenceRule::MonoTemplate(InferenceRuleTemplate::new_forced(&STRING_TEMPLATE));
 
            },
 
            Literal::Struct(literal) => {
 
                // Visit field expressions
 
                let mut expr_ids = self.expr_buffer.start_section();
 
                for field in &literal.fields {
 
                    expr_ids.push(field.value);
 
                }
 

	
 
                let mut expr_indices = self.index_buffer.start_section();
 
                for expr_id in expr_ids.iter_copied() {
 
                    let expr_index = self.visit_expr(ctx, expr_id)?;
 
                    expr_indices.push(expr_index);
 
                }
 
                expr_ids.forget();
 
                let element_indices = expr_indices.into_vec();
 

	
 
                // Assign rule and extra data index to inference node
 
                let poly_data_index = self.insert_initial_struct_polymorph_data(ctx, id);
 
                let node = &mut self.infer_nodes[self_index];
 
                node.poly_data_index = poly_data_index;
 
                node.inference_rule = InferenceRule::LiteralStruct(InferenceRuleLiteralStruct{
 
                    element_indices,
 
                });
 
            },
 
            Literal::Enum(_) => {
 
                // Enumerations do not carry any subexpressions, but may still
 
                // have a user-defined polymorphic marker variable. For this 
 
                // reason we may still have to apply inference to this 
 
                // polymorphic variable
 
                let poly_data_index = self.insert_initial_enum_polymorph_data(ctx, id);
 
                let node = &mut self.infer_nodes[self_index];
 
                node.poly_data_index = poly_data_index;
 
                node.inference_rule = InferenceRule::LiteralEnum;
 
            },
 
            Literal::Union(literal) => {
 
                // May carry subexpressions and polymorphic arguments
 
                let expr_ids = self.expr_buffer.start_section_initialized(literal.values.as_slice());
 
                let poly_data_index = self.insert_initial_union_polymorph_data(ctx, id);
 

	
 
                let mut expr_indices = self.index_buffer.start_section();
 
                for expr_id in expr_ids.iter_copied() {
 
                    let expr_index = self.visit_expr(ctx, expr_id)?;
 
                    expr_indices.push(expr_index);
 
                }
 
                expr_ids.forget();
 
                let element_indices = expr_indices.into_vec();
 

	
 
                let node = &mut self.infer_nodes[self_index];
 
                node.poly_data_index = poly_data_index;
 
                node.inference_rule = InferenceRule::LiteralUnion(InferenceRuleLiteralUnion{
 
                    element_indices,
 
                });
 
            },
 
            Literal::Array(expressions) => {
 
                let expr_ids = self.expr_buffer.start_section_initialized(expressions.as_slice());
 

	
 
                let mut expr_indices = self.index_buffer.start_section();
 
                for expr_id in expr_ids.iter_copied() {
 
                    let expr_index = self.visit_expr(ctx, expr_id)?;
 
                    expr_indices.push(expr_index);
 
                }
 
                expr_ids.forget();
 
                let element_indices = expr_indices.into_vec();
 

	
 
                let node = &mut self.infer_nodes[self_index];
 
                node.inference_rule = InferenceRule::LiteralArray(InferenceRuleLiteralArray{
 
                    element_indices,
 
                });
 
            },
 
            Literal::Tuple(expressions) => {
 
                let expr_ids = self.expr_buffer.start_section_initialized(expressions.as_slice());
 

	
 
                let mut expr_indices = self.index_buffer.start_section();
 
                for expr_id in expr_ids.iter_copied() {
 
                    let expr_index = self.visit_expr(ctx, expr_id)?;
 
                    expr_indices.push(expr_index);
 
                }
 
                expr_ids.forget();
 
                let element_indices = expr_indices.into_vec();
 

	
 
                let node = &mut self.infer_nodes[self_index];
 
                node.inference_rule = InferenceRule::LiteralTuple(InferenceRuleLiteralTuple{
 
                    element_indices,
 
                })
 
            }
 
        }
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_cast_expr(&mut self, ctx: &mut Ctx, id: CastExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let cast_expr = &ctx.heap[id];
 
        let subject_expr_id = cast_expr.subject;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::CastExpr(InferenceRuleCastExpr{
 
            subject_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 

	
 
        // The cast expression is a bit special at this point: the progression
 
        // function simply makes sure input/output types are compatible. But if
 
        // the programmer explicitly specified the output type, then we can
 
        // already perform that inference rule here.
 
        {
 
            let cast_expr = &ctx.heap[id];
 
            let specified_type = self.determine_inference_type_from_parser_type_elements(&cast_expr.to_type.elements, true);
 
            let _progress = self.apply_template_constraint(ctx, self_index, &specified_type.parts)?;
 
        }
 

	
 
        self.progress_inference_rule_cast_expr(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 
        let extra_index = self.insert_initial_call_polymorph_data(ctx, id);
 

	
 
        // By default we set the polymorph idx for calls to 0. If the call
 
        // refers to a non-polymorphic function, then it will be "monomorphed"
 
        // once, hence we end up pointing to the correct instance.
 
        self.infer_nodes[self_index].field_index = 0;
 

	
 
        // Visit all arguments
 
        let old_parent = self.parent_index.replace(self_index);
 

	
 
        let call_expr = &ctx.heap[id];
 
        let expr_ids = self.expr_buffer.start_section_initialized(call_expr.arguments.as_slice());
 
        let mut expr_indices = self.index_buffer.start_section();
 

	
 
        for arg_expr_id in expr_ids.iter_copied() {
 
            let expr_index = self.visit_expr(ctx, arg_expr_id)?;
 
            expr_indices.push(expr_index);
 
        }
 
        expr_ids.forget();
 
        let argument_indices = expr_indices.into_vec();
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.poly_data_index = extra_index;
 
        node.inference_rule = InferenceRule::CallExpr(InferenceRuleCallExpr{
 
            argument_indices,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule_call_expr(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitExprResult {
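        // A variable expression shares its type with the variable it refers
 
        // to; every use of that variable is recorded in its `VarData` entry.
 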
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let var_expr = &ctx.heap[id];
 
        debug_assert!(var_expr.declaration.is_some());
 
        let old_parent = self.parent_index.replace(self_index);
 

	
 
        let declaration = &ctx.heap[var_expr.declaration.unwrap()];
 
        let mut var_data_index = None;
 
        for (index, var_data) in self.var_data.iter().enumerate() {
 
            if var_data.var_id == declaration.this {
 
                var_data_index = Some(index);
 
                break;
 
            }
 
        }
 

	
 
        let var_data_index = if let Some(var_data_index) = var_data_index {
 
            let var_data = &mut self.var_data[var_data_index];
 
            var_data.used_at.push(self_index);
 

	
 
            var_data_index
 
        } else {
 
            // If we're in a binding expression then it might be the first time we
 
            // encounter the variable, so add a `VarData` entry.
 
            debug_assert_eq!(declaration.kind, VariableKind::Binding);
 
            let var_type = self.determine_inference_type_from_parser_type_elements(
 
                &declaration.parser_type.elements, true
 
            );
 
            let var_data_index = self.var_data.len();
 
            self.var_data.push(VarData{
 
                var_id: declaration.this,
 
                var_type,
 
                used_at: vec![self_index],
 
                linked_var: None,
 
            });
 

	
 
            var_data_index
 
        };
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::VariableExpr(InferenceRuleVariableExpr{
 
            var_data_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule_variable_expr(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 
}
 

	
 
// -----------------------------------------------------------------------------
 
// PassTyping - Type-inference progression
 
// -----------------------------------------------------------------------------
 

	
 
impl PassTyping {
 
    #[allow(dead_code)] // used when debug flag at the top of this file is true.
 
    fn debug_get_display_name(&self, ctx: &Ctx, node_index: InferNodeIndex) -> String {
 
        let expr_type = &self.infer_nodes[node_index].expr_type;
 
        expr_type.display_name(&ctx.heap)
 
    }
 

	
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError> {
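        // Runs the inference queue to a fixed point, defaults any remaining
 
        // integerlike literals to s32, and finally writes the resulting
 
        // concrete types back into the procedure's monomorph.
 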
 
        // Keep inferring until we can no longer make any progress
 
        while !self.node_queued.is_empty() {
 
            while !self.node_queued.is_empty() {
 
                let node_index = self.node_queued.pop_front().unwrap();
 
                self.progress_inference_rule(ctx, node_index)?;
 
            }
 

	
 
            // Nothing is queued anymore. However we might have integer literals
 
            // whose type cannot be inferred. For convenience's sake we'll
 
            // infer these to be s32.
 
            for (infer_node_index, infer_node) in self.infer_nodes.iter_mut().enumerate() {
 
                let expr_type = &mut infer_node.expr_type;
 
                if !expr_type.is_done && expr_type.parts.len() == 1 && expr_type.parts[0] == InferenceTypePart::IntegerLike {
 
                    // Force integer type to s32
 
                    expr_type.parts[0] = InferenceTypePart::SInt32;
 
                    expr_type.is_done = true;
 

	
 
                    // Requeue expression (and its parent, if it exists)
 
                    self.node_queued.push_back(infer_node_index);
 
                    if let Some(node_parent_index) = infer_node.parent_index {
 
                        self.node_queued.push_back(node_parent_index);
 
                    }
 
                }
 
            }
 
        }
 

	
 
        // Helper for transferring polymorphic variables to concrete types and
 
        // checking if they're completely specified
 
        fn poly_data_type_to_concrete_type(
 
            ctx: &Ctx, expr_id: ExpressionId, inference_poly_args: &Vec<InferenceType>,
 
            first_concrete_part: ConcreteTypePart,
 
        ) -> Result<ConcreteType, ParseError> {
 
            // Prepare storage vector
 
            let mut num_inference_parts = 0;
 
            for inference_type in inference_poly_args {
 
                num_inference_parts += inference_type.parts.len();
 
            }
 

	
 
            let mut concrete_type = ConcreteType{
 
                parts: Vec::with_capacity(1 + num_inference_parts),
 
            };
 
            concrete_type.parts.push(first_concrete_part);
 

	
 
            // Go through all polymorphic arguments and add them to the concrete
 
            // types.
 
            for (poly_idx, poly_type) in inference_poly_args.iter().enumerate() {
 
                if !poly_type.is_done {
 
                    let expr = &ctx.heap[expr_id];
 
                    let definition = match expr {
 
                        Expression::Call(expr) => expr.procedure.upcast(),
 
                        Expression::Literal(expr) => match &expr.value {
 
                            Literal::Enum(lit) => lit.definition,
 
                            Literal::Union(lit) => lit.definition,
 
                            Literal::Struct(lit) => lit.definition,
 
                            _ => unreachable!()
 
                        },
 
                        _ => unreachable!(),
 
                    };
 
                    let poly_vars = ctx.heap[definition].poly_vars();
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module().source, expr.operation_span(), format!(
 
                            "could not fully infer the type of polymorphic variable '{}' of this expression (got '{}')",
 
                            poly_vars[poly_idx].value.as_str(), poly_type.display_name(&ctx.heap)
 
                        )
 
                    ));
 
                }
 

	
 
                poly_type.write_concrete_type(&mut concrete_type);
 
            }
 

	
 
            Ok(concrete_type)
 
        }
 

	
 
        // Every expression has been checked, and new monomorphs are queued. Transfer the
 
        // expression information to the AST. If this is the first time we're
 
        // visiting this procedure then we assign expression indices as well.
 
        let procedure = &ctx.heap[self.procedure_id];
 
        let num_infer_nodes = self.infer_nodes.len();
 
        let mut monomorph = ProcedureDefinitionMonomorph{
 
            argument_types: Vec::with_capacity(procedure.parameters.len()),
 
            expr_info: Vec::with_capacity(num_infer_nodes),
 
        };
 

	
 
        // For all of the expressions look up the TypeId (or create a new one).
 
        // For function calls and component instantiations figure out if they
 
        // need to be typechecked
 
        for infer_node in self.infer_nodes.iter_mut() {
 
            // Determine type ID
 
            let expr = &ctx.heap[infer_node.expr_id];
 

	
 
            // TODO: Maybe optimize? Split insertion up into lookup, then clone
 
            //  if needed?
 
            let mut concrete_type = ConcreteType::default();
 
            infer_node.expr_type.write_concrete_type(&mut concrete_type);
 
            let info_type_id = ctx.types.add_monomorphed_type(ctx.modules, ctx.heap, ctx.arch, concrete_type)?;
 

	
 
            // Determine procedure type ID, i.e. a called/instantiated
 
            // procedure's signature.
 
            let info_variant = if let Expression::Call(expr) = expr {
 
                // Construct full function type. If not yet typechecked then
 
                // queue it for typechecking.
 
                let poly_data = &self.poly_data[infer_node.poly_data_index as usize];
 
                debug_assert!(expr.method.is_user_defined() || expr.method.is_public_builtin());
 
                let procedure_id = expr.procedure;
 
                let num_poly_vars = poly_data.poly_vars.len() as u32;
 

	
 
                let first_part = match expr.method {
 
                    Method::UserFunction => ConcreteTypePart::Function(procedure_id, num_poly_vars),
 
                    Method::UserComponent => ConcreteTypePart::Component(procedure_id, num_poly_vars),
 
                    _ => ConcreteTypePart::Function(procedure_id, num_poly_vars),
 
                };
 

	
 

	
 
                let definition_id = procedure_id.upcast();
 
                let signature_type = poly_data_type_to_concrete_type(
 
                    ctx, infer_node.expr_id, &poly_data.poly_vars, first_part
 
                )?;
 

	
 
                let (type_id, monomorph_index) = if let Some(type_id) = ctx.types.get_procedure_monomorph_type_id(&definition_id, &signature_type.parts) {
 
                    // Procedure is already typechecked
 
                    let monomorph_index = ctx.types.get_monomorph(type_id).variant.as_procedure().monomorph_index;
 
                    (type_id, monomorph_index)
 
                } else {
 
                    // Procedure is not yet typechecked, reserve a TypeID and a monomorph index
 
                    let procedure_to_check = &mut ctx.heap[procedure_id];
 
                    let monomorph_index = procedure_to_check.monomorphs.len() as u32;
 
                    procedure_to_check.monomorphs.push(ProcedureDefinitionMonomorph::new_invalid());
 
                    let type_id = ctx.types.reserve_procedure_monomorph_type_id(&definition_id, signature_type, monomorph_index);
 

	
 
                    if !procedure_to_check.builtin {
 
                    if !procedure_to_check.source.is_builtin() {
 
                        // Only perform typechecking on the user-defined
 
                        // procedures
 
                        queue.push_back(ResolveQueueElement{
 
                            root_id: ctx.heap[definition_id].defined_in(),
 
                            definition_id,
 
                            reserved_type_id: type_id,
 
                            reserved_monomorph_index: monomorph_index,
 
                        });
 
                    }
 

	
 
                    (type_id, monomorph_index)
 
                };
 

	
 
                ExpressionInfoVariant::Procedure(type_id, monomorph_index)
 
            } else if let Expression::Select(_expr) = expr {
 
                ExpressionInfoVariant::Select(infer_node.field_index)
 
            } else {
 
                ExpressionInfoVariant::Generic
 
            };
 

	
 
            infer_node.info_type_id = info_type_id;
 
            infer_node.info_variant = info_variant;
 
        }
 

	
 
        // Write the types of the arguments
 
        let procedure = &ctx.heap[self.procedure_id];
 
        for parameter_id in procedure.parameters.iter().copied() {
 
            let mut concrete = ConcreteType::default();
 
            let var_data = self.var_data.iter().find(|v| v.var_id == parameter_id).unwrap();
 
            var_data.var_type.write_concrete_type(&mut concrete);
 
            let type_id = ctx.types.add_monomorphed_type(ctx.modules, ctx.heap, ctx.arch, concrete)?;
 
            monomorph.argument_types.push(type_id)
 
        }
 

	
 
        // Determine if we have already assigned type indices to the expressions
 
        // before (the indices that, for a monomorph, can retrieve the type of
 
        // the expression).
 
        let has_type_indices = self.reserved_monomorph_index > 0;
 
        if has_type_indices {
 
            // already have indices, so resize and then index into it
 
            debug_assert!(monomorph.expr_info.is_empty());
 
            monomorph.expr_info.resize(num_infer_nodes, ExpressionInfo::new_invalid());
 
            for infer_node in self.infer_nodes.iter() {
 
                let type_index = ctx.heap[infer_node.expr_id].type_index();
 
                monomorph.expr_info[type_index as usize] = infer_node.as_expression_info();
 
            }
 
        } else {
 
            // no indices yet, need to be assigned in AST
 
            for infer_node in self.infer_nodes.iter() {
 
                let type_index = monomorph.expr_info.len();
 
                monomorph.expr_info.push(infer_node.as_expression_info());
 
                *ctx.heap[infer_node.expr_id].type_index_mut() = type_index as i32;
 
            }
 
        }
 

	
 
        // Push the information into the AST
 
        let procedure = &mut ctx.heap[self.procedure_id];
 
        procedure.monomorphs[self.reserved_monomorph_index as usize] = monomorph;
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_inference_rule(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
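        // Dispatch to the progression function that matches the node's rule.
 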
 
        use InferenceRule as IR;
 

	
 
        let node = &self.infer_nodes[node_index];
 
        match &node.inference_rule {
 
            IR::Noop =>
 
                unreachable!(),
 
            IR::MonoTemplate(_) =>
 
                self.progress_inference_rule_mono_template(ctx, node_index),
 
            IR::BiEqual(_) =>
 
                self.progress_inference_rule_bi_equal(ctx, node_index),
 
            IR::TriEqualArgs(_) =>
 
                self.progress_inference_rule_tri_equal_args(ctx, node_index),
 
            IR::TriEqualAll(_) =>
 
                self.progress_inference_rule_tri_equal_all(ctx, node_index),
 
            IR::Concatenate(_) =>
 
                self.progress_inference_rule_concatenate(ctx, node_index),
 
            IR::IndexingExpr(_) =>
 
                self.progress_inference_rule_indexing_expr(ctx, node_index),
 
            IR::SlicingExpr(_) =>
 
                self.progress_inference_rule_slicing_expr(ctx, node_index),
 
            IR::SelectStructField(_) =>
 
                self.progress_inference_rule_select_struct_field(ctx, node_index),
 
            IR::SelectTupleMember(_) =>
 
                self.progress_inference_rule_select_tuple_member(ctx, node_index),
 
            IR::LiteralStruct(_) =>
 
                self.progress_inference_rule_literal_struct(ctx, node_index),
 
            IR::LiteralEnum =>
 
                self.progress_inference_rule_literal_enum(ctx, node_index),
 
            IR::LiteralUnion(_) =>
 
                self.progress_inference_rule_literal_union(ctx, node_index),
 
            IR::LiteralArray(_) =>
 
                self.progress_inference_rule_literal_array(ctx, node_index),
 
            IR::LiteralTuple(_) =>
 
                self.progress_inference_rule_literal_tuple(ctx, node_index),
 
            IR::CastExpr(_) =>
 
                self.progress_inference_rule_cast_expr(ctx, node_index),
 
            IR::CallExpr(_) =>
 
                self.progress_inference_rule_call_expr(ctx, node_index),
 
            IR::VariableExpr(_) =>
 
                self.progress_inference_rule_variable_expr(ctx, node_index),
 
        }
 
    }
 

	
 
    fn progress_inference_rule_mono_template(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
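        // MonoTemplate: only a single template is applied to the node itself.
 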
 
        let node = &self.infer_nodes[node_index];
 
        let rule = *node.inference_rule.as_mono_template();
 

	
 
        let progress = self.progress_template(ctx, node_index, rule.application, rule.template)?;
 
        if progress { self.queue_node_parent(node_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_bi_equal(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
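        // BiEqual: the node and its single argument share the same type,
 
        // optionally constrained by a template.
 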
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_bi_equal();
 
        let template = rule.template;
 
        let arg_index = rule.argument_index;
 

	
 
        let base_progress = self.progress_template(ctx, node_index, template.application, template.template)?;
 
        let (node_progress, arg_progress) = self.apply_equal2_constraint(ctx, node_index, node_index, 0, arg_index, 0)?;
 

	
 
        if base_progress || node_progress { self.queue_node_parent(node_index); }
 
        if arg_progress { self.queue_node(arg_index); }
 

	
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_tri_equal_args(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
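        // TriEqualArgs: both arguments must have equal types (the argument
 
        // template is applied to the first and propagates through equality),
 
        // while the node itself only receives the result template.
 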
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_tri_equal_args();
 

	
 
        let result_template = rule.result_template;
 
        let argument_template = rule.argument_template;
 
        let arg1_index = rule.argument1_index;
 
        let arg2_index = rule.argument2_index;
 

	
 
        let self_template_progress = self.progress_template(ctx, node_index, result_template.application, result_template.template)?;
 
        let arg1_template_progress = self.progress_template(ctx, arg1_index, argument_template.application, argument_template.template)?;
 
        let (arg1_progress, arg2_progress) = self.apply_equal2_constraint(ctx, node_index, arg1_index, 0, arg2_index, 0)?;
 

	
 
        if self_template_progress { self.queue_node_parent(node_index); }
 
        if arg1_template_progress || arg1_progress { self.queue_node(arg1_index); }
 
        if arg2_progress { self.queue_node(arg2_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_tri_equal_all(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
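        // TriEqualAll: the node and both arguments must all have equal types,
 
        // optionally constrained by a template.
 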
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_tri_equal_all();
 

	
 
        let template = rule.template;
 
        let arg1_index = rule.argument1_index;
 
        let arg2_index = rule.argument2_index;
 

	
 
        let template_progress = self.progress_template(ctx, node_index, template.application, template.template)?;
 
        let (node_progress, arg1_progress, arg2_progress) =
 
            self.apply_equal3_constraint(ctx, node_index, arg1_index, arg2_index, 0)?;
 

	
 
        if template_progress || node_progress { self.queue_node_parent(node_index); }
 
        if arg1_progress { self.queue_node(arg1_index); }
 
        if arg2_progress { self.queue_node(arg2_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_concatenate(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_concatenate();
 
        let arg1_index = rule.argument1_index;
 
        let arg2_index = rule.argument2_index;
 

	
 
        // Two cases: one of the arguments is a string (then all must be), or
 
        // one of the arguments is an array (and all must be arrays).
 
        let (expr_is_str, expr_is_not_str) = self.type_is_certainly_or_certainly_not_string(node_index);
 
        let (arg1_is_str, arg1_is_not_str) = self.type_is_certainly_or_certainly_not_string(arg1_index);
 
        let (arg2_is_str, arg2_is_not_str) = self.type_is_certainly_or_certainly_not_string(arg2_index);
 

	
 
        let someone_is_str = expr_is_str || arg1_is_str || arg2_is_str;
 
        let someone_is_not_str = expr_is_not_str || arg1_is_not_str || arg2_is_not_str;
 
        // Note: this statement is an expression returning the progression bools
 
        let (node_progress, arg1_progress, arg2_progress) = if someone_is_str {
 
            // One of the arguments is a string, then all must be strings
 
            self.apply_equal3_constraint(ctx, node_index, arg1_index, arg2_index, 0)?
 
        } else {
 
            let progress_expr = if someone_is_not_str {
 
                // Output must be a normal array
 
                self.apply_template_constraint(ctx, node_index, &ARRAY_TEMPLATE)?
 
            } else {
 
                // Output may still be anything
 
                self.apply_template_constraint(ctx, node_index, &ARRAYLIKE_TEMPLATE)?
 
            };
 

	
 
            let progress_arg1 = self.apply_template_constraint(ctx, arg1_index, &ARRAYLIKE_TEMPLATE)?;
 
            let progress_arg2 = self.apply_template_constraint(ctx, arg2_index, &ARRAYLIKE_TEMPLATE)?;
 

	
 
            // If they're all arraylike, then we want the subtype to match
 
            let (subtype_expr, subtype_arg1, subtype_arg2) =
 
                self.apply_equal3_constraint(ctx, node_index, arg1_index, arg2_index, 1)?;
 

	
 
            (progress_expr || subtype_expr, progress_arg1 || subtype_arg1, progress_arg2 || subtype_arg2)
 
        };
 

	
 
        if node_progress { self.queue_node_parent(node_index); }
 
        if arg1_progress { self.queue_node(arg1_index); }
 
        if arg2_progress { self.queue_node(arg2_index); }
 

	
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_indexing_expr(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_indexing_expr();
 
        let subject_index = rule.subject_index;
 
        let index_index = rule.index_index; // node index of the index operand
 

	
 
        // Subject is arraylike, index is integerlike
 
        let subject_template_progress = self.apply_template_constraint(ctx, subject_index, &ARRAYLIKE_TEMPLATE)?;
 
        let index_template_progress = self.apply_template_constraint(ctx, index_index, &INTEGERLIKE_TEMPLATE)?;
 

	
 
        // If subject is type `Array<T>`, then expr type is `T`
 
        let (node_progress, subject_progress) =
 
            self.apply_equal2_constraint(ctx, node_index, node_index, 0, subject_index, 1)?;
 

	
 
        if node_progress { self.queue_node_parent(node_index); }
 
        if subject_template_progress || subject_progress { self.queue_node(subject_index); }
 
        if index_template_progress { self.queue_node(index_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_slicing_expr(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_slicing_expr();
 
        let subject_index = rule.subject_index;
 
        let from_index_index = rule.from_index;
 
        let to_index_index = rule.to_index;
 

	
 
        debug_log!("Rule slicing [node: {}, expr: {}]", node_index, node.expr_id.index);
 

	
 
        // Subject is arraylike, indices are integerlike
 
        let subject_template_progress = self.apply_template_constraint(ctx, subject_index, &ARRAYLIKE_TEMPLATE)?;
 
        let from_template_progress = self.apply_template_constraint(ctx, from_index_index, &INTEGERLIKE_TEMPLATE)?;
 
        let to_template_progress = self.apply_template_constraint(ctx, to_index_index, &INTEGERLIKE_TEMPLATE)?;
 
        let (from_index_progress, to_index_progress) =
 
            self.apply_equal2_constraint(ctx, node_index, from_index_index, 0, to_index_index, 0)?;
 

	
 
        // Same as array indexing: result depends on whether subject is string
 
        // or array
 
        let (is_string, is_not_string) = self.type_is_certainly_or_certainly_not_string(node_index);
 
        let (node_progress, subject_progress) = if is_string {
 
            // Certainly a string
 
            (
 
                self.apply_forced_constraint(ctx, node_index, &STRING_TEMPLATE)?,
 
                false
 
            )
 
        } else if is_not_string {
 
            // Certainly not a string, apply template constraint. Then make sure
 
            // that if we have an `Array<T>`, the slice produces a `Slice<T>`
 
            let node_template_progress = self.apply_template_constraint(ctx, node_index, &SLICE_TEMPLATE)?;
 
            let (node_progress, subject_progress) =
 
                self.apply_equal2_constraint(ctx, node_index, node_index, 1, subject_index, 1)?;
 

	
 
            (
 
                node_template_progress || node_progress,
 
                subject_progress
 
            )
 
        } else {
 
            // Not sure yet
 
            let node_template_progress = self.apply_template_constraint(ctx, node_index, &ARRAYLIKE_TEMPLATE)?;
 
            let (node_progress, subject_progress) =
 
                self.apply_equal2_constraint(ctx, node_index, node_index, 1, subject_index, 1)?;
 

	
 
            (
 
                node_template_progress || node_progress,
 
                subject_progress
 
            )
 
        };
 

	
 
        if node_progress { self.queue_node_parent(node_index); }
 
        if subject_template_progress || subject_progress { self.queue_node(subject_index); }
 
        if from_template_progress || from_index_progress { self.queue_node(from_index_index); }
 
        if to_template_progress || to_index_progress { self.queue_node(to_index_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_select_struct_field(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
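        // First resolve which struct definition (and which field index) the
 
        // subject refers to, then equate the subject with the struct type and
 
        // the node with the selected field's type through the `PolyData`.
 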
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_select_struct_field();
 

	
 
        let subject_index = rule.subject_index;
 
        let selected_field = rule.selected_field.clone();
 

	
 
        fn get_definition_id_from_inference_type(inference_type: &InferenceType) -> Result<Option<DefinitionId>, ()> {
 
            for part in inference_type.parts.iter() {
 
                if part.is_marker() { continue; }
 
                if !part.is_concrete() { break; }
 

	
 
                if let InferenceTypePart::Instance(definition_id, _) = part {
 
                    return Ok(Some(*definition_id));
 
                } else {
 
                    return Err(())
 
                }
 
            }
 

	
 
            // Nothing is known yet
 
            return Ok(None);
 
        }
 

	
 
        if node.field_index < 0 {
 
            // We don't know the subject's definition yet, hence not the field. Try to
 
            // determine it.
 
            let subject_node = &self.infer_nodes[subject_index];
 
            match get_definition_id_from_inference_type(&subject_node.expr_type) {
 
                Ok(Some(definition_id)) => {
 
                    // Determined definition of subject for the first time.
 
                    let base_definition = ctx.types.get_base_definition(&definition_id).unwrap();
 
                    let struct_definition = if let DefinedTypeVariant::Struct(struct_definition) = &base_definition.definition {
 
                        struct_definition
 
                    } else {
 
                        return Err(ParseError::new_error_at_span(
 
                            &ctx.module().source, selected_field.span, format!(
 
                                "Can only apply field access to structs, got a subject of type '{}'",
 
                                subject_node.expr_type.display_name(&ctx.heap)
 
                            )
 
                        ));
 
                    };
 

	
 
                    // Seek the field that is referenced by the select
 
                    // expression
 
                    let mut field_found = false;
 
                    for (field_index, field) in struct_definition.fields.iter().enumerate() {
 
                        if field.identifier.value == selected_field.value {
 
                            // Found the field of interest
 
                            field_found = true;
 
                            let node = &mut self.infer_nodes[node_index];
 
                            node.field_index = field_index as i32;
 
                            break;
 
                        }
 
                    }
 

	
 
                    if !field_found {
 
                        let struct_definition = ctx.heap[definition_id].as_struct();
 
                        return Err(ParseError::new_error_at_span(
 
                            &ctx.module().source, selected_field.span, format!(
 
                                "this field does not exist on the struct '{}'",
 
                                struct_definition.identifier.value.as_str()
 
                            )
 
                        ));
 
                    }
 

	
 
                    // Insert the initial data needed to infer polymorphic
 
                    // fields
 
                    let extra_index = self.insert_initial_select_polymorph_data(ctx, node_index, definition_id);
 
                    let node = &mut self.infer_nodes[node_index];
 
                    node.poly_data_index = extra_index;
 
                },
 
                Ok(None) => {
 
                    // We don't know what to do yet, because we don't know the
 
                    // subject type yet.
 
                    return Ok(())
 
                },
 
                Err(()) => {
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module().source, rule.selected_field.span, format!(
 
                            "Can only apply field access to structs, got a subject of type '{}'",
 
                            subject_node.expr_type.display_name(&ctx.heap)
 
                        )
 
                    ));
 
                },
 
            }
 
        }
 

	
 
        // If here then the field index is known, hence we can start inferring
 
        // the type of the selected field
 
        let field_expr_id = self.infer_nodes[node_index].expr_id;
 
        let subject_expr_id = self.infer_nodes[subject_index].expr_id;
 
        let mut poly_progress_section = self.poly_progress_buffer.start_section();
 

	
 
        let (_, progress_subject_1) = self.apply_polydata_equal2_constraint(
 
            ctx, node_index, subject_expr_id, "selected struct's",
 
            PolyDataTypeIndex::Associated(0), 0, subject_index, 0, &mut poly_progress_section
 
        )?;
 
        let (_, progress_field_1) = self.apply_polydata_equal2_constraint(
 
            ctx, node_index, field_expr_id, "selected field's",
 
            PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section
 
        )?;
 

	
 
        // Maybe make progress on types due to inferred polymorphic variables
 
        let progress_subject_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Associated(0), subject_index, &poly_progress_section
 
        );
 
        let progress_field_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Returned, node_index, &poly_progress_section
 
        );
 

	
 
        if progress_subject_1 || progress_subject_2 { self.queue_node(subject_index); }
 
        if progress_field_1 || progress_field_2 { self.queue_node_parent(node_index); }
 

	
 
        poly_progress_section.forget();
 
        self.finish_polydata_constraint(node_index);
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_select_tuple_member(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
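        // First determine the tuple's size to validate the member index, then
 
        // equate the node's type with the selected member's subtree within the
 
        // subject's tuple type.
 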
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_select_tuple_member();
 
        let subject_index = rule.subject_index;
 
        let tuple_member_index = rule.selected_index;
 

	
 
        if node.field_index < 0 {
 
            let subject_type = &self.infer_nodes[subject_index].expr_type;
 
            let tuple_size = get_tuple_size_from_inference_type(subject_type);
 
            let tuple_size = match tuple_size {
 
                Ok(Some(tuple_size)) => {
 
                    tuple_size
 
                },
 
                Ok(None) => {
 
                    // We can't infer anything yet
 
                    return Ok(())
 
                },
 
                Err(()) => {
 
                    let select_expr_span = ctx.heap[node.expr_id].full_span();
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module().source, select_expr_span, format!(
 
                            "tuple element select cannot be applied to a subject of type '{}'",
 
                            subject_type.display_name(&ctx.heap)
 
                        )
 
                    ));
 
                }
 
            };
 

	
 
            // If here then we at least have the tuple size. Now check that the
 
            // index doesn't exceed that size.
 
            if tuple_member_index >= tuple_size as u64 {
 
                let select_expr_span = ctx.heap[node.expr_id].full_span();
 
                return Err(ParseError::new_error_at_span(
 
                    &ctx.module().source, select_expr_span, format!(
 
                        "element index {} is out of bounds, tuple has {} elements",
 
                        tuple_member_index, tuple_size
 
                    )
 
                ));
 
            }
 

	
 
            // Within bounds, set index on the type inference node
 
            let node = &mut self.infer_nodes[node_index];
 
            node.field_index = tuple_member_index as i32;
 
        }
 

	
 
        // If here then we know we can use `tuple_member_index`. We need to keep
 
        // computing the offset to the subtype, as its value changes during
 
        // inference
 
        let subject_type = &self.infer_nodes[subject_index].expr_type;
 
        let mut selected_member_start_index = 1; // start just after the InferenceTypeElement::Tuple
 
        for _ in 0..tuple_member_index {
 
            selected_member_start_index = InferenceType::find_subtree_end_idx(&subject_type.parts, selected_member_start_index);
 
        }
 

	
 
        let (progress_member, progress_subject) = self.apply_equal2_constraint(
 
            ctx, node_index, node_index, 0, subject_index, selected_member_start_index
 
        )?;
 

	
 
        if progress_member { self.queue_node_parent(node_index); }
 
        if progress_subject { self.queue_node(subject_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_literal_struct(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let node_expr_id = node.expr_id;
 
        let rule = node.inference_rule.as_literal_struct();
 

	
 
        // For each of the fields in the literal struct, apply the type equality
 
        // constraint. If the literal is polymorphic, then we try to progress
 
        // their types during this process
 
        let element_indices_section = self.index_buffer.start_section_initialized(&rule.element_indices);
 
        let mut poly_progress_section = self.poly_progress_buffer.start_section();
 
        for (field_index, field_node_index) in element_indices_section.iter_copied().enumerate() {
 
            let field_expr_id = self.infer_nodes[field_node_index].expr_id;
 
            let (_, progress_field) = self.apply_polydata_equal2_constraint(
 
                ctx, node_index, field_expr_id, "struct field's",
 
                PolyDataTypeIndex::Associated(field_index), 0,
 
                field_node_index, 0, &mut poly_progress_section
 
            )?;
 

	
 
            if progress_field { self.queue_node(field_node_index); }
 
        }
 

	
 
        // Now we do the same thing for the struct literal expression (the type
 
        // of the struct itself).
 
        let (_, progress_literal_1) = self.apply_polydata_equal2_constraint(
 
            ctx, node_index, node_expr_id, "struct literal's",
 
            PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section
 
        )?;
 

	
 
        // And the other way around: if any of our polymorphic variables are
 
        // more specific than they were before, then we forward that information
 
        // back to our struct/fields.
 
        for (field_index, field_node_index) in element_indices_section.iter_copied().enumerate() {
 
            let progress_field = self.apply_polydata_polyvar_constraint(
 
                ctx, node_index, PolyDataTypeIndex::Associated(field_index),
 
                field_node_index, &poly_progress_section
 
            );
 

	
 
            if progress_field { self.queue_node(field_node_index); }
 
        }
 

	
 
        let progress_literal_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Returned,
 
            node_index, &poly_progress_section
 
        );
 

	
 
        if progress_literal_1 || progress_literal_2 { self.queue_node_parent(node_index); }
 

	
 
        poly_progress_section.forget();
 
        element_indices_section.forget();
 

	
 
        self.finish_polydata_constraint(node_index);
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_literal_enum(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let node_expr_id = node.expr_id;
 
        let mut poly_progress_section = self.poly_progress_buffer.start_section();
 

	
 
        // An enum literal type is simply, well, the enum's type. However, it
 
        // might still have polymorphic variables, hence the use of `PolyData`.
 
        let (_, progress_literal_1) = self.apply_polydata_equal2_constraint(
 
            ctx, node_index, node_expr_id, "enum literal's",
 
            PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section
 
        )?;
 

	
 
        let progress_literal_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Returned, node_index, &poly_progress_section
 
        );
 

	
 
        if progress_literal_1 || progress_literal_2 { self.queue_node_parent(node_index); }
 

	
 
        poly_progress_section.forget();
 
        self.finish_polydata_constraint(node_index);
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_literal_union(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let node_expr_id = node.expr_id;
 
        let rule = node.inference_rule.as_literal_union();
 

	
 
        // Infer type of any embedded values in the union variant. At the same
 
        // time progress the polymorphic variables associated with the union.
 
        let element_indices_section = self.index_buffer.start_section_initialized(&rule.element_indices);
 
        let mut poly_progress_section = self.poly_progress_buffer.start_section();
 

	
 
        for (embedded_index, embedded_node_index) in element_indices_section.iter_copied().enumerate() {
 
            let embedded_node_expr_id = self.infer_nodes[embedded_node_index].expr_id;
 
            let (_, progress_embedded) = self.apply_polydata_equal2_constraint(
 
                ctx, node_index, embedded_node_expr_id, "embedded value's",
 
                PolyDataTypeIndex::Associated(embedded_index), 0,
 
                embedded_node_index, 0, &mut poly_progress_section
 
            )?;
 

	
 
            if progress_embedded { self.queue_node(embedded_node_index); }
 
        }
 

	
 
        let (_, progress_literal_1) = self.apply_polydata_equal2_constraint(
 
            ctx, node_index, node_expr_id, "union's",
 
            PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section
 
        )?;
 

	
 
        // Propagate progress in the polymorphic variables to the expressions
 
        // that constitute the union literal.
 
        for (embedded_index, embedded_node_index) in element_indices_section.iter_copied().enumerate() {
 
            let progress_embedded = self.apply_polydata_polyvar_constraint(
 
                ctx, node_index, PolyDataTypeIndex::Associated(embedded_index),
 
                embedded_node_index, &poly_progress_section
 
            );
 

	
 
            if progress_embedded { self.queue_node(embedded_node_index); }
 
        }
 

	
 
        let progress_literal_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Returned, node_index, &poly_progress_section
 
        );
 

	
 
        if progress_literal_1 || progress_literal_2 { self.queue_node_parent(node_index); }
 

	
 
        poly_progress_section.forget();
 
        self.finish_polydata_constraint(node_index);
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_literal_array(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_literal_array();
 

	
 
        // Apply equality rule to all of the elements that form the array
 
        let argument_node_indices = self.index_buffer.start_section_initialized(&rule.element_indices);
 
        let mut argument_progress_section = self.bool_buffer.start_section();
 
        self.apply_equal_n_constraint(ctx, node_index, &argument_node_indices, &mut argument_progress_section)?;
 

	
 
        debug_assert_eq!(argument_node_indices.len(), argument_progress_section.len());
 
        for argument_index in 0..argument_node_indices.len() {
 
            let argument_node_index = argument_node_indices[argument_index];
 
            let progress = argument_progress_section[argument_index];
 

	
 
            if progress { self.queue_node(argument_node_index); }
 
        }
 

	
 
        // If elements are of type `T`, then the array is of type `Array<T>`, so:
 
        let mut progress_literal = self.apply_template_constraint(ctx, node_index, &ARRAY_TEMPLATE)?;
 
        if argument_node_indices.len() != 0 {
 
            let argument_node_index = argument_node_indices[0];
 
            let (progress_literal_inner, progress_argument) = self.apply_equal2_constraint(
 
                ctx, node_index, node_index, 1, argument_node_index, 0
 
            )?;
 

	
 
            progress_literal = progress_literal || progress_literal_inner;
 

	
 
            // It is possible that the `Array<T>` has a more progressed `T` than
 
            // the arguments. So in case we progressed our argument type we
 
            // simply queue this rule again
 
            if progress_argument { self.queue_node(node_index); }
 
        }
 

	
 
        argument_node_indices.forget();
 
        argument_progress_section.forget();
 

	
 
        if progress_literal { self.queue_node_parent(node_index); }
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_literal_tuple(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_literal_tuple();
 

	
 
        let element_indices = self.index_buffer.start_section_initialized(&rule.element_indices);
 

	
 
        // Check if we need to apply the initial tuple template type. Note that
 
        // this is a hacky check.
 
        let num_tuple_elements = rule.element_indices.len();
 
        let mut template_type = Vec::with_capacity(num_tuple_elements + 1); // TODO: @performance
 
        template_type.push(InferenceTypePart::Tuple(num_tuple_elements as u32));
 
        for _ in 0..num_tuple_elements {
 
            template_type.push(InferenceTypePart::Unknown);
 
        }
 

	
 
        let mut progress_literal = self.apply_template_constraint(ctx, node_index, &template_type)?;
 

	
 
        // Because of the (early returning error) check above, we're certain
 
        // that the tuple has the correct number of elements. Now match each
 
        // element expression type to the tuple subtype.
 
        let mut element_subtree_start_index = 1; // first element is InferenceTypePart::Tuple
 
        for element_node_index in element_indices.iter_copied() {
 
            let (progress_literal_element, progress_element) = self.apply_equal2_constraint(
 
                ctx, node_index, node_index, element_subtree_start_index, element_node_index, 0
 
            )?;
 

	
 
            progress_literal = progress_literal || progress_literal_element;
 
            if progress_element {
 
                self.queue_node(element_node_index);
 
            }
 

	
 
            // Prepare for next element
 
            let node = &self.infer_nodes[node_index];
 
            let subtree_end_index = InferenceType::find_subtree_end_idx(&node.expr_type.parts, element_subtree_start_index);
 
            element_subtree_start_index = subtree_end_index;
 
        }
 
        debug_assert_eq!(element_subtree_start_index, self.infer_nodes[node_index].expr_type.parts.len());
 

	
 
        if progress_literal { self.queue_node_parent(node_index); }
 

	
 
        element_indices.forget();
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_cast_expr(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_cast_expr();
 
        let subject_index = rule.subject_index;
 
        let subject = &self.infer_nodes[subject_index];
 

	
 
        // Make sure that both types are completely done. Note: a cast
 
        // expression cannot really infer anything between the subject and the
 
        // output type, we can only make sure that, at the end, the cast is
 
        // correct.
 
        if !node.expr_type.is_done || !subject.expr_type.is_done {
 
            return Ok(());
 
        }
 

	
 
        // Both types are known, currently the only valid casts are bool,
 
        // integer and character casts.
 
        fn is_bool_int_or_char(parts: &[InferenceTypePart]) -> bool {
 
            let mut index = 0;
 
            while index < parts.len() {
 
                let part = &parts[index];
 
                if !part.is_marker() { break; }
 
                index += 1;
 
            }
 

	
 
            debug_assert!(index != parts.len());
 
            let part = &parts[index];
 
            if *part == InferenceTypePart::Bool || *part == InferenceTypePart::Character || part.is_concrete_integer() {
 
                debug_assert!(index + 1 == parts.len()); // type is done, first part does not have children -> must be at end
 
                return true;
 
            } else {
 
                return false;
 
            }
 
        }
 

	
 
        let is_valid = if is_bool_int_or_char(&node.expr_type.parts) && is_bool_int_or_char(&subject.expr_type.parts) {
 
            true
 
        } else if InferenceType::check_subtrees(&node.expr_type.parts, 0, &subject.expr_type.parts, 0) {
 
            // again: check_subtrees is sufficient since both types are done
 
            true
 
        } else {
 
            false
 
        };
 

	
 
        if !is_valid {
 
            let cast_expr = &ctx.heap[node.expr_id];
 
            let subject_expr = &ctx.heap[subject.expr_id];
 
            return Err(ParseError::new_error_str_at_span(
 
                &ctx.module().source, cast_expr.full_span(), "invalid casting operation"
 
            ).with_info_at_span(
 
                &ctx.module().source, subject_expr.full_span(), format!(
 
                    "cannot cast the argument type '{}' to the type '{}'",
 
                    subject.expr_type.display_name(&ctx.heap),
 
                    node.expr_type.display_name(&ctx.heap)
 
                )
 
            ));
 
        }
 

	
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_call_expr(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let node_expr_id = node.expr_id;
 
        let rule = node.inference_rule.as_call_expr();
 

	
 
        let mut poly_progress_section = self.poly_progress_buffer.start_section();
 
        let argument_node_indices = self.index_buffer.start_section_initialized(&rule.argument_indices);
 

	
 
        // Perform inference on arguments to function, while trying to figure
 
        // out the polymorphic variables
 
        for (argument_index, argument_node_index) in argument_node_indices.iter_copied().enumerate() {
 
            let argument_expr_id = self.infer_nodes[argument_node_index].expr_id;
 
            let (_, progress_argument) = self.apply_polydata_equal2_constraint(
 
                ctx, node_index, argument_expr_id, "argument's",
 
                PolyDataTypeIndex::Associated(argument_index), 0,
 
                argument_node_index, 0, &mut poly_progress_section
 
            )?;
 

	
 
            if progress_argument { self.queue_node(argument_node_index); }
 
        }
 

	
 
        // Same for the return type.
 
        let (_, progress_call_1) = self.apply_polydata_equal2_constraint(
 
            ctx, node_index, node_expr_id, "return",
 
            PolyDataTypeIndex::Returned, 0,
 
            node_index, 0, &mut poly_progress_section
 
        )?;
 

	
 
        // We will now apply any progression in the polymorphic variable type
 
        // back to the arguments.
 
        for (argument_index, argument_node_index) in argument_node_indices.iter_copied().enumerate() {
 
            let progress_argument = self.apply_polydata_polyvar_constraint(
 
                ctx, node_index, PolyDataTypeIndex::Associated(argument_index),
 
                argument_node_index, &poly_progress_section
 
            );
 

	
 
            if progress_argument { self.queue_node(argument_node_index); }
 
        }
 

	
 
        // And back to the return type.
 
        let progress_call_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Returned,
 
            node_index, &poly_progress_section
 
        );
 

	
 
        if progress_call_1 || progress_call_2 { self.queue_node_parent(node_index); }
 

	
 
        poly_progress_section.forget();
 
        argument_node_indices.forget();
 

	
 
        self.finish_polydata_constraint(node_index);
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_variable_expr(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &mut self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_variable_expr();
 
        let var_data_index = rule.var_data_index;
 

	
 
        let var_data = &mut self.var_data[var_data_index];
 
        // Apply inference to the shared variable type and the expression type
 
        let shared_type: *mut _ = &mut var_data.var_type;
 
        let expr_type: *mut _ = &mut node.expr_type;
 

	
 
        let inference_result = unsafe {
 
            // safety: vectors exist in different storage vectors, so cannot alias
 
            InferenceType::infer_subtrees_for_both_types(shared_type, 0, expr_type, 0)
 
        };
 

	
 
        if inference_result == DualInferenceResult::Incompatible {
 
            return Err(self.construct_variable_type_error(ctx, node_index));
 
        }
 

	
 
        let progress_var_data = inference_result.modified_lhs();
 
        let progress_expr = inference_result.modified_rhs();
 

	
 
        if progress_var_data {
 
            // We progressed the type of the shared variable, so propagate this
 
            // to all associated variable expressions (and related variables).
 
            for other_node_index in var_data.used_at.iter().copied() {
 
                if other_node_index != node_index {
 
                    self.node_queued.push_back(other_node_index);
 
                }
 
            }
 

	
 
            if let Some(linked_var_data_index) = var_data.linked_var {
 
                // Only perform one-way inference, progressing the linked
 
                // variable.
 
                // note: because this "linking" is used only for channels, we
 
                // will start inference one level below the top-level in the
 
                // type tree (i.e. ensure `T` in `in<T>` and `out<T>` is equal).
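                // (Illustrative note, not in the original source: if this port
                // is already known to be `in<u32>`, then the linked port's
                // `out<T>` gets `T = u32`; the `in`/`out` wrappers themselves
                // are left untouched because inference starts at part index 1.)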
 
                debug_assert!(
 
                    var_data.var_type.parts[0] == InferenceTypePart::Input ||
 
                    var_data.var_type.parts[0] == InferenceTypePart::Output
 
                );
 
                let this_var_type: *const _ = &var_data.var_type;
 
                let linked_var_data = &mut self.var_data[linked_var_data_index];
 
                debug_assert!(
 
                    linked_var_data.var_type.parts[0] == InferenceTypePart::Input ||
 
                    linked_var_data.var_type.parts[0] == InferenceTypePart::Output
 
                );
 

	
 
                // safety: by construction var_data_index and linked_var_data_index cannot be the
 
                // same, hence we're not aliasing here.
 
                let inference_result = InferenceType::infer_subtree_for_single_type(
 
                    &mut linked_var_data.var_type, 1,
 
                    unsafe{ &(*this_var_type).parts }, 1, false
 
                );
 
                match inference_result {
 
                    SingleInferenceResult::Modified => {
 
                        for used_at in linked_var_data.used_at.iter().copied() {
 
                            self.node_queued.push_back(used_at);
 
                        }
 
                    },
 
                    SingleInferenceResult::Unmodified => {},
 
                    SingleInferenceResult::Incompatible => {
 
                        let var_data_this = &self.var_data[var_data_index];
 
                        let var_decl_this = &ctx.heap[var_data_this.var_id];
 
                        let var_data_linked = &self.var_data[linked_var_data_index];
 
                        let var_decl_linked = &ctx.heap[var_data_linked.var_id];
 

	
 
                        return Err(ParseError::new_error_at_span(
 
                            &ctx.module().source, var_decl_this.identifier.span, format!(
 
                                "conflicting types for this channel, this port has type '{}'",
 
                                var_data_this.var_type.display_name(&ctx.heap)
 
                            )
 
                        ).with_info_at_span(
 
                            &ctx.module().source, var_decl_linked.identifier.span, format!(
 
                                "while this port has type '{}'",
 
                                var_data_linked.var_type.display_name(&ctx.heap)
 
                            )
 
                        ));
 
                    }
 
                }
 
            }
 
        }
 

	
 
        if progress_expr { self.queue_node_parent(node_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_template(&mut self, ctx: &Ctx, node_index: InferNodeIndex, application: InferenceRuleTemplateApplication, template: &[InferenceTypePart]) -> Result<bool, ParseError> {
 
        use InferenceRuleTemplateApplication as TA;
 

	
 
        match application {
 
            TA::None => Ok(false),
 
            TA::Template => self.apply_template_constraint(ctx, node_index, template),
 
            TA::Forced => self.apply_forced_constraint(ctx, node_index, template),
 
        }
 
    }
 

	
 
    fn queue_node_parent(&mut self, node_index: InferNodeIndex) {
 
        let node = &self.infer_nodes[node_index];
 
        if let Some(parent_node_index) = node.parent_index {
 
            self.node_queued.push_back(parent_node_index);
 
        }
 
    }
 

	
 
    #[inline]
 
    fn queue_node(&mut self, node_index: InferNodeIndex) {
 
        self.node_queued.push_back(node_index);
 
    }
 

	
 
    /// Returns whether the type is certainly a string (true, false), certainly
 
    /// not a string (false, true), or still unknown (false, false).
 
    fn type_is_certainly_or_certainly_not_string(&self, node_index: InferNodeIndex) -> (bool, bool) {
 
        let expr_type = &self.infer_nodes[node_index].expr_type;
 
        let mut part_index = 0;
 
        while part_index < expr_type.parts.len() {
 
            let part = &expr_type.parts[part_index];
 

	
 
            if part.is_marker() {
 
                part_index += 1;
 
                continue;
 
            }
 
            if !part.is_concrete() { break; }
 

	
 
            if *part == InferenceTypePart::String {
 
                // First part is a string
 
                return (true, false);
 
            } else {
 
                return (false, true);
 
            }
 
        }
 

	
 
        // If here then first non-marker type is not concrete
 
        if part_index == expr_type.parts.len() {
 
            // nothing known at all
 
            return (false, false);
 
        }
 

	
 
        // Special case: array-like where its argument is not a character
 
        if part_index + 1 < expr_type.parts.len() {
 
            if expr_type.parts[part_index] == InferenceTypePart::ArrayLike && expr_type.parts[part_index + 1] != InferenceTypePart::Character {
 
                return (false, true);
 
            }
 
        }
 

	
 

	
 
        (false, false)
 
    }
 

	
 
    /// Applies a template type constraint: the type associated with the
 
    /// supplied expression will be molded into the provided `template`. The constraint
 
    /// is also considered valid if the template could've been molded into the
 
    /// expression type as well. Hence the template may be fully specified (e.g.
 
    /// a bool) or contain "inference" variables (e.g. an array of T)
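    ///
    /// For example (illustrative): the array-literal rule above applies
    /// `ARRAY_TEMPLATE`, which pins the expression to an array type while
    /// leaving the element type to be inferred later.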
 
    fn apply_template_constraint(
 
        &mut self, ctx: &Ctx, node_index: InferNodeIndex, template: &[InferenceTypePart]
 
    ) -> Result<bool, ParseError> {
 
        let expr_type = &mut self.infer_nodes[node_index].expr_type;
 
        match InferenceType::infer_subtree_for_single_type(expr_type, 0, template, 0, false) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(
 
                self.construct_template_type_error(ctx, node_index, template)
 
            )
 
        }
 
    }
 

	
 
    /// Applies a forced constraint: the supplied expression's type MUST be
 
    /// inferred from the template, the other way around is considered invalid.
 
    fn apply_forced_constraint(
 
        &mut self, ctx: &Ctx, node_index: InferNodeIndex, template: &[InferenceTypePart]
 
    ) -> Result<bool, ParseError> {
 
        let expr_type = &mut self.infer_nodes[node_index].expr_type;
 

	
 
        match InferenceType::infer_subtree_for_single_type(expr_type, 0, template, 0, true) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(
 
                self.construct_template_type_error(ctx, node_index, template)
 
            )
 
        }
 
    }
 

	
 
    /// Applies a type constraint that expects the two provided types to be
 
    /// equal. We attempt to make progress in inferring the types. If the call
 
    /// is successful then the composition of all types is made equal.
 
    /// The "parent" `expr_id` is provided to construct errors.
 
    fn apply_equal2_constraint(
 
        &mut self, ctx: &Ctx, node_index: InferNodeIndex,
 
        arg1_index: InferNodeIndex, arg1_start_idx: usize,
 
        arg2_index: InferNodeIndex, arg2_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError> {
 
        let arg1_type: *mut _ = &mut self.infer_nodes[arg1_index].expr_type;
 
        let arg2_type: *mut _ = &mut self.infer_nodes[arg2_index].expr_type;
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            arg1_type, arg1_start_idx,
 
            arg2_type, arg2_start_idx
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, node_index, arg1_index, arg2_index));
 
        }
 

	
 
        Ok((infer_res.modified_lhs(), infer_res.modified_rhs()))
 
    }
 

	
 
    /// Applies an equal2 constraint between a member of the `PolyData` struct,
 
    /// and another inferred type. If any progress is made in the `PolyData`
 
    /// struct then the affected polymorphic variables are updated as well.
 
    ///
 
    /// Because a lot of types/expressions are involved in polymorphic type
 
    /// inference, some explanation: "outer_node" refers to the main expression
 
    /// that is the root cause of type inference (e.g. a struct literal
 
    /// expression, or a tuple member select expression). Associated with that
 
    /// outer node is `PolyData`, so that is what the "poly_data" variables
 
    /// are referring to. We are applying equality between a "poly_data" type
 
    /// and an associated expression (not necessarily the "outer_node", e.g.
 
    /// the expression that constructs the value of a struct field). Hence the
 
    /// "associated" variables.
 
    ///
 
    /// Finally, when an error occurs we'll first show the outer node's
 
    /// location. As info, the `error_location_expr_id` span is shown,
 
    /// indicating that the "`error_type_name` type has been resolved to
 
    /// `outer_node_type`, but this expression has been resolved to
 
    /// `associated_node_type`".
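    ///
    /// Illustrative sketch (not part of the original documentation): for a
    /// hypothetical call `foo<T>(x)` the call expression is the "outer_node",
    /// its `PolyData` holds `foo`'s parameter and return types expressed in
    /// terms of `T`, and the argument expression `x` is the "associated"
    /// expression matched against the corresponding parameter type.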
 
    fn apply_polydata_equal2_constraint(
 
        &mut self, ctx: &Ctx,
 
        outer_node_index: InferNodeIndex, error_location_expr_id: ExpressionId, error_type_name: &str,
 
        poly_data_type_index: PolyDataTypeIndex, poly_data_start_index: usize,
 
        associated_node_index: InferNodeIndex, associated_node_start_index: usize,
 
        poly_progress_section: &mut ScopedSection<u32>,
 
    ) -> Result<(bool, bool), ParseError> {
 
        let poly_data_index = self.infer_nodes[outer_node_index].poly_data_index;
 
        let poly_data = &mut self.poly_data[poly_data_index as usize];
 
        let poly_data_type = poly_data.expr_types.get_type_mut(poly_data_type_index);
 
        let associated_type: *mut _ = &mut self.infer_nodes[associated_node_index].expr_type;
 

	
 
        let inference_result = unsafe{
 
            // Safety: pointers originate from different vectors, so cannot
 
            // alias.
 
            let poly_data_type: *mut _ = poly_data_type;
 
            InferenceType::infer_subtrees_for_both_types(
 
                poly_data_type, poly_data_start_index,
 
                associated_type, associated_node_start_index
 
            )
 
        };
 

	
 
        let modified_poly_data = inference_result.modified_lhs();
 
        let modified_associated = inference_result.modified_rhs();
 
        if inference_result == DualInferenceResult::Incompatible {
 
            let outer_node_expr_id = self.infer_nodes[outer_node_index].expr_id;
 
            let outer_node_span = ctx.heap[outer_node_expr_id].full_span();
 
            let detailed_span = ctx.heap[error_location_expr_id].full_span();
 

	
 
            let outer_node_type = poly_data_type.display_name(&ctx.heap);
 
            let associated_type = self.infer_nodes[associated_node_index].expr_type.display_name(&ctx.heap);
 

	
 
            let source = &ctx.module().source;
 
            return Err(ParseError::new_error_str_at_span(
 
                source, outer_node_span, "failed to resolve the types of this expression"
 
            ).with_info_str_at_span(
 
                source, detailed_span, &format!(
 
                    "because the {} type has been resolved to '{}', but this expression has been resolved to '{}'",
 
                    error_type_name, outer_node_type, associated_type
 
                )
 
            ));
 
        }
 

	
 
        if modified_poly_data {
 
            debug_assert!(poly_data_type.has_marker);
 

	
 
            // Go through markers for polymorphic variables and use the
 
            // (hopefully) more specific types to update their representation
 
            // in the PolyData struct
 
            for (poly_var_index, poly_var_section) in poly_data_type.marker_iter() {
 
                let poly_var_type = &mut poly_data.poly_vars[poly_var_index as usize];
 
                match InferenceType::infer_subtree_for_single_type(poly_var_type, 0, poly_var_section, 0, false) {
 
                    SingleInferenceResult::Modified => {
 
                        poly_progress_section.push_unique(poly_var_index);
 
                    },
 
                    SingleInferenceResult::Unmodified => {
 
                        // nothing to do
 
                    },
 
                    SingleInferenceResult::Incompatible => {
 
                        return Err(Self::construct_poly_arg_error(
 
                            ctx, &self.poly_data[poly_data_index as usize],
 
                            self.infer_nodes[outer_node_index].expr_id
 
                        ));
 
                    }
 
                }
 
            }
 
        }
 

	
 
        return Ok((modified_poly_data, modified_associated));
 
    }
 

	
 
    /// After calling `apply_polydata_equal2_constraint` on several expressions
 
    /// that are associated with some kind of polymorphic expression, several of
 
    /// the polymorphic variables might have been inferred to more specific
 
    /// types than before.
 
    ///
 
    /// At this point one should call this function to apply the progress in
 
    /// these polymorphic variables back onto the types that are functions of
 
    /// these polymorphic variables.
 
    ///
 
    /// An example: a struct literal with a polymorphic variable `T` may have
 
    /// two fields `foo` and `bar` each with different types that are a function
 
    /// of the polymorphic variable `T`. If the expression constructing the
 
    /// value for the field `foo` causes the type `T` to progress, then we can
 
    /// also progress the type of the expression that constructs `bar`.
 
    ///
 
    /// And so we have `outer_node_index` + `poly_data_type_index` pointing to
 
    /// the appropriate type in the `PolyData` struct. Which will be updated
 
    /// first using the polymorphic variables. If we happen to have updated that
 
    /// type, then we should also progress the associated expression, hence the
 
    /// `associated_node_index`.
 
    fn apply_polydata_polyvar_constraint(
 
        &mut self, _ctx: &Ctx,
 
        outer_node_index: InferNodeIndex, poly_data_type_index: PolyDataTypeIndex,
 
        associated_node_index: InferNodeIndex, poly_progress_section: &ScopedSection<u32>
 
    ) -> bool {
 
        let poly_data_index = self.infer_nodes[outer_node_index].poly_data_index;
 
        let poly_data = &mut self.poly_data[poly_data_index as usize];
 

	
 
        // Early exit, most common case (literals or function calls which are
 
        // actually not polymorphic)
 
        if !poly_data.first_rule_application && poly_progress_section.len() == 0 {
 
            return false;
 
        }
 

	
 
        // safety: we're borrowing from two distinct fields, so should be fine
 
        let poly_data_type = poly_data.expr_types.get_type_mut(poly_data_type_index);
 
        let mut last_start_index = 0;
 
        let mut modified_poly_type = false;
 

	
 
        while let Some((poly_var_index, poly_var_start_index)) = poly_data_type.find_marker(last_start_index) {
 
            let poly_var_end_index = InferenceType::find_subtree_end_idx(&poly_data_type.parts, poly_var_start_index);
 

	
 
            if poly_data.first_rule_application || poly_progress_section.contains(&poly_var_index) {
 
                // We have updated this polymorphic variable, so try updating it
 
                // in the PolyData type
 
                let modified_in_poly_data = match InferenceType::infer_subtree_for_single_type(
 
                    poly_data_type, poly_var_start_index, &poly_data.poly_vars[poly_var_index as usize].parts, 0, false
 
                ) {
 
                    SingleInferenceResult::Modified => true,
 
                    SingleInferenceResult::Unmodified => false,
 
                    SingleInferenceResult::Incompatible => {
 
                        // practically impossible: before calling this function we gather all the
 
                        // data on the polymorphic variables from the associated expressions. So if
 
                        // the polymorphic variables in those expressions were not mutually
 
                        // compatible, we must have encountered that error already.
 
                        unreachable!()
 
                    },
 
                };
 

	
 
                modified_poly_type = modified_poly_type || modified_in_poly_data;
 
            }
 

	
 
            last_start_index = poly_var_end_index;
 
        }
 

	
 
        if modified_poly_type {
 
            let associated_type = &mut self.infer_nodes[associated_node_index].expr_type;
 
            match InferenceType::infer_subtree_for_single_type(
 
                associated_type, 0, &poly_data_type.parts, 0, true
 
            ) {
 
                SingleInferenceResult::Modified => return true,
 
                SingleInferenceResult::Unmodified => return false,
 
                SingleInferenceResult::Incompatible => unreachable!(), // same as above
 
            }
 
        } else {
 
            // Did not update associated type
 
            return false;
 
        }
 
    }
 

	
 
    /// Should be called after completing one full round of applying polydata
 
    /// constraints.
 
    fn finish_polydata_constraint(&mut self, outer_node_index: InferNodeIndex) {
 
        let poly_data_index = self.infer_nodes[outer_node_index].poly_data_index;
 
        let poly_data = &mut self.poly_data[poly_data_index as usize];
 
        poly_data.first_rule_application = false;
 
    }
 

	
 
    /// Applies a type constraint that expects all three provided types to be
 
    /// equal. In case we can make progress in inferring the types then we
 
    /// attempt to do so. If the call is successful then the composition of all
 
    /// types is made equal.
 
    fn apply_equal3_constraint(
 
        &mut self, ctx: &Ctx, node_index: InferNodeIndex,
 
        arg1_index: InferNodeIndex, arg2_index: InferNodeIndex,
 
        start_idx: usize
 
    ) -> Result<(bool, bool, bool), ParseError> {
 
        // Safety: all indices are unique
 
        //         containers may not be modified
 
        let expr_type: *mut _ = &mut self.infer_nodes[node_index].expr_type;
 
        let arg1_type: *mut _ = &mut self.infer_nodes[arg1_index].expr_type;
 
        let arg2_type: *mut _ = &mut self.infer_nodes[arg2_index].expr_type;
 

	
 
        let expr_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(expr_type, start_idx, arg1_type, start_idx)
 
        };
 
        if expr_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_expr_type_error(ctx, node_index, arg1_index));
 
        }
 

	
 
        let args_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(arg1_type, start_idx, arg2_type, start_idx) };
 
        if args_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, node_index, arg1_index, arg2_index));
 
        }
 

	
 
        // If all types are compatible, but the second call caused the arg1_type
 
        // to be expanded, then we must also assign this to expr_type.
 
        let mut progress_expr = expr_res.modified_lhs();
 
        let mut progress_arg1 = expr_res.modified_rhs();
 
        let progress_arg2 = args_res.modified_rhs();
 

	
 
        if args_res.modified_lhs() { 
 
            unsafe {
 
                let end_idx = InferenceType::find_subtree_end_idx(&(*arg2_type).parts, start_idx);
 
                let subtree = &((*arg2_type).parts[start_idx..end_idx]);
 
                (*expr_type).replace_subtree(start_idx, subtree);
 
            }
 
            progress_expr = true;
 
            progress_arg1 = true;
 
        }
 

	
 
        Ok((progress_expr, progress_arg1, progress_arg2))
 
    }
 

	
 
    /// Applies equal constraint to N consecutive expressions. The returned
 
    /// `progress` vec will contain which expressions were progressed and will
 
    /// have length N.
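    ///
    /// Sketch of the procedure (derived from the body below): for arguments
    /// `[a, b, c]` we infer `a ~ b`, then `b ~ c`, and finally copy the most
    /// progressed type (that of `c`) back onto any earlier argument that still
    /// lags behind.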
 
    fn apply_equal_n_constraint(
 
        &mut self, ctx: &Ctx, outer_node_index: InferNodeIndex,
 
        arguments: &ScopedSection<InferNodeIndex>, progress: &mut ScopedSection<bool>
 
    ) -> Result<(), ParseError> {
 
        // Depending on the argument count, perform an early exit. This simplifies
 
        // later logic
 
        debug_assert_eq!(progress.len(), 0);
 
        match arguments.len() {
 
            0 => {
 
                // nothing to progress
 
                return Ok(())
 
            },
 
            1 => {
 
                // only one type, so nothing to infer
 
                progress.push(false);
 
                return Ok(())
 
            },
 
            n => {
 
                for _ in 0..n {
 
                    progress.push(false);
 
                }
 
            }
 
        }
 

	
 
        // We'll start doing pairwise inference for all of the inference nodes
 
        // (node[0] with node[1], then node[1] with node[2], then node[2] ...,
 
        // etc.), so when we're at the end we have `node[N-1]` as the most
 
        // progressed type.
 
        let mut last_index_requiring_inference = 0;
 

	
 
        for prev_argument_index in 0..arguments.len() - 1 {
 
            let next_argument_index = prev_argument_index + 1;
 

	
 
            let prev_node_index = arguments[prev_argument_index];
 
            let next_node_index = arguments[next_argument_index];
 
            let (prev_progress, next_progress) = self.apply_equal2_constraint(
 
                ctx, outer_node_index, prev_node_index, 0, next_node_index, 0
 
            )?;
 

	
 
            if prev_progress {
 
                // Previous node progressed, so every type before it needs
 
                // to be reinferred.
 
                progress[prev_argument_index] = true;
 
                last_index_requiring_inference = prev_argument_index;
 
            }
 
            progress[next_argument_index] = next_progress;
 
        }
 

	
 
        // Apply inference using the most progressed type (the last one) to the
 
        // ones that did not obtain this information during the inference
 
        // process.
 
        let last_argument_node_index = arguments[arguments.len() - 1];
 
        let last_argument_type: *mut _ = &mut self.infer_nodes[last_argument_node_index].expr_type;
 

	
 
        for argument_index in 0..last_index_requiring_inference {
 
            // We can cheat, we know the LHS is less specific than the right
 
            // hand side, so:
 
            let argument_node_index = arguments[argument_index];
 
            let argument_type = &mut self.infer_nodes[argument_node_index].expr_type;
 
            unsafe {
 
                // safety: we're dealing with different vectors, so cannot alias
 
                argument_type.replace_subtree(0, &(*last_argument_type).parts);
 
            }
 
            progress[argument_index] = true;
 
        }
 

	
 
        return Ok(());
 
    }
 

	
 
    /// Determines the `InferenceType` for the expression based on the
 
    /// expression parent (this is not done if the parent is a regular ol'
 
    /// expression). Expects `parent_index` to be set to the parent of the
 
    /// inference node that is created here.
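    ///
    /// For example (see the match below): an expression used as the test of an
    /// `if` or `while` statement is forced to `bool`, while an expression
    /// statement starts out with a fully unknown type.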
 
    fn insert_initial_inference_node(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId
 
    ) -> Result<InferNodeIndex, ParseError> {
 
        use ExpressionParent as EP;
 
        use InferenceTypePart as ITP;
 

	
 
        // Set the initial inference type based on the expression parent.
 
        let expr = &ctx.heap[expr_id];
 
        let inference_type = match expr.parent() {
 
            EP::None =>
 
                // Should have been set by linker
 
                unreachable!(),
 
            EP::Memory(_) | EP::ExpressionStmt(_) =>
 
                // Determined during type inference
 
                InferenceType::new(false, false, vec![ITP::Unknown]),
 
            EP::Expression(parent_id, idx_in_parent) => {
 
                // If we are the test expression of a conditional expression,
 
                // then we must resolve to a boolean
 
                let is_conditional = if let Expression::Conditional(_) = &ctx.heap[*parent_id] {
 
                    true
 
                } else {
 
                    false
 
                };
 

	
 
                if is_conditional && *idx_in_parent == 0 {
 
                    InferenceType::new(false, true, vec![ITP::Bool])
 
                } else {
 
                    InferenceType::new(false, false, vec![ITP::Unknown])
 
                }
 
            },
 
            EP::If(_) | EP::While(_) =>
 
                // Must be a boolean
 
                InferenceType::new(false, true, vec![ITP::Bool]),
 
            EP::Return(_) => {
 
                // Must match the return type of the function
 
                debug_assert_eq!(self.procedure_kind, ProcedureKind::Function);
 
                let returned = &ctx.heap[self.procedure_id].return_type.as_ref().unwrap();
 
                self.determine_inference_type_from_parser_type_elements(&returned.elements, true)
 
            },
 
            EP::New(_) =>
 
                // Must be a component call, which we assign a "Void" return
 
                // type
 
                InferenceType::new(false, true, vec![ITP::Void]),
 
        };
 

	
 
        let infer_index = self.infer_nodes.len() as InferNodeIndex;
 
        self.infer_nodes.push(InferenceNode {
 
            expr_type: inference_type,
 
            expr_id,
 
            inference_rule: InferenceRule::Noop,
 
            parent_index: self.parent_index,
 
            field_index: -1,
 
            poly_data_index: -1,
 
            info_type_id: TypeId::new_invalid(),
 
            info_variant: ExpressionInfoVariant::Generic,
 
        });
 

	
 
        return Ok(infer_index);
 
    }
 

	
 
    fn insert_initial_call_polymorph_data(
 
        &mut self, ctx: &mut Ctx, call_id: CallExpressionId
 
    ) -> PolyDataIndex {
 
        // Note: the polymorphic variables may be partially specified and may
 
        // contain references to the wrapping definition's (i.e. the proctype
 
        // we are currently visiting) polymorphic arguments.
 
        //
 
        // The arguments of the call may refer to polymorphic variables in the
 
        // definition of the function we're calling, not of the wrapping
 
        // definition. We insert markers in these inferred types to be able to
 
        // map them back and forth to the polymorphic arguments of the function
 
        // we are calling.
 
        let call = &ctx.heap[call_id];
 

	
 
        // Handle the polymorphic arguments (if there are any)
 
        let num_poly_args = call.parser_type.elements[0].variant.num_embedded();
 
        let mut poly_args = Vec::with_capacity(num_poly_args);
 
        for embedded_elements in call.parser_type.iter_embedded(0) {
 
            poly_args.push(self.determine_inference_type_from_parser_type_elements(embedded_elements, true));
 
        }
 

	
 
        // Handle the arguments and return types
 
        let definition = &ctx.heap[call.procedure];
 
        debug_assert_eq!(poly_args.len(), definition.poly_vars.len());
 

	
 
        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
        let parameter_section = self.var_buffer.start_section_initialized(&definition.parameters);
 
        for parameter_id in parameter_section.iter_copied() {
 
            let param = &ctx.heap[parameter_id];
 
            parameter_types.push(self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, false));
 
        }
 
        parameter_section.forget();
 

	
 
        let return_type = match &definition.return_type {
 
            None => {
 
                // Component, so returns a "Void"
 
                debug_assert_ne!(definition.kind, ProcedureKind::Function);
 
                InferenceType::new(false, true, vec![InferenceTypePart::Void])
 
            },
 
            Some(returned) => {
 
                debug_assert_eq!(definition.kind, ProcedureKind::Function);
 
                self.determine_inference_type_from_parser_type_elements(&returned.elements, false)
 
            }
 
        };
 

	
 
        let extra_data_idx = self.poly_data.len() as PolyDataIndex;
 
        self.poly_data.push(PolyData {
 
            first_rule_application: true,
 
            definition_id: call.procedure.upcast(),
 
            poly_vars: poly_args,
 
            expr_types: PolyDataTypes {
 
                associated: parameter_types,
 
                returned: return_type
 
            }
 
        });
 
        return extra_data_idx
 
    }
 

	
 
    fn insert_initial_struct_polymorph_data(
 
        &mut self, ctx: &mut Ctx, lit_id: LiteralExpressionId,
 
    ) -> PolyDataIndex {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_struct();
 

	
 
        // Handle polymorphic arguments
 
        let num_embedded = literal.parser_type.elements[0].variant.num_embedded();
 
        let mut total_num_poly_parts = 0;
 
        let mut poly_args = Vec::with_capacity(num_embedded);
 

	
 
        for embedded_elements in literal.parser_type.iter_embedded(0) {
 
            let poly_type = self.determine_inference_type_from_parser_type_elements(embedded_elements, true);
 
            total_num_poly_parts += poly_type.parts.len();
 
            poly_args.push(poly_type);
 
        }
 

	
 
        // Handle parser types on struct definition
 
        let defined_type = ctx.types.get_base_definition(&literal.definition).unwrap();
 
        let struct_type = defined_type.definition.as_struct();
 
        debug_assert_eq!(poly_args.len(), defined_type.poly_vars.len());
 

	
 
        // Note: the programmer may specify fields in a struct literal
 
        // in a different order than in the definition. We take the literal-
 
        // specified order to be leading.
 
        let mut embedded_types = Vec::with_capacity(struct_type.fields.len());
 
        for lit_field in literal.fields.iter() {
 
            let def_field = &struct_type.fields[lit_field.field_idx];
 
            let inference_type = self.determine_inference_type_from_parser_type_elements(&def_field.parser_type.elements, false);
 
            embedded_types.push(inference_type);
 
        }
 

	
 
        // Return type is the struct type itself, with the appropriate 
 
        // polymorphic variables. So:
 
        // - 1 part for definition
 
        // - N_poly_arg marker parts for each polymorphic argument
 
        // - all the parts for the currently known polymorphic arguments 
 
        let parts_reserved = 1 + poly_args.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(literal.definition, poly_args.len() as u32));
 
        let mut return_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_args.iter().enumerate() {
 
            if !poly_var.is_done { return_type_done = false; }
 

	
 
            parts.push(ITP::Marker(poly_var_idx as u32));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let return_type = InferenceType::new(!poly_args.is_empty(), return_type_done, parts);
 

	
 
        let extra_data_index = self.poly_data.len() as PolyDataIndex;
 
        self.poly_data.push(PolyData {
 
            first_rule_application: true,
 
            definition_id: literal.definition,
 
            poly_vars: poly_args,
 
            expr_types: PolyDataTypes {
 
                associated: embedded_types,
 
                returned: return_type,
 
            },
 
        });
 

	
 
        return extra_data_index
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct for enum expressions. The
 
    /// polymorphic variables can never be determined from the enum itself, but
 
    /// may be inferred from the use of the enum.
 
    fn insert_initial_enum_polymorph_data(
 
        &mut self, ctx: &Ctx, lit_id: LiteralExpressionId
 
    ) -> PolyDataIndex {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_enum();
 

	
 
        // Handle polymorphic arguments to the enum
 
        let num_poly_args = literal.parser_type.elements[0].variant.num_embedded();
 
        let mut total_num_poly_parts = 0;
 
        let mut poly_args = Vec::with_capacity(num_poly_args);
 

	
 
        for embedded_elements in literal.parser_type.iter_embedded(0) {
 
            let poly_type = self.determine_inference_type_from_parser_type_elements(embedded_elements, true);
 
            total_num_poly_parts += poly_type.parts.len();
 
            poly_args.push(poly_type);
 
        }
 

	
 
        // Handle enum type itself
 
        let parts_reserved = 1 + poly_args.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(literal.definition, poly_args.len() as u32));
 
        let mut enum_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_args.iter().enumerate() {
 
            if !poly_var.is_done { enum_type_done = false; }
 

	
 
            parts.push(ITP::Marker(poly_var_idx as u32));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let enum_type = InferenceType::new(!poly_args.is_empty(), enum_type_done, parts);
 

	
 
        let extra_data_index = self.poly_data.len() as PolyDataIndex;
 
        self.poly_data.push(PolyData {
 
            first_rule_application: true,
 
            definition_id: literal.definition,
 
            poly_vars: poly_args,
 
            expr_types: PolyDataTypes {
 
                associated: Vec::new(),
 
                returned: enum_type,
 
            },
 
        });
 

	
 
        return extra_data_index;
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct for unions. The polymorphic
src/protocol/parser/type_table.rs
Show inline comments
 
/**
 
 * type_table.rs
 
 *
 
 * The type table is a lookup from AST definition (which contains just what the
 
 * programmer typed) to a type with additional information computed (e.g. the
 
 * byte size and offsets of struct members). The type table should be considered
 
 * the authoritative source of information on types by the compiler (not the
 
 * AST itself!).
 
 *
 
 * The type table operates in two modes: one is where we just look up the type,
 
 * check its fields for correctness and mark whether it is polymorphic or not.
 
 * The second one is where we compute byte sizes, alignment and offsets.
 
 *
 
 * The basic algorithm for type resolving and computing byte sizes is to
 
 * recursively try to lay out each member type of a particular type. This is
 
 * done in a stack-like fashion, where each embedded type pushes a breadcrumb
 
 * onto the stack. We may discover a cycle in embedded types (we call this a
 
 * "type loop"). After which the type table attempts to break the type loop by
 
 * making specific types heap-allocated. Upon doing so we know their size
 
 * because their stack-size is now based on pointers. Hence breaking the type
 
 * loop is required for computing the byte size of types.
 
 *
 
 * The reason for these type shenanigans is because PDL is a value-based
 
 * language, but we would still like to be able to express recursively defined
 
 * types like trees or linked lists. Hence we need to insert pointers somewhere
 
 * to break these cycles.
 
 *
 
 * We will insert these pointers into the variants of unions. However note that
 
 * we can only compute the stack size of a union once we've looked at *all*
 
 * variants. Hence we perform an initial pass where we detect type loops, a
 
 * second pass where we compute the stack sizes of everything, and a third pass
 
 * where we actually compute the size of the heap allocations for unions.
 
 *
 
 * As a final bit of global documentation: non-polymorphic types will always
 
 * have one "monomorph" entry. This contains the non-polymorphic type's memory
 
 * layout.
 
 */
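
// Illustrative example (hypothetical PDL-like pseudocode, not from this
// changeset): a linked list such as
//
//     union Link { None, Some(Node) }
//     struct Node { u32 value, Link next }
//
// forms a type loop through `Link`. The loop is broken by storing the
// `Some(Node)` variant behind a heap pointer, after which the stack sizes of
// both `Link` and `Node` become computable.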
 

	
 
// Programmer note: deduplication of types is currently disabled, see the
 
// @Deduplication key. Tests might fail when it is re-enabled.
 
use std::collections::HashMap;
 
use std::hash::{Hash, Hasher};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::parser::symbol_table::SymbolScope;
 
use crate::protocol::input_source::ParseError;
 
use crate::protocol::parser::*;
 

	
 
//------------------------------------------------------------------------------
 
// Defined Types
 
//------------------------------------------------------------------------------
 

	
 
/// Struct wrapping around a potentially polymorphic type. If the type does not
 
/// have any polymorphic arguments then it will not have any monomorphs and
 
/// `is_polymorph` will be set to `false`. A type with polymorphic arguments
 
/// only has `is_polymorph` set to `true` if the polymorphic arguments actually
 
/// appear in the type's associated types (function return argument, struct
 
/// field, enum variant, etc.). Otherwise the polymorphic argument is just a
 
/// marker and does not influence the bytesize of the type.
 
#[allow(unused)]
 
pub struct DefinedType {
 
    pub(crate) ast_root: RootId,
 
    pub(crate) ast_definition: DefinitionId,
 
    pub(crate) definition: DefinedTypeVariant,
 
    pub(crate) poly_vars: Vec<PolymorphicVariable>,
 
    pub(crate) is_polymorph: bool,
 
}
 

	
 
pub enum DefinedTypeVariant {
 
    Enum(EnumType),
 
    Union(UnionType),
 
    Struct(StructType),
 
    Procedure(ProcedureType),
 
}
 

	
 
impl DefinedTypeVariant {
 
    pub(crate) fn is_data_type(&self) -> bool {
 
        use DefinedTypeVariant as DTV;
 

	
 
        match self {
 
            DTV::Struct(_) | DTV::Enum(_) | DTV::Union(_) => return true,
 
            DTV::Procedure(_) => return false,
 
        }
 
    }
 

	
 
    pub(crate) fn as_struct(&self) -> &StructType {
 
        match self {
 
            DefinedTypeVariant::Struct(v) => v,
 
            _ => unreachable!()
 
        }
 
    }
 

	
 
    pub(crate) fn as_enum(&self) -> &EnumType {
 
        match self {
 
            DefinedTypeVariant::Enum(v) => v,
 
            _ => unreachable!()
 
        }
 
    }
 

	
 
    pub(crate) fn as_union(&self) -> &UnionType {
 
        match self {
 
            DefinedTypeVariant::Union(v) => v,
 
            _ => unreachable!()
 
        }
 
    }
 
}
 

	
 
pub struct PolymorphicVariable {
 
    pub(crate) identifier: Identifier,
 
    pub(crate) is_in_use: bool, // a polymorphic argument may be defined, but not used by the type definition
 
}
 

	
 
/// `EnumType` is the classical C/C++ enum type. It has various variants with
 
/// an assigned integer value. The integer values may be user-defined,
 
/// compiler-defined, or a mix of the two. If a user assigns the same enum
 
/// value multiple times, we assume the user is an expert and we consider both
 
/// variants to be equal to one another.
 
pub struct EnumType {
 
    pub variants: Vec<EnumVariant>,
 
    pub minimum_tag_value: i64,
 
    pub maximum_tag_value: i64,
 
    pub tag_type: ConcreteType,
 
    pub size: usize,
 
    pub alignment: usize,
 
}
 

	
 
// TODO: Also support maximum u64 value
 
pub struct EnumVariant {
 
    pub identifier: Identifier,
 
    pub value: i64,
 
}
 

	
 
/// `UnionType` is the algebraic datatype (or sum type, or discriminated union).
 
/// A value is an element of the union, identified by its tag, and may contain
 
/// a single subtype.
 
/// For potentially infinite types (i.e. a tree, or a linked list) only unions
 
/// can break the infinite cycle. So when we lay out these unions in memory we
 
/// will reserve enough space on the stack for all union variants that do not
 
/// cause "type loops" (i.e. a union `A` with a variant containing a struct
 
/// `B`). And we will reserve enough space on the heap (and store a pointer in
 
/// the union) for all variants which do cause type loops (i.e. a union `A`
 
/// with a variant to a struct `B` that contains the union `A` again).
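///
/// For example (illustrative): on the stack a non-loop-breaking variant is laid
/// out as `[ tag | embedded values ]`, while a loop-breaking variant is laid out
/// as `[ tag | pointer ]`, with its embedded values living in the pointed-to
/// heap region (see `UnionMonomorphEmbedded` below).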
 
pub struct UnionType {
 
    pub variants: Vec<UnionVariant>,
 
    pub tag_type: ConcreteType,
 
    pub tag_size: usize,
 
}
 

	
 
pub struct UnionVariant {
 
    pub identifier: Identifier,
 
    pub embedded: Vec<ParserType>, // zero-length does not have embedded values
 
    pub tag_value: i64,
 
}
 

	
 
/// `StructType` is a generic C-like struct type (or record type, or product
 
/// type).
 
pub struct StructType {
 
    pub fields: Vec<StructField>,
 
}
 

	
 
pub struct StructField {
 
    pub identifier: Identifier,
 
    pub parser_type: ParserType,
 
}
 

	
 
/// `ProcedureType` is the signature of a procedure/component
 
pub struct ProcedureType {
 
    pub kind: ProcedureKind,
 
    pub return_type: Option<ParserType>,
 
    pub arguments: Vec<ProcedureArgument>,
 
}
 

	
 
pub struct ProcedureArgument {
 
    identifier: Identifier,
 
    parser_type: ParserType,
 
}
 

	
 
/// Represents the data associated with a single expression after type inference
 
/// for a monomorph (or just the normal expression types, if dealing with a
 
/// non-polymorphic function/component).
 
pub struct MonomorphExpression {
 
    // The output type of the expression. Note that for a function it is not the
 
    // function's signature but its return type
 
    pub(crate) expr_type: ConcreteType,
 
    // Has multiple meanings: the field index for select expressions, the
 
    // monomorph index for polymorphic function calls or literals. Negative
 
    // values are never valid, and serve only to catch programming errors.
 
    pub(crate) field_or_monomorph_idx: i32,
 
    pub(crate) type_id: TypeId,
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Type monomorph storage
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) enum MonoTypeVariant {
 
    Builtin, // no extra data, added manually in compiler initialization code
 
    Enum, // no extra data
 
    Struct(StructMonomorph),
 
    Union(UnionMonomorph),
 
    Procedure(ProcedureMonomorph), // functions, components
 
    Tuple(TupleMonomorph),
 
}
 

	
 
impl MonoTypeVariant {
 
    fn as_struct_mut(&mut self) -> &mut StructMonomorph {
 
        match self {
 
            MonoTypeVariant::Struct(v) => v,
 
            _ => unreachable!(),
 
        }
 
    }
 

	
 
    pub(crate) fn as_union(&self) -> &UnionMonomorph {
 
        match self {
 
            MonoTypeVariant::Union(v) => v,
 
            _ => unreachable!(),
 
        }
 
    }
 

	
 
    fn as_union_mut(&mut self) -> &mut UnionMonomorph {
 
        match self {
 
            MonoTypeVariant::Union(v) => v,
 
            _ => unreachable!(),
 
        }
 
    }
 

	
 
    fn as_tuple_mut(&mut self) -> &mut TupleMonomorph {
 
        match self {
 
            MonoTypeVariant::Tuple(v) => v,
 
            _ => unreachable!(),
 
        }
 
    }
 

	
 
    pub(crate) fn as_procedure(&self) -> &ProcedureMonomorph {
 
        match self {
 
            MonoTypeVariant::Procedure(v) => v,
 
            _ => unreachable!(),
 
        }
 
    }
 

	
 
    fn as_procedure_mut(&mut self) -> &mut ProcedureMonomorph {
 
        match self {
 
            MonoTypeVariant::Procedure(v) => v,
 
            _ => unreachable!(),
 
        }
 
    }
 
}
 

	
 
/// Struct monomorph
 
pub struct StructMonomorph {
 
    pub fields: Vec<StructMonomorphField>,
 
}
 

	
 
pub struct StructMonomorphField {
 
    pub type_id: TypeId,
 
    concrete_type: ConcreteType,
 
    pub size: usize,
 
    pub alignment: usize,
 
    pub offset: usize,
 
}
 

	
 
/// Union monomorph
 
pub struct UnionMonomorph {
 
    pub variants: Vec<UnionMonomorphVariant>,
 
    pub tag_size: usize, // copied from `UnionType` upon monomorph construction.
 
    // Note that the stack size lives in the `MonoType` struct. That size and
 
    // alignment include the size of the union tag.
 
    //
 
    // heap_size contains the allocated size of the union in the case it
 
    // is used to break a type loop. If it is 0, then it doesn't require
 
    // allocation and lives entirely on the stack.
 
    pub heap_size: usize,
 
    pub heap_alignment: usize,
 
}
 

	
 
pub struct UnionMonomorphVariant {
 
    pub lives_on_heap: bool,
 
    pub embedded: Vec<UnionMonomorphEmbedded>,
 
}
 

	
 
pub struct UnionMonomorphEmbedded {
 
    pub type_id: TypeId,
 
    concrete_type: ConcreteType,
 
    // Note that the meaning of the offset (and alignment) depend on whether or
 
    // not the variant lives on the stack/heap. If it lives on the stack then
 
    // they refer to the offset from the start of the union value (so the first
 
    // embedded type lives at a non-zero offset, because the union tag sits in
 
    // the front). If it lives on the heap then it refers to the offset from the
 
    // allocated memory region (so the first embedded type lives at a 0 offset).
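    //
    // Illustrative numbers (assuming standard alignment rules): with a 1-byte
    // tag and an 8-byte aligned embedded value, a stack-stored variant places
    // that value at offset 8, whereas a heap-stored variant places it at
    // offset 0 of its allocation.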
 
    pub size: usize,
 
    pub alignment: usize,
 
    pub offset: usize,
 
}
 

	
 
/// Procedure (functions and components of all possible types) monomorph. Also
 
/// stores the expression type data from the typechecking/inferencing pass.
 
pub struct ProcedureMonomorph {
 
    pub monomorph_index: u32,
 
    pub builtin: bool,
 
}
 

	
 
/// Tuple monomorph. Again a kind of exception, because one cannot define a
 
/// named tuple type containing explicit polymorphic variables. But we still
 
/// need to store size/offset/alignment information, so we do it here.
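/// For example (illustrative): a tuple `(u32, u8)` gets one `TupleMonomorph`
/// whose two members each record their resolved type, size, alignment and
/// offset within the tuple.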
 
pub struct TupleMonomorph {
 
    pub members: Vec<TupleMonomorphMember>
 
}
 

	
 
pub struct TupleMonomorphMember {
 
    pub type_id: TypeId,
 
    concrete_type: ConcreteType,
 
    pub size: usize,
 
    pub alignment: usize,
 
    pub offset: usize,
 
}
 

	
 
/// Generic unique type ID. Every monomorphed type and every non-polymorphic
 
/// type will have one of these associated with it.
 
#[derive(Debug, Clone, Copy, PartialEq)]
 
pub struct TypeId(i64);
 

	
 
impl TypeId {
 
    pub(crate) fn new_invalid() -> Self {
 
        return Self(-1);
 
    }
 
}
 

	
 
/// A monomorphed type's (or non-polymorphic type's) memory layout and information
 
/// regarding associated types (like a struct's field type).
 
pub struct MonoType {
 
    pub type_id: TypeId,
 
    pub concrete_type: ConcreteType,
 
    pub size: usize,
 
    pub alignment: usize,
 
    pub(crate) variant: MonoTypeVariant
 
}
 

	
 
impl MonoType {
 
    #[inline]
 
    fn new_empty(type_id: TypeId, concrete_type: ConcreteType, variant: MonoTypeVariant) -> Self {
 
        return Self {
 
            type_id, concrete_type,
 
            size: 0,
 
            alignment: 0,
 
            variant,
 
        }
 
    }
 

	
 
    /// Little internal helper function as a reminder: if alignment is 0, then
 
    /// the size/alignment are not actually computed yet!
 
    #[inline]
 
    fn get_size_alignment(&self) -> Option<(usize, usize)> {
 
        if self.alignment == 0 {
 
            return None
 
        } else {
 
            return Some((self.size, self.alignment));
 
        }
 
    }
 
}
 

	
 
/// Special structure that acts like the lookup key for `ConcreteType` instances
 
/// that have already been added to the type table.
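/// As a sketch of the encoding: for a hypothetical `Pair<u32, T>` in which the
/// second polymorphic argument is unused, the `Pair` and `u32` parts would be
/// flagged as in-use while the parts of the unused argument would not be
/// (checking that flag is currently disabled, see the `@Deduplication` markers
/// below), and the change bit flips at every pushed subtype so that subtype
/// boundaries can be recovered during hashing and comparison.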
 
#[derive(Clone)]
 
struct MonoSearchKey {
 
    // Uses bitflags to mark which parts should be checked when comparing
 
    // search keys, and where the subtype boundaries lie. A system like this
 
    // is needed to accommodate tuples.
 
    parts: Vec<(u8, ConcreteTypePart)>,
 
    change_bit: u8,
 
}
 

	
 
impl MonoSearchKey {
 
    const KEY_IN_USE: u8 = 0x01;
 
    const KEY_CHANGE_BIT: u8 = 0x02;
 

	
 
    fn with_capacity(capacity: usize) -> Self {
 
        return MonoSearchKey{
 
            parts: Vec::with_capacity(capacity),
 
            change_bit: 0,
 
        };
 
    }
 

	
 
    /// Sets the search key based on a single concrete type and its polymorphic
 
    /// variables.
 
    fn set(&mut self, concrete_type_parts: &[ConcreteTypePart], poly_var_in_use: &[PolymorphicVariable]) {
 
        self.set_top_type(concrete_type_parts[0]);
 

	
 
        let mut poly_var_index = 0;
 
        for subtype in ConcreteTypeIter::new(concrete_type_parts, 0) {
 
            let in_use = poly_var_in_use[poly_var_index].is_in_use;
 
            poly_var_index += 1;
 
            self.push_subtype(subtype, in_use);
 
        }
 

	
 
        debug_assert_eq!(poly_var_index, poly_var_in_use.len());
 
    }
 

	
 
    /// Starts setting the search key based on an initial top-level type. The
 
    /// programmer must call `push_subtype` the appropriate number of times
 
    /// after calling this function.
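    ///
    /// A usage sketch (hypothetical caller):
    ///
    ///     key.set_top_type(parts[0]);
    ///     key.push_subtype(&first_poly_arg_parts, true);
    ///     key.push_subtype(&second_poly_arg_parts, false);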
 
    fn set_top_type(&mut self, type_part: ConcreteTypePart) {
 
        self.parts.clear();
 
        self.parts.push((Self::KEY_IN_USE, type_part));
 
        self.change_bit = Self::KEY_CHANGE_BIT;
 
    }
 

	
 
    fn push_subtype(&mut self, concrete_type: &[ConcreteTypePart], in_use: bool) {
 
        let flag = self.change_bit | (if in_use { Self::KEY_IN_USE } else { 0 });
 

	
 
        for part in concrete_type {
 
            self.parts.push((flag, *part));
 
        }
 
        self.change_bit ^= Self::KEY_CHANGE_BIT;
 
    }
 

	
 
    fn push_subtree(&mut self, concrete_type: &[ConcreteTypePart], poly_var_in_use: &[PolymorphicVariable]) {
 
        self.parts.push((self.change_bit | Self::KEY_IN_USE, concrete_type[0]));
 
        self.change_bit ^= Self::KEY_CHANGE_BIT;
 

	
 
        let mut poly_var_index = 0;
 
        for subtype in ConcreteTypeIter::new(concrete_type, 0) {
 
            let in_use = poly_var_in_use[poly_var_index].is_in_use;
 
            poly_var_index += 1;
 
            self.push_subtype(subtype, in_use);
 
        }
 

	
 
        debug_assert_eq!(poly_var_index, poly_var_in_use.len());
 
    }
 

	
 
    // Utilities for hashing and comparison
 
    fn find_end_index(&self, start_index: usize) -> usize {
 
        // Check if we're already at the end
 
        let mut index = start_index;
 
        if index >= self.parts.len() {
 
            return index;
 
        }
 

	
 
        // Iterate until bit flips, or until at end
 
        let expected_bit = self.parts[index].0 & Self::KEY_CHANGE_BIT;
 

	
 
        index += 1;
 
        while index < self.parts.len() {
 
            let current_bit = self.parts[index].0 & Self::KEY_CHANGE_BIT;
 
            if current_bit != expected_bit {
 
                return index;
 
            }
 

	
 
            index += 1;
 
        }
 

	
 
        return index;
 
    }
 
}
 

	
 
impl Hash for MonoSearchKey {
 
    fn hash<H: Hasher>(&self, state: &mut H) {
 
        for index in 0..self.parts.len() {
 
            let (_flags, part) = self.parts[index];
 
            // if flags & Self::KEY_IN_USE != 0 { @Deduplication
 
            part.hash(state);
 
            // }
 
        }
 
    }
 
}
 

	
 
impl PartialEq for MonoSearchKey {
 
    fn eq(&self, other: &Self) -> bool {
 
        let mut self_index = 0;
 
        let mut other_index = 0;
 

	
 
        while self_index < self.parts.len() && other_index < other.parts.len() {
 
            // Retrieve part and flags
 
            let (_self_bits, _) = self.parts[self_index];
 
            let (_other_bits, _) = other.parts[other_index];
 
            let self_in_use = true; // (self_bits & Self::KEY_IN_USE) != 0; @Deduplication
 
            let other_in_use = true; // (other_bits & Self::KEY_IN_USE) != 0; @Deduplication
 

	
 
            // Determine ending indices
 
            let self_end_index = self.find_end_index(self_index);
 
            let other_end_index = other.find_end_index(other_index);
 

	
 
            if self_in_use == other_in_use {
 
                if self_in_use {
 
                    // Both are in use, so both parts should be equal
 
                    let delta_self = self_end_index - self_index;
 
                    let delta_other = other_end_index - other_index;
 
                    if delta_self != delta_other {
 
                        // Both in use, but not of equal length, so the types
 
                        // cannot match
 
                        return false;
 
                    }
 

	
 
                    for _ in 0..delta_self {
 
                        let (_, self_part) = self.parts[self_index];
 
                        let (_, other_part) = other.parts[other_index];
 

	
 
                        if self_part != other_part {
 
                            return false;
 
                        }
 

	
 
                        self_index += 1;
 
                        other_index += 1;
 
                    }
 
                } else {
 
                    // Both not in use, so skip associated parts
 
                    self_index = self_end_index;
 
                    other_index = other_end_index;
 
                }
 
            } else {
 
                // No agreement on importance of parts. This is practically
 
                // impossible
 
                unreachable!();
 
            }
 
        }
 

	
 
        // Everything matched, so if we're at the end of both arrays then we're
 
        // certain that the two keys are equal.
 
        return self_index == self.parts.len() && other_index == other.parts.len();
 
    }
 
}
 

	
 
impl Eq for MonoSearchKey{}
 

	
 
//------------------------------------------------------------------------------
 
// Type table
 
//------------------------------------------------------------------------------
 

	
 
const POLY_VARS_IN_USE: [PolymorphicVariable; 1] = [PolymorphicVariable{ identifier: Identifier::new_empty(InputSpan::new()), is_in_use: true }];
 

	
 
// Programmer note: keep this struct free of dynamically allocated memory
 
#[derive(Clone)]
 
struct TypeLoopBreadcrumb {
 
    type_id: TypeId,
 
    next_member: u32,
 
    next_embedded: u32, // for unions, the index into the variant's embedded types
 
}
 

	
 
// Programmer note: keep this struct free of dynamically allocated memory
 
#[derive(Clone)]
 
struct MemoryBreadcrumb {
 
    type_id: TypeId,
 
    next_member: u32,
 
    next_embedded: u32,
 
    first_size_alignment_idx: u32,
 
}
 

	
 
#[derive(Debug, PartialEq, Eq)]
 
enum TypeLoopResult {
 
    TypeExists,
 
    PushBreadcrumb(DefinitionId, ConcreteType),
 
    TypeLoop(usize), // index into vec of breadcrumbs at which the type matched
 
}
 

	
 
enum MemoryLayoutResult {
 
    TypeExists(usize, usize), // (size, alignment)
 
    PushBreadcrumb(MemoryBreadcrumb),
 
}
 

	
 
// TODO: @Optimize, initial memory-unoptimized implementation
 
struct TypeLoopEntry {
 
    type_id: TypeId,
 
    is_union: bool,
 
}
 

	
 
struct TypeLoop {
 
    members: Vec<TypeLoopEntry>,
 
}
 

	
 
type DefinitionMap = HashMap<DefinitionId, DefinedType>;
 
type MonoTypeMap = HashMap<MonoSearchKey, TypeId>;
 
type MonoTypeArray = Vec<MonoType>;
 

	
 
pub struct TypeTable {
 
    // Lookup from AST DefinitionId to a defined type, and lookups from
 
    // concrete types to their monomorphs.
 
    pub(crate) definition_lookup: DefinitionMap,
 
    mono_type_lookup: MonoTypeMap,
 
    pub(crate) mono_types: MonoTypeArray,
 
    mono_search_key: MonoSearchKey,
 
    // Breadcrumbs left behind while trying to find type loops. Also used to
 
    // determine sizes of types when all type loops are detected.
 
    type_loop_breadcrumbs: Vec<TypeLoopBreadcrumb>,
 
    type_loops: Vec<TypeLoop>,
 
    // Stores all encountered types during type loop detection. Used afterwards
 
    // to iterate over all types in order to compute size/alignment.
 
    encountered_types: Vec<TypeLoopEntry>,
 
    // Breadcrumbs and temporary storage during memory layout computation.
 
    memory_layout_breadcrumbs: Vec<MemoryBreadcrumb>,
 
    size_alignment_stack: Vec<(usize, usize)>,
 
}
 

	
 
impl TypeTable {
 
    /// Construct a new type table without any resolved types.
 
    pub(crate) fn new() -> Self {
 
        Self{ 
 
            definition_lookup: HashMap::with_capacity(128),
 
            mono_type_lookup: HashMap::with_capacity(128),
 
            mono_types: Vec::with_capacity(128),
 
            mono_search_key: MonoSearchKey::with_capacity(32),
 
            type_loop_breadcrumbs: Vec::with_capacity(32),
 
            type_loops: Vec::with_capacity(8),
 
            encountered_types: Vec::with_capacity(32),
 
            memory_layout_breadcrumbs: Vec::with_capacity(32),
 
            size_alignment_stack: Vec::with_capacity(64),
 
        }
 
    }
 

	
 
    /// Iterates over all defined types (polymorphic and non-polymorphic) and
 
    /// adds their types in two passes. In the first pass we will just add the
 
    /// base types (we will not consider monomorphs, and we will not compute
 
    /// byte sizes). In the second pass we will compute byte sizes of
 
    /// non-polymorphic types, and potentially the monomorphs that are embedded
 
    /// in those types.
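    ///
    /// For example (illustrative): if a non-polymorphic struct embeds a
    /// `Pair<u32,u8>` field, the second pass lays out that struct and, in
    /// doing so, also instantiates and lays out the `Pair<u32,u8>` monomorph.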
 
    pub(crate) fn build_base_types(&mut self, modules: &mut [Module], ctx: &mut PassCtx) -> Result<(), ParseError> {
 
        // Make sure we're allowed to cast root_id to index into ctx.modules
 
        debug_assert!(modules.iter().all(|m| m.phase >= ModuleCompilationPhase::DefinitionsParsed));
 
        debug_assert!(self.definition_lookup.is_empty());
 

	
 
        dbg_code!({
 
            for (index, module) in modules.iter().enumerate() {
 
                debug_assert_eq!(index, module.root_id.index as usize);
 
            }
 
        });
 

	
 
        // Use context to guess hashmap size of the base types
 
        let reserve_size = ctx.heap.definitions.len();
 
        self.definition_lookup.reserve(reserve_size);
 

	
 
        // Resolve all base types
 
        for definition_idx in 0..ctx.heap.definitions.len() {
 
            let definition_id = ctx.heap.definitions.get_id(definition_idx);
 
            let definition = &ctx.heap[definition_id];
 

	
 
            match definition {
 
                Definition::Enum(_) => self.build_base_enum_definition(modules, ctx, definition_id)?,
 
                Definition::Union(_) => self.build_base_union_definition(modules, ctx, definition_id)?,
 
                Definition::Struct(_) => self.build_base_struct_definition(modules, ctx, definition_id)?,
 
                Definition::Procedure(_) => self.build_base_procedure_definition(modules, ctx, definition_id)?,
 
            }
 
        }
 

	
 
        debug_assert_eq!(self.definition_lookup.len(), reserve_size, "mismatch in reserved size of type table");
 
        for module in modules.iter_mut() {
 
            module.phase = ModuleCompilationPhase::TypesAddedToTable;
 
        }
 

	
 
        // Go through all types again, lay out all types that are not
 
        // polymorphic. This might cause us to lay out monomorphized polymorphs
 
        // if these were member types of non-polymorphic types.
 
        for definition_idx in 0..ctx.heap.definitions.len() {
 
            let definition_id = ctx.heap.definitions.get_id(definition_idx);
 
            let poly_type = self.definition_lookup.get(&definition_id).unwrap();
 

	
 
            if !poly_type.definition.is_data_type() || !poly_type.poly_vars.is_empty() {
 
                continue;
 
            }
 

	
 
            // If here then the type is a data type without polymorphic
 
            // variables, but we might have instantiated it already, so:
 
            let concrete_parts = [ConcreteTypePart::Instance(definition_id, 0)];
 
            self.mono_search_key.set(&concrete_parts, &[]);
 
            let type_id = self.mono_type_lookup.get(&self.mono_search_key);
 
            if type_id.is_none() {
 
                self.detect_and_resolve_type_loops_for(
 
                    modules, ctx.heap, ctx.arch,
 
                    ConcreteType{
 
                        parts: vec![ConcreteTypePart::Instance(definition_id, 0)]
 
                    },
 
                )?;
 
                self.lay_out_memory_for_encountered_types(ctx.arch);
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    /// Retrieves base definition from type table. We must be able to retrieve
 
    /// it as we resolve all base types upon type table construction (for now).
 
    /// However, in the future we might do on-demand type resolving, so return
 
    /// an option anyway
 
    #[inline]
 
    pub(crate) fn get_base_definition(&self, definition_id: &DefinitionId) -> Option<&DefinedType> {
 
        self.definition_lookup.get(&definition_id)
 
    }
 

	
 
    /// Returns the index into the monomorph type array if the procedure type
 
    /// already has a (reserved) monomorph.
 
    #[inline]
 
    pub(crate) fn get_procedure_monomorph_type_id(&self, definition_id: &DefinitionId, type_parts: &[ConcreteTypePart]) -> Option<TypeId> {
 
        // Cannot use internal search key due to mutability issues. But this
 
        // method should end up being deprecated at some point anyway.
 
        debug_assert_eq!(get_concrete_type_definition(type_parts).unwrap(), *definition_id);
 
        let base_type = self.definition_lookup.get(definition_id).unwrap();
 
        let mut search_key = MonoSearchKey::with_capacity(type_parts.len());
 
        search_key.set(type_parts, &base_type.poly_vars);
 

	
 
        return self.mono_type_lookup.get(&search_key).copied();
 
    }
 

	
 
    #[inline]
 
    pub(crate) fn get_monomorph(&self, type_id: TypeId) -> &MonoType {
 
        return &self.mono_types[type_id.0 as usize];
 
    }
 

	
 
    /// Reserves space for a monomorph of a polymorphic procedure. The index
 
    /// will point into a (reserved) slot of the array of expression types. The
 
    /// monomorph may NOT exist yet (because the reservation implies that we're
 
    /// going to be performing typechecking on it, and we don't want to
 
    /// check the same monomorph twice)
 
    pub(crate) fn reserve_procedure_monomorph_type_id(&mut self, definition_id: &DefinitionId, concrete_type: ConcreteType, monomorph_index: u32) -> TypeId {
 
        debug_assert_eq!(get_concrete_type_definition(&concrete_type.parts).unwrap(), *definition_id);
 
        let type_id = TypeId(self.mono_types.len() as i64);
 
        let base_type = self.definition_lookup.get_mut(definition_id).unwrap();
 
        self.mono_search_key.set(&concrete_type.parts, &base_type.poly_vars);
 

	
 
        debug_assert!(!self.mono_type_lookup.contains_key(&self.mono_search_key));
 
        self.mono_type_lookup.insert(self.mono_search_key.clone(), type_id);
 
        self.mono_types.push(MonoType::new_empty(type_id, concrete_type, MonoTypeVariant::Procedure(ProcedureMonomorph{
 
            monomorph_index,
 
            builtin: false,
 
        })));
 

	
 
        return type_id;
 
    }
 

	
 
    /// Adds a builtin type to the type table. As this is only called by the
 
    /// compiler during setup we assume it cannot fail.
 
    pub(crate) fn add_builtin_data_type(&mut self, concrete_type: ConcreteType, poly_vars: &[PolymorphicVariable], size: usize, alignment: usize) -> TypeId {
 
        self.mono_search_key.set(&concrete_type.parts, poly_vars);
 
        debug_assert!(!self.mono_type_lookup.contains_key(&self.mono_search_key));
 
        debug_assert_ne!(alignment, 0);
 
        let type_id = TypeId(self.mono_types.len() as i64);
 
        self.mono_type_lookup.insert(self.mono_search_key.clone(), type_id);
 
        self.mono_types.push(MonoType{
 
            type_id,
 
            concrete_type,
 
            size,
 
            alignment,
 
            variant: MonoTypeVariant::Builtin,
 
        });
 

	
 
        return type_id;
 
    }
 

	
 
    /// Adds a builtin procedure to the type table.
 
    pub(crate) fn add_builtin_procedure_type(&mut self, concrete_type: ConcreteType, poly_vars: &[PolymorphicVariable]) -> TypeId {
 
        self.mono_search_key.set(&concrete_type.parts, poly_vars);
 
        debug_assert!(!self.mono_type_lookup.contains_key(&self.mono_search_key));
 
        let type_id = TypeId(self.mono_types.len() as i64);
 
        self.mono_type_lookup.insert(self.mono_search_key.clone(), type_id);
 
        self.mono_types.push(MonoType{
 
            type_id,
 
            concrete_type,
 
            size: 0,
 
            alignment: 0,
 
            variant: MonoTypeVariant::Procedure(ProcedureMonomorph{
 
                monomorph_index: u32::MAX,
 
                builtin: true,
 
            })
 
        });
 

	
 
        return type_id;
 
    }
 

	
 
    /// Adds a monomorphed type to the type table. If it already exists then the
 
    /// previous entry will be used.
 
    pub(crate) fn add_monomorphed_type(
 
        &mut self, modules: &[Module], heap: &Heap, arch: &TargetArch, concrete_type: ConcreteType
 
    ) -> Result<TypeId, ParseError> {
 
        // Check if the concrete type was already added
 
        Self::set_search_key_to_type(&mut self.mono_search_key, &self.definition_lookup, &concrete_type.parts);
 
        if let Some(type_id) = self.mono_type_lookup.get(&self.mono_search_key) {
 
            return Ok(*type_id);
 
        }
 

	
 
        // Concrete type needs to be added
 
        self.detect_and_resolve_type_loops_for(modules, heap, arch, concrete_type)?;
 
        let type_id = self.encountered_types[0].type_id;
 
        self.lay_out_memory_for_encountered_types(arch);
 

	
 
        return Ok(type_id);
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Building base types
 
    //--------------------------------------------------------------------------
 

	
 
    /// Builds the base type for an enum. The enum's byte size follows directly from its tag type.
 
    fn build_base_enum_definition(&mut self, modules: &[Module], ctx: &mut PassCtx, definition_id: DefinitionId) -> Result<(), ParseError> {
 
        debug_assert!(!self.definition_lookup.contains_key(&definition_id), "base enum already built");
 
        let definition = ctx.heap[definition_id].as_enum();
 
        let root_id = definition.defined_in;
 

	
 
        // Determine enum variants
 
        let mut enum_value = -1;
 
        let mut variants = Vec::with_capacity(definition.variants.len());
 

	
 
        for variant in &definition.variants {
 
            if enum_value == i64::MAX {
 
                let source = &modules[definition.defined_in.index as usize].source;
 
                return Err(ParseError::new_error_str_at_span(
 
                    source, variant.identifier.span,
 
                    "this enum variant has an integer value that is too large"
 
                ));
 
            }
 

	
 
            enum_value += 1;
 
            if let EnumVariantValue::Integer(explicit_value) = variant.value {
 
                enum_value = explicit_value;
 
            }
 

	
 
            variants.push(EnumVariant{
 
                identifier: variant.identifier.clone(),
 
                value: enum_value,
 
            });
 
        }
 

	
 
        // Determine tag size
 
        let mut min_enum_value = 0;
 
        let mut max_enum_value = 0;
 
        if !variants.is_empty() {
 
            min_enum_value = variants[0].value;
 
            max_enum_value = variants[0].value;
 
            for variant in variants.iter().skip(1) {
 
                min_enum_value = min_enum_value.min(variant.value);
 
                max_enum_value = max_enum_value.max(variant.value);
 
            }
 
        }
 

	
 
        let (tag_type, size_and_alignment) = Self::variant_tag_type_from_values(min_enum_value, max_enum_value);
 

	
 
        // Make sure enum variant names and polymorphic args do not conflict
 
        Self::check_identifier_collision(
 
            modules, root_id, &variants, |variant| &variant.identifier, "enum variant"
 
        )?;
 

	
 
        // Polymorphic arguments cannot appear as embedded types, because
 
        // enum variants can only be integer values.
 
        Self::check_poly_args_collision(modules, ctx, root_id, &definition.poly_vars)?;
 
        let poly_vars = Self::create_polymorphic_variables(&definition.poly_vars);
 

	
 
        self.definition_lookup.insert(definition_id, DefinedType {
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Enum(EnumType{
 
                variants,
 
                minimum_tag_value: min_enum_value,
 
                maximum_tag_value: max_enum_value,
 
                tag_type,
 
                size: size_and_alignment,
 
                alignment: size_and_alignment
 
            }),
 
            poly_vars,
 
            is_polymorph: false,
 
        });
 

	
 
        return Ok(());
 
    }
 

	
 
    /// Builds the base type for a union. Only the tag's byte size is computed here.
 
    fn build_base_union_definition(&mut self, modules: &[Module], ctx: &mut PassCtx, definition_id: DefinitionId) -> Result<(), ParseError> {
 
        debug_assert!(!self.definition_lookup.contains_key(&definition_id), "base union already built");
 
        let definition = ctx.heap[definition_id].as_union();
 
        let root_id = definition.defined_in;
 

	
 
        // Check all variants and their embedded types
 
        let mut variants = Vec::with_capacity(definition.variants.len());
 
        let mut tag_counter = 0;
 
        for variant in &definition.variants {
 
            for embedded in &variant.value {
 
                Self::check_member_parser_type(
 
                    modules, ctx, root_id, embedded, false
 
                )?;
 
            }
 

	
 
            variants.push(UnionVariant{
 
                identifier: variant.identifier.clone(),
 
                embedded: variant.value.clone(),
 
                tag_value: tag_counter,
 
            });
 
            tag_counter += 1;
 
        }
 

	
 
        let mut max_tag_value = 0;
 
        if tag_counter != 0 {
 
            max_tag_value = tag_counter - 1
 
        }
 

	
 
        let (tag_type, tag_size) = Self::variant_tag_type_from_values(0, max_tag_value);
 

	
 
        // Make sure there are no conflicts in identifiers
 
        Self::check_identifier_collision(
 
            modules, root_id, &variants, |variant| &variant.identifier, "union variant"
 
        )?;
 
        Self::check_poly_args_collision(modules, ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct internal representation of union
 
        let mut poly_vars = Self::create_polymorphic_variables(&definition.poly_vars);
 
        for variant in &definition.variants {
 
            for embedded in &variant.value {
 
                Self::mark_used_polymorphic_variables(&mut poly_vars, embedded);
 
            }
 
        }
 

	
 
        let is_polymorph = poly_vars.iter().any(|arg| arg.is_in_use);
 

	
 
        self.definition_lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Union(UnionType{ variants, tag_type, tag_size }),
 
            poly_vars,
 
            is_polymorph
 
        });
 

	
 
        return Ok(());
 
    }
 

	
 
    /// Builds base struct type. Will not compute byte sizes.
 
    fn build_base_struct_definition(&mut self, modules: &[Module], ctx: &mut PassCtx, definition_id: DefinitionId) -> Result<(), ParseError> {
 
        debug_assert!(!self.definition_lookup.contains_key(&definition_id), "base struct already built");
 
        let definition = ctx.heap[definition_id].as_struct();
 
        let root_id = definition.defined_in;
 

	
 
        // Check all struct fields and construct internal representation
 
        let mut fields = Vec::with_capacity(definition.fields.len());
 

	
 
        for field in &definition.fields {
 
            Self::check_member_parser_type(
 
                modules, ctx, root_id, &field.parser_type, false
 
            )?;
 

	
 
            fields.push(StructField{
 
                identifier: field.field.clone(),
 
                parser_type: field.parser_type.clone(),
 
            });
 
        }
 

	
 
        // Make sure there are no conflicting variables
 
        Self::check_identifier_collision(
 
            modules, root_id, &fields, |field| &field.identifier, "struct field"
 
        )?;
 
        Self::check_poly_args_collision(modules, ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct base type in table
 
        let mut poly_vars = Self::create_polymorphic_variables(&definition.poly_vars);
 
        for field in &fields {
 
            Self::mark_used_polymorphic_variables(&mut poly_vars, &field.parser_type);
 
        }
 

	
 
        let is_polymorph = poly_vars.iter().any(|arg| arg.is_in_use);
 

	
 
        self.definition_lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Struct(StructType{ fields }),
 
            poly_vars,
 
            is_polymorph
 
        });
 

	
 
        return Ok(())
 
    }
 

	
 
    /// Builds base procedure type.
 
    fn build_base_procedure_definition(&mut self, modules: &[Module], ctx: &mut PassCtx, definition_id: DefinitionId) -> Result<(), ParseError> {
 
        debug_assert!(!self.definition_lookup.contains_key(&definition_id), "base function already built");
 
        let definition = ctx.heap[definition_id].as_procedure();
 
        let root_id = definition.defined_in;
 

	
 
        // Check and construct return types and argument types.
 
        if let Some(return_type) = &definition.return_type {
 
            Self::check_member_parser_type(
 
                modules, ctx, root_id, return_type, definition.builtin
 
                modules, ctx, root_id, return_type, definition.source.is_builtin()
 
            )?;
 
        }
 

	
 
        let mut arguments = Vec::with_capacity(definition.parameters.len());
 
        for parameter_id in &definition.parameters {
 
            let parameter = &ctx.heap[*parameter_id];
 
            Self::check_member_parser_type(
 
                modules, ctx, root_id, &parameter.parser_type, definition.builtin
 
                modules, ctx, root_id, &parameter.parser_type, definition.source.is_builtin()
 
            )?;
 

	
 
            arguments.push(ProcedureArgument{
 
                identifier: parameter.identifier.clone(),
 
                parser_type: parameter.parser_type.clone(),
 
            });
 
        }
 

	
 
        // Check conflict of identifiers
 
        Self::check_identifier_collision(
 
            modules, root_id, &arguments, |arg| &arg.identifier, "procedure argument"
 
        )?;
 
        Self::check_poly_args_collision(modules, ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct internal representation of function type
 
        // TODO: Marking used polymorphic variables should take statements in
 
        //  the body into account. But currently we don't. Hence mark them all
 
        //  as being in-use. Note to self: true condition should be that the
 
        //  polymorphic variables are used in places where the resulting types
 
        //  are themselves truly polymorphic types (e.g. not a phantom type).
 
        let mut poly_vars = Self::create_polymorphic_variables(&definition.poly_vars);
 
        for poly_var in &mut poly_vars {
 
            poly_var.is_in_use = true;
 
        }
 

	
 
        let is_polymorph = poly_vars.iter().any(|arg| arg.is_in_use);
 

	
 
        self.definition_lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Procedure(ProcedureType{
 
                kind: definition.kind,
 
                return_type: definition.return_type.clone(),
 
                arguments
 
            }),
 
            poly_vars,
 
            is_polymorph
 
        });
 

	
 
        return Ok(());
 
    }
 

	
 
    /// Will check if the member type (field of a struct, embedded type in a
 
    /// union variant) is valid.
 
    fn check_member_parser_type(
 
        modules: &[Module], ctx: &PassCtx, base_definition_root_id: RootId,
 
        member_parser_type: &ParserType, allow_special_compiler_types: bool
 
    ) -> Result<(), ParseError> {
 
        use ParserTypeVariant as PTV;
 

	
 
        for element in &member_parser_type.elements {
 
            match element.variant {
 
                // Special cases
 
                PTV::Void | PTV::InputOrOutput | PTV::ArrayLike | PTV::IntegerLike => {
 
                    if !allow_special_compiler_types {
 
                        unreachable!("compiler-only ParserTypeVariant in member type");
 
                    }
 
                },
 
                // Builtin types, always valid
 
                PTV::Message | PTV::Bool |
 
                PTV::UInt8 | PTV::UInt16 | PTV::UInt32 | PTV::UInt64 |
 
                PTV::SInt8 | PTV::SInt16 | PTV::SInt32 | PTV::SInt64 |
 
                PTV::Character | PTV::String |
 
                PTV::Array | PTV::Input | PTV::Output | PTV::Tuple(_) |
 
                // Likewise, polymorphic variables are always valid
 
                PTV::PolymorphicArgument(_, _) => {},
 
                // Types that are not constructable, or types that are not
 
                // allowed (and checked earlier)
 
                PTV::IntegerLiteral | PTV::Inferred => {
 
                    unreachable!("illegal ParserTypeVariant within type definition");
 
                },
 
                // Finally, user-defined types
 
                PTV::Definition(definition_id, _) => {
 
                    let definition = &ctx.heap[definition_id];
 
                    if !(definition.is_struct() || definition.is_enum() || definition.is_union()) {
 
                        let source = &modules[base_definition_root_id.index as usize].source;
 
                        return Err(ParseError::new_error_str_at_span(
 
                            source, element.element_span, "expected a datatype (a struct, enum or union)"
 
                        ));
 
                    }
 

	
 
                    // Otherwise, we're fine
 
                }
 
            }
 
        }
 

	
 
        // If here, then all elements check out
 
        return Ok(());
 
    }
 

	
 
    /// Go through a list of identifiers and ensure that all identifiers have
 
    /// unique names
 
    fn check_identifier_collision<T: Sized, F: Fn(&T) -> &Identifier>(
 
        modules: &[Module], root_id: RootId, items: &[T], getter: F, item_name: &'static str
 
    ) -> Result<(), ParseError> {
 
        for (item_idx, item) in items.iter().enumerate() {
 
            let item_ident = getter(item);
 
            for other_item in &items[0..item_idx] {
 
                let other_item_ident = getter(other_item);
 
                if item_ident == other_item_ident {
 
                    let module_source = &modules[root_id.index as usize].source;
 
                    return Err(ParseError::new_error_at_span(
 
                        module_source, item_ident.span, format!("This {} is defined more than once", item_name)
 
                    ).with_info_at_span(
 
                        module_source, other_item_ident.span, format!("The other {} is defined here", item_name)
 
                    ));
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    /// Go through a list of polymorphic arguments and make sure that the
 
    /// arguments all have unique names, and the arguments do not conflict with
 
    /// any symbols defined at the module scope.
 
    fn check_poly_args_collision(
 
        modules: &[Module], ctx: &PassCtx, root_id: RootId, poly_args: &[Identifier]
 
    ) -> Result<(), ParseError> {
 
        // Make sure polymorphic arguments are unique and none of the
 
        // identifiers conflict with any imported scopes
 
        for (arg_idx, poly_arg) in poly_args.iter().enumerate() {
 
            for other_poly_arg in &poly_args[..arg_idx] {
 
                if poly_arg == other_poly_arg {
 
                    let module_source = &modules[root_id.index as usize].source;
 
                    return Err(ParseError::new_error_str_at_span(
 
                        module_source, poly_arg.span,
 
                        "This polymorphic argument is defined more than once"
 
                    ).with_info_str_at_span(
 
                        module_source, other_poly_arg.span,
 
                        "It conflicts with this polymorphic argument"
 
                    ));
 
                }
 
            }
 

	
 
            // Check if identifier conflicts with a symbol defined or imported
 
            // in the current module
 
            if let Some(symbol) = ctx.symbols.get_symbol_by_name(SymbolScope::Module(root_id), poly_arg.value.as_bytes()) {
 
                // We have a conflict
 
                let module_source = &modules[root_id.index as usize].source;
 
                let introduction_span = symbol.variant.span_of_introduction(ctx.heap);
 
                return Err(ParseError::new_error_str_at_span(
 
                    module_source, poly_arg.span,
 
                    "This polymorphic argument conflicts with another symbol"
 
                ).with_info_str_at_span(
 
                    module_source, introduction_span,
 
                    "It conflicts due to this symbol"
 
                ));
 
            }
 
        }
 

	
 
        // All arguments are fine
 
        Ok(())
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Detecting type loops
 
    //--------------------------------------------------------------------------
 

	
 
    /// Internal function that will detect type loops and check if they're
 
    /// resolvable. If so then the appropriate union variants will be marked as
 
    /// "living on heap". If not then a `ParseError` will be returned
 
    fn detect_and_resolve_type_loops_for(&mut self, modules: &[Module], heap: &Heap, arch: &TargetArch, concrete_type: ConcreteType) -> Result<(), ParseError> {
 
        // Programmer notes: what happens here is that we call
 
        // `check_member_for_type_loops` for a particular type's member, and
 
        // then take action using the return value:
 
        // 1. It might already be resolved: in this case it implies we don't
 
        //  have type loops, or they have been resolved.
 
        // 2. A new type is encountered. If so then it is added to the type loop
 
        //  breadcrumbs.
 
        // 3. A type loop is detected (implying the type is already resolved, or
 
        //  already exists in the type loop breadcrumbs).
 
        //
 
        // Using the breadcrumbs we incrementally check every member type of a
 
        // particular considered type (e.g. a struct field, tuple member), and
 
        // do the same as above. Note that when a breadcrumb is added we reserve
 
        // space in the monomorph storage, initialized to zero-values (i.e.
 
        // wrong values). The breadcrumbs keep track of how far along we are
 
        // with resolving the member types.
 
        //
 
        // At the end we may have some type loops. If they're unresolvable then
 
        // we throw an error. If there are no type loops or they are all
 
        // resolvable then we end up with a list of `encountered_types`. These
 
        // are then used by `lay_out_memory_for_encountered_types`.
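        //
        // A concrete sketch: resolving a struct `S` with fields of types `A`
        // and `B` pushes a breadcrumb for `S`, recurses into `A` (pushing and
        // later popping its breadcrumb), then continues with the next field
        // and recurses into `B`. If `B` in turn embeds `S`, the lookup finds
        // `S` in the breadcrumb stack and reports a type loop at that index.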
 
        debug_assert!(self.type_loop_breadcrumbs.is_empty());
 
        debug_assert!(self.type_loops.is_empty());
 
        debug_assert!(self.encountered_types.is_empty());
 

	
 
        // Push the initial breadcrumb
 
        let initial_breadcrumb = Self::check_member_for_type_loops(
 
            &self.type_loop_breadcrumbs, &self.definition_lookup, &self.mono_type_lookup,
 
            &mut self.mono_search_key, &concrete_type
 
        );
 

	
 
        if let TypeLoopResult::PushBreadcrumb(definition_id, concrete_type) = initial_breadcrumb {
 
            self.handle_new_breadcrumb_for_type_loops(arch, definition_id, concrete_type);
 
        } else {
 
            unreachable!()
 
        };
 

	
 
        // Enter into the main resolving loop
 
        while !self.type_loop_breadcrumbs.is_empty() {
 
            // Because we might be modifying the breadcrumb array we work on a clone of the current breadcrumb
 
            let breadcrumb_idx = self.type_loop_breadcrumbs.len() - 1;
 
            let mut breadcrumb = self.type_loop_breadcrumbs[breadcrumb_idx].clone();
 

	
 
            let mono_type = &self.mono_types[breadcrumb.type_id.0 as usize];
 
            let resolve_result = match &mono_type.variant {
 
                MonoTypeVariant::Builtin => {
 
                    TypeLoopResult::TypeExists
 
                }
 
                MonoTypeVariant::Enum => {
 
                    TypeLoopResult::TypeExists
 
                },
 
                MonoTypeVariant::Union(monomorph) => {
 
                    let num_variants = monomorph.variants.len() as u32;
 
                    let mut union_result = TypeLoopResult::TypeExists;
 

	
 
                    'member_loop: while breadcrumb.next_member < num_variants {
 
                        let mono_variant = &monomorph.variants[breadcrumb.next_member as usize];
 
                        let num_embedded = mono_variant.embedded.len() as u32;
 

	
 
                        while breadcrumb.next_embedded < num_embedded {
 
                            let mono_embedded = &mono_variant.embedded[breadcrumb.next_embedded as usize];
 
                            union_result = Self::check_member_for_type_loops(
 
                                &self.type_loop_breadcrumbs, &self.definition_lookup, &self.mono_type_lookup,
 
                                &mut self.mono_search_key, &mono_embedded.concrete_type
 
                            );
 

	
 
                            if union_result != TypeLoopResult::TypeExists {
 
                                // In type loop or new breadcrumb pushed, so
 
                                // break out of the resolving loop
 
                                break 'member_loop;
 
                            }
 

	
 
                            breadcrumb.next_embedded += 1;
 
                        }
 

	
 
                        breadcrumb.next_embedded = 0;
 
                        breadcrumb.next_member += 1
 
                    }
 

	
 
                    union_result
 
                },
 
                MonoTypeVariant::Struct(monomorph) => {
 
                    let num_fields = monomorph.fields.len() as u32;
 

	
 
                    let mut struct_result = TypeLoopResult::TypeExists;
 
                    while breadcrumb.next_member < num_fields {
 
                        let mono_field = &monomorph.fields[breadcrumb.next_member as usize];
 
                        struct_result = Self::check_member_for_type_loops(
 
                            &self.type_loop_breadcrumbs, &self.definition_lookup, &self.mono_type_lookup,
 
                            &mut self.mono_search_key, &mono_field.concrete_type
 
                        );
 

	
 
                        if struct_result != TypeLoopResult::TypeExists {
 
                            // Type loop or breadcrumb pushed, so break out of
 
                            // the resolving loop
 
                            break;
 
                        }
 

	
 
                        breadcrumb.next_member += 1;
 
                    }
 

	
 
                    struct_result
 
                },
 
                MonoTypeVariant::Procedure(_) => unreachable!(),
 
                MonoTypeVariant::Tuple(monomorph) => {
 
                    let num_members = monomorph.members.len() as u32;
 
                    let mut tuple_result = TypeLoopResult::TypeExists;
 

	
 
                    while breadcrumb.next_member < num_members {
 
                        let tuple_member = &monomorph.members[breadcrumb.next_member as usize];
 
                        tuple_result = Self::check_member_for_type_loops(
 
                            &self.type_loop_breadcrumbs, &self.definition_lookup, &self.mono_type_lookup,
 
                            &mut self.mono_search_key, &tuple_member.concrete_type
 
                        );
 

	
 
                        if tuple_result != TypeLoopResult::TypeExists {
 
                            break;
 
                        }
 

	
 
                        breadcrumb.next_member += 1;
 
                    }
 

	
 
                    tuple_result
 
                }
 
            };
 

	
 
            // Handle the result of attempting to resolve the current breadcrumb
 
            match resolve_result {
 
                TypeLoopResult::TypeExists => {
 
                    // We finished parsing the type
 
                    self.type_loop_breadcrumbs.pop();
 
                },
 
                TypeLoopResult::PushBreadcrumb(definition_id, concrete_type) => {
 
                    // We recurse into the member type.
 
                    self.type_loop_breadcrumbs[breadcrumb_idx] = breadcrumb;
 
                    self.handle_new_breadcrumb_for_type_loops(arch, definition_id, concrete_type);
 
                },
 
                TypeLoopResult::TypeLoop(first_idx) => {
 
                    // Because we will be modifying breadcrumbs within the
 
                    // type-loop handling code, put back the modified breadcrumb
 
                    self.type_loop_breadcrumbs[breadcrumb_idx] = breadcrumb;
 

	
 
                    // We're in a type loop. Add the type loop
 
                    let mut loop_members = Vec::with_capacity(self.type_loop_breadcrumbs.len() - first_idx);
 
                    let mut contains_union = false;
 

	
 
                    for breadcrumb_idx in first_idx..self.type_loop_breadcrumbs.len() {
 
                        let breadcrumb = &mut self.type_loop_breadcrumbs[breadcrumb_idx];
 
                        let mut is_union = false;
 

	
 
                        // Check if type loop member is a union that may be
 
                        // broken up by moving some of its members to the heap.
 
                        let mono_type = &mut self.mono_types[breadcrumb.type_id.0 as usize];
 
                        if let MonoTypeVariant::Union(union_type) = &mut mono_type.variant {
 
                            // Mark the variant that caused the loop as heap
 
                            // allocated to break the type loop.
 
                            let variant = &mut union_type.variants[breadcrumb.next_member as usize];
 
                            variant.lives_on_heap = true;
 
                            breadcrumb.next_embedded += 1;
 

	
 
                            is_union = true;
 
                            contains_union = true;
 
                        } // else: we don't care about the type for now
 

	
 
                        loop_members.push(TypeLoopEntry{
 
                            type_id: breadcrumb.type_id,
 
                            is_union
 
                        });
 
                    }
 

	
 
                    let new_type_loop = TypeLoop{ members: loop_members };
 
                    if !contains_union {
 
                        // No way to (potentially) break the union. So return a
 
                        // type loop error. This is because otherwise our
 
                        // breadcrumb resolver ends up in an infinite loop.
 
                        return Err(construct_type_loop_error(
 
                            &self.mono_types, &new_type_loop, modules, heap
 
                        ));
 
                    }
 

	
 
                    self.type_loops.push(new_type_loop);
 
                }
 
            }
 
        }
 

	
 
        // All breadcrumbs have been cleared. So now `type_loops` contains all
 
        // of the encountered type loops, and `encountered_types` contains a
 
        // list of all unique monomorphs we encountered.
 

	
 
        // The next step is to figure out if all of the type loops can be
 
        // broken. A type loop can be broken if at least one union exists in the
 
        // loop and that union ended up having variants that are not part of
 
        // a type loop.
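        //
        // For example (sketch): a list-like union with variants `Nil` and
        // `Cons(T, List<T>)` loops through `Cons`, but `Nil` is not part of
        // the loop, so the loop is broken by heap-allocating the contents of
        // `Cons` while `Nil` keeps living on the stack.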
 
        fn type_loop_source_span_and_message<'a>(
 
            modules: &'a [Module], heap: &Heap, mono_types: &MonoTypeArray,
 
            definition_id: DefinitionId, mono_type_id: TypeId, index_in_loop: usize
 
        ) -> (&'a InputSource, InputSpan, String) {
 
            // Note: we will discover the type loop the *first* time we
 
            // instantiate a monomorph with the provided polymorphic arguments
 
            // (even if not all arguments are actually used in the type), so we
 
            // don't have to care about a second instantiation where certain
 
            // unused polymorphic arguments are different.
 
            let mono_type = &mono_types[mono_type_id.0 as usize];
 
            let type_name = mono_type.concrete_type.display_name(heap);
 

	
 
            let message = if index_in_loop == 0 {
 
                format!(
 
                    "encountered an infinitely large type for '{}' (which can be fixed by \
 
                    introducing a union type that has a variant whose embedded types are \
 
                    not part of a type loop, or do not have embedded types)",
 
                    type_name
 
                )
 
            } else if index_in_loop == 1 {
 
                format!("because it depends on the type '{}'", type_name)
 
            } else {
 
                format!("which depends on the type '{}'", type_name)
 
            };
 

	
 
            let ast_definition = &heap[definition_id];
 
            let ast_root_id = ast_definition.defined_in();
 

	
 
            return (
 
                &modules[ast_root_id.index as usize].source,
 
                ast_definition.identifier().span,
 
                message
 
            );
 
        }
 

	
 
        fn construct_type_loop_error(mono_types: &MonoTypeArray, type_loop: &TypeLoop, modules: &[Module], heap: &Heap) -> ParseError {
 
            // Seek first entry to produce parse error. Then continue builder
 
            // pattern. This is the error case so efficiency can go home.
 
            let mut parse_error = None;
 
            let mut next_member_index = 0;
 
            while next_member_index < type_loop.members.len() {
 
                let first_entry = &type_loop.members[next_member_index];
 
                next_member_index += 1;
 

	
 
                // Retrieve definition of first type in loop
 
                let first_mono_type = &mono_types[first_entry.type_id.0 as usize];
 
                let first_definition_id = get_concrete_type_definition(&first_mono_type.concrete_type.parts);
 
                if first_definition_id.is_none() {
 
                    continue;
 
                }
 
                let first_definition_id = first_definition_id.unwrap();
 

	
 
                // Produce error message for first type in loop
 
                let (first_module, first_span, first_message) = type_loop_source_span_and_message(
 
                    modules, heap, mono_types, first_definition_id, first_entry.type_id, 0
 
                );
 
                parse_error = Some(ParseError::new_error_at_span(first_module, first_span, first_message));
 
                break;
 
            }
 

	
 
            let mut parse_error = parse_error.unwrap(); // The loop above cannot have failed: we must have a type loop, and type loops cannot consist solely of unnamed types
 

	
 
            let mut error_counter = 1;
 
            for member_idx in next_member_index..type_loop.members.len() {
 
                let entry = &type_loop.members[member_idx];
 
                let mono_type = &mono_types[entry.type_id.0 as usize];
 
                let definition_id = get_concrete_type_definition(&mono_type.concrete_type.parts);
 
                if definition_id.is_none() {
 
                    continue;
 
                }
 
                let definition_id = definition_id.unwrap();
 

	
 
                let (module, span, message) = type_loop_source_span_and_message(
 
                    modules, heap, mono_types, definition_id, entry.type_id, error_counter
 
                );
 
                parse_error = parse_error.with_info_at_span(module, span, message);
 
                error_counter += 1;
 
            }
 

	
 
            parse_error
 
        }
 

	
 
        for type_loop in &self.type_loops {
 
            let mut can_be_broken = false;
 
            debug_assert!(!type_loop.members.is_empty());
 

	
 
            for entry in &type_loop.members {
 
                if entry.is_union {
 
                    let mono_type = self.mono_types[entry.type_id.0 as usize].variant.as_union();
 
                    debug_assert!(!mono_type.variants.is_empty()); // otherwise it couldn't be part of the type loop
 
                    let has_stack_variant = mono_type.variants.iter().any(|variant| !variant.lives_on_heap);
 
                    if has_stack_variant {
 
                        can_be_broken = true;
 
                        break;
 
                    }
 
                }
 
            }
 

	
 
            if !can_be_broken {
 
                // Construct a type loop error
 
                return Err(construct_type_loop_error(&self.mono_types, type_loop, modules, heap));
 
            }
 
        }
 

	
 
        // If here, then all type loops have been resolved and we can lay out
 
        // all of the members
 
        self.type_loops.clear();
 

	
 
        return Ok(());
 
    }
 

	
 
    /// Checks if the specified type needs to be resolved (i.e. we need to push
 
    /// a breadcrumb), is already resolved (i.e. we can continue with the next
 
    /// member of the currently considered type) or is in the process of being
 
    /// resolved (i.e. we're in a type loop). Because of borrowing rules we
 
    /// don't do any modifications of internal types here. Hence: if we
 
    /// return `PushBreadcrumb` then call `handle_new_breadcrumb_for_type_loops`
 
    /// to take care of storing the appropriate types.
 
    fn check_member_for_type_loops(
 
        breadcrumbs: &[TypeLoopBreadcrumb], definition_map: &DefinitionMap, mono_type_map: &MonoTypeMap,
 
        mono_key: &mut MonoSearchKey, concrete_type: &ConcreteType
 
    ) -> TypeLoopResult {
 
        // Depending on the type, lookup if the type has already been visited
 
        // (i.e. either already has its memory laid out, or is part of a type
 
        // loop because we've already visited the type)
 
        debug_assert!(!concrete_type.parts.is_empty());
 
        let definition_id = if let ConcreteTypePart::Instance(definition_id, _) = concrete_type.parts[0] {
 
            definition_id
 
        } else {
 
            DefinitionId::new_invalid()
 
        };
 

	
 
        Self::set_search_key_to_type(mono_key, definition_map, &concrete_type.parts);
 
        if let Some(type_id) = mono_type_map.get(mono_key).copied() {
 
            for (breadcrumb_idx, breadcrumb) in breadcrumbs.iter().enumerate() {
 
                if breadcrumb.type_id == type_id {
 
                    return TypeLoopResult::TypeLoop(breadcrumb_idx);
 
                }
 
            }
 

	
 
            return TypeLoopResult::TypeExists;
 
        }
 

	
 
        // Type is not yet known, so we need to insert it into the lookup and
 
        // push a new breadcrumb.
 
        return TypeLoopResult::PushBreadcrumb(definition_id, concrete_type.clone());
 
    }
 

	
 
    /// Handles the `PushBreadcrumb` result for a `check_member_for_type_loops`
 
    /// call. Will preallocate entries in the monomorphed type storage (with
 
    /// all memory properties zeroed).
 
    fn handle_new_breadcrumb_for_type_loops(&mut self, arch: &TargetArch, definition_id: DefinitionId, concrete_type: ConcreteType) {
 
        use DefinedTypeVariant as DTV;
 
        use ConcreteTypePart as CTP;
 

	
 
        let mut is_union = false;
 

	
 
        let type_id = match &concrete_type.parts[0] {
 
            // Builtin types
 
            CTP::Void | CTP::Message | CTP::Bool |
 
            CTP::UInt8 | CTP::UInt16 | CTP::UInt32 | CTP::UInt64 |
 
            CTP::SInt8 | CTP::SInt16 | CTP::SInt32 | CTP::SInt64 |
 
            CTP::Character | CTP::String |
 
            CTP::Array | CTP::Slice | CTP::Input | CTP::Output | CTP::Pointer => {
 
                // Insert the entry for the builtin type; we should be able to
 
                // immediately "steal" the size from the preinserted builtins.
 
                let base_type_id = match &concrete_type.parts[0] {
 
                    CTP::Void => arch.void_type_id,
 
                    CTP::Message => arch.message_type_id,
 
                    CTP::Bool => arch.bool_type_id,
 
                    CTP::UInt8 => arch.uint8_type_id,
 
                    CTP::UInt16 => arch.uint16_type_id,
 
                    CTP::UInt32 => arch.uint32_type_id,
 
                    CTP::UInt64 => arch.uint64_type_id,
 
                    CTP::SInt8 => arch.sint8_type_id,
 
                    CTP::SInt16 => arch.sint16_type_id,
 
                    CTP::SInt32 => arch.sint32_type_id,
 
                    CTP::SInt64 => arch.sint64_type_id,
 
                    CTP::Character => arch.char_type_id,
 
                    CTP::String => arch.string_type_id,
 
                    CTP::Array => arch.array_type_id,
 
                    CTP::Slice => arch.slice_type_id,
 
                    CTP::Input => arch.input_type_id,
 
                    CTP::Output => arch.output_type_id,
 
                    CTP::Pointer => arch.pointer_type_id,
 
                    _ => unreachable!(),
 
                };
 
                let base_type = &self.mono_types[base_type_id.0 as usize];
 
                let base_type_size = base_type.size;
 
                let base_type_alignment = base_type.alignment;
 

	
 
                let type_id = TypeId(self.mono_types.len() as i64);
 
                Self::set_search_key_to_type(&mut self.mono_search_key, &self.definition_lookup, &concrete_type.parts);
 
                self.mono_type_lookup.insert(self.mono_search_key.clone(), type_id);
 
                self.mono_types.push(MonoType{
 
                    type_id,
 
                    concrete_type,
 
                    size: base_type_size,
 
                    alignment: base_type_alignment,
 
                    variant: MonoTypeVariant::Builtin
 
                });
 

	
 
                type_id
 
            },
 
            // Tuples and user-defined types
 
            CTP::Tuple(num_embedded) => {
 
                debug_assert!(definition_id.is_invalid()); // because tuples do not have an associated `DefinitionId`
 
                let mut members = Vec::with_capacity(*num_embedded as usize);
 
                for section in ConcreteTypeIter::new(&concrete_type.parts, 0) {
 
                    members.push(TupleMonomorphMember{
 
                        type_id: TypeId::new_invalid(),
 
                        concrete_type: ConcreteType{ parts: Vec::from(section) },
 
                        size: 0,
 
                        alignment: 0,
 
                        offset: 0
 
                    });
 
                }
 

	
 
                let type_id = TypeId(self.mono_types.len() as i64);
 
                Self::set_search_key_to_tuple(&mut self.mono_search_key, &self.definition_lookup, &concrete_type.parts);
 
                self.mono_type_lookup.insert(self.mono_search_key.clone(), type_id);
 
                self.mono_types.push(MonoType::new_empty(type_id, concrete_type, MonoTypeVariant::Tuple(TupleMonomorph{ members })));
 

	
 
                type_id
 
            },
 
            CTP::Instance(_check_definition_id, _) => {
 
                debug_assert_eq!(definition_id, *_check_definition_id); // because this is how `definition_id` was determined
 

	
 
                Self::set_search_key_to_type(&mut self.mono_search_key, &self.definition_lookup, &concrete_type.parts);
 
                let base_type = self.definition_lookup.get(&definition_id).unwrap();
 
                let type_id = match &base_type.definition {
 
                    DTV::Enum(definition) => {
 
                        // The enum is a bit exceptional in that when we insert
 
                        // it we we will immediately set its size/alignment:
 
                        // there is nothing to compute here.
 
                        debug_assert!(definition.size != 0 && definition.alignment != 0);
 
                        let type_id = TypeId(self.mono_types.len() as i64);
 
                        self.mono_type_lookup.insert(self.mono_search_key.clone(), type_id);
 
                        self.mono_types.push(MonoType::new_empty(type_id, concrete_type, MonoTypeVariant::Enum));
 

	
 
                        let mono_type = &mut self.mono_types[type_id.0 as usize];
 
                        mono_type.size = definition.size;
 
                        mono_type.alignment = definition.alignment;
 

	
 
                        type_id
 
                    },
 
                    DTV::Union(definition) => {
 
                        // Create all the variants with their concrete types
 
                        let mut mono_variants = Vec::with_capacity(definition.variants.len());
 
                        for poly_variant in &definition.variants {
 
                            let mut mono_embedded = Vec::with_capacity(poly_variant.embedded.len());
 
                            for poly_embedded in &poly_variant.embedded {
 
                                let mono_concrete = Self::construct_concrete_type(poly_embedded, &concrete_type);
 
                                mono_embedded.push(UnionMonomorphEmbedded{
 
                                    type_id: TypeId::new_invalid(),
 
                                    concrete_type: mono_concrete,
 
                                    size: 0,
 
                                    alignment: 0,
 
                                    offset: 0
 
                                });
 
                            }
 

	
 
                            mono_variants.push(UnionMonomorphVariant{
 
                                lives_on_heap: false,
 
                                embedded: mono_embedded,
 
                            })
 
                        }
 

	
 
                        let type_id = TypeId(self.mono_types.len() as i64);
 
                        let tag_size = definition.tag_size;
 
                        Self::set_search_key_to_type(&mut self.mono_search_key, &self.definition_lookup, &concrete_type.parts);
 
                        self.mono_type_lookup.insert(self.mono_search_key.clone(), type_id);
 
                        self.mono_types.push(MonoType::new_empty(type_id, concrete_type, MonoTypeVariant::Union(UnionMonomorph{
 
                            variants: mono_variants,
 
                            tag_size,
 
                            heap_size: 0,
 
                            heap_alignment: 0,
 
                        })));
 

	
 
                        is_union = true;
 
                        type_id
 
                    },
 
                    DTV::Struct(definition) => {
 
                        // Create fields
 
                        let mut mono_fields = Vec::with_capacity(definition.fields.len());
 
                        for poly_field in &definition.fields {
 
                            let mono_concrete = Self::construct_concrete_type(&poly_field.parser_type, &concrete_type);
 
                            mono_fields.push(StructMonomorphField{
 
                                type_id: TypeId::new_invalid(),
 
                                concrete_type: mono_concrete,
 
                                size: 0,
 
                                alignment: 0,
 
                                offset: 0
 
                            })
 
                        }
 

	
 
                        let type_id = TypeId(self.mono_types.len() as i64);
 
                        Self::set_search_key_to_type(&mut self.mono_search_key, &self.definition_lookup, &concrete_type.parts);
 
                        self.mono_type_lookup.insert(self.mono_search_key.clone(), type_id);
 
                        self.mono_types.push(MonoType::new_empty(type_id, concrete_type, MonoTypeVariant::Struct(StructMonomorph{
 
                            fields: mono_fields,
 
                        })));
 

	
 
                        type_id
 
                    },
 
                    DTV::Procedure(_) => {
 
                        unreachable!("pushing type resolving breadcrumb for procedure type")
 
                    },
 
                };
 

	
 
                type_id
 
            },
 
            CTP::Function(_, _) | CTP::Component(_, _) => todo!("function pointers"),
 
        };
 

	
 
        self.encountered_types.push(TypeLoopEntry{ type_id, is_union });
 
        self.type_loop_breadcrumbs.push(TypeLoopBreadcrumb{
 
            type_id,
 
            next_member: 0,
 
            next_embedded: 0,
 
        });
 
    }
 

	
 
    /// Constructs a concrete type out of a parser type for a struct field or
 
    /// union embedded type. It will do this by looking up the polymorphic
 
    /// variables in the supplied concrete type. The assumption is that the
 
    /// polymorphic variable's indices correspond to the subtrees in the
 
    /// concrete type.
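    /// For example, a member typed as an array of the polymorphic variable `T`
    /// inside some `Foo<T>`, instantiated as `Foo<u32>`, yields the concrete
    /// parts `[Array, UInt32]`.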
 
    fn construct_concrete_type(member_type: &ParserType, container_type: &ConcreteType) -> ConcreteType {
 
        use ParserTypeVariant as PTV;
 
        use ConcreteTypePart as CTP;
 

	
 
        // TODO: Combine with code in pass_typing.rs
 
        fn parser_to_concrete_part(part: &ParserTypeVariant) -> Option<ConcreteTypePart> {
 
            match part {
 
                PTV::Void      => Some(CTP::Void),
 
                PTV::Message   => Some(CTP::Message),
 
                PTV::Bool      => Some(CTP::Bool),
 
                PTV::UInt8     => Some(CTP::UInt8),
 
                PTV::UInt16    => Some(CTP::UInt16),
 
                PTV::UInt32    => Some(CTP::UInt32),
 
                PTV::UInt64    => Some(CTP::UInt64),
 
                PTV::SInt8     => Some(CTP::SInt8),
 
                PTV::SInt16    => Some(CTP::SInt16),
 
                PTV::SInt32    => Some(CTP::SInt32),
 
                PTV::SInt64    => Some(CTP::SInt64),
 
                PTV::Character => Some(CTP::Character),
 
                PTV::String    => Some(CTP::String),
 
                PTV::Array     => Some(CTP::Array),
 
                PTV::Input     => Some(CTP::Input),
 
                PTV::Output    => Some(CTP::Output),
 
                PTV::Tuple(num) => Some(CTP::Tuple(*num)),
 
                PTV::Definition(definition_id, num) => Some(CTP::Instance(*definition_id, *num)),
 
                _              => None
 
            }
 
        }
 

	
 
        let mut parts = Vec::with_capacity(member_type.elements.len()); // usually a correct estimate, but not guaranteed
 
        for member_part in &member_type.elements {
 
            // Check if we have a regular builtin type
 
            if let Some(part) = parser_to_concrete_part(&member_part.variant) {
 
                parts.push(part);
 
                continue;
 
            }
 

	
 
            // Not builtin, but if all code is working correctly, we only care
 
            // about the polymorphic argument at this point.
 
            if let PTV::PolymorphicArgument(_container_definition_id, poly_arg_idx) = member_part.variant {
 
                debug_assert_eq!(_container_definition_id, get_concrete_type_definition(&container_type.parts).unwrap());
 

	
 
                let mut container_iter = container_type.embedded_iter(0);
 
                for _ in 0..poly_arg_idx {
 
                    container_iter.next();
 
                }
 

	
 
                let poly_section = container_iter.next().unwrap();
 
                parts.extend(poly_section);
 

	
 
                continue;
 
            }
 

	
 
            unreachable!("unexpected type part {:?} from {:?}", member_part, member_type);
 
        }
 

	
 
        return ConcreteType{ parts };
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Determining memory layout for types
 
    //--------------------------------------------------------------------------
 

	
 
    /// Should be called after type loops are detected (and resolved
 
    /// successfully); at that point we expect the
 
    /// `encountered_types` array to be filled. We'll calculate size/alignment/
 
    /// offset values for those types in this routine.
 
    fn lay_out_memory_for_encountered_types(&mut self, arch: &TargetArch) {
 
        // Programmer's note: this works like a little stack machine. We have
 
        // memory layout breadcrumbs which, like the type loop breadcrumbs, keep
 
        // track of the currently considered member type. This breadcrumb also
 
        // stores an index into the `size_alignment_stack`, which will be used
 
        // to store intermediate size/alignment pairs until all members are
 
        // resolved. Note that this `size_alignment_stack` is NOT an
 
        // optimization; we're working around borrowing rules here.
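        // As an illustration (assuming a typical 4-byte u32 and 8-byte u64):
        // laying out a struct with fields (u32, u64) first pushes (4, 4) and
        // (8, 8) onto `size_alignment_stack`; the fields then get offsets 0
        // and 8, and the struct ends up with size 16 and alignment 8.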
 

	
 
        // Just finished type loop detection, so we're left with the encountered
 
        // types only. If we don't have any (a builtin type's monomorph was
 
        // added to the type table) then this function shouldn't be called at
 
        // all.
 
        debug_assert!(self.type_loops.is_empty());
 
        debug_assert!(!self.encountered_types.is_empty());
 
        debug_assert!(self.memory_layout_breadcrumbs.is_empty());
 
        debug_assert!(self.size_alignment_stack.is_empty());
 

	
 
        let (ptr_size, ptr_align) = self.mono_types[arch.pointer_type_id.0 as usize].get_size_alignment().unwrap();
 

	
 
        // Push the first entry (the type we originally started with when we
 
        // were detecting type loops)
 
        let first_entry = &self.encountered_types[0];
 
        self.memory_layout_breadcrumbs.push(MemoryBreadcrumb{
 
            type_id: first_entry.type_id,
 
            next_member: 0,
 
            next_embedded: 0,
 
            first_size_alignment_idx: 0,
 
        });
 

	
 
        // Enter the main resolving loop
 
        'breadcrumb_loop: while !self.memory_layout_breadcrumbs.is_empty() {
 
            let cur_breadcrumb_idx = self.memory_layout_breadcrumbs.len() - 1;
 
            let mut breadcrumb = self.memory_layout_breadcrumbs[cur_breadcrumb_idx].clone();
 

	
 
            let mono_type = &self.mono_types[breadcrumb.type_id.0 as usize];
 
            match &mono_type.variant {
 
                MonoTypeVariant::Builtin | MonoTypeVariant::Enum => {
 
                    // Size should already be computed
 
                    dbg_code!({
 
                        let mono_type = &self.mono_types[breadcrumb.type_id.0 as usize];
 
                        debug_assert!(mono_type.size != 0 && mono_type.alignment != 0);
 
                    });
 
                },
 
                MonoTypeVariant::Union(mono_type) => {
 
                    // Retrieve size/alignment of each embedded type. We do not
 
                    // compute the offsets or total type sizes yet.
 
                    let num_variants = mono_type.variants.len() as u32;
 
                    while breadcrumb.next_member < num_variants {
 
                        let mono_variant = &mono_type.variants[breadcrumb.next_member as usize];
 

	
 
                        if mono_variant.lives_on_heap {
 
                            // To prevent type loops we made this a heap-
 
                            // allocated variant. This implies we cannot
 
                            // compute sizes of members at this point.
 
                        } else {
 
                            let num_embedded = mono_variant.embedded.len() as u32;
 
                            while breadcrumb.next_embedded < num_embedded {
 
                                let mono_embedded = &mono_variant.embedded[breadcrumb.next_embedded as usize];
 
                                let layout_result = Self::get_memory_layout_or_breadcrumb(
 
                                    &self.definition_lookup, &self.mono_type_lookup, &self.mono_types,
 
                                    &mut self.mono_search_key, arch, &mono_embedded.concrete_type.parts,
 
                                    self.size_alignment_stack.len()
 
                                );
 
                                match layout_result {
 
                                    MemoryLayoutResult::TypeExists(size, alignment) => {
 
                                        self.size_alignment_stack.push((size, alignment));
 
                                    },
 
                                    MemoryLayoutResult::PushBreadcrumb(new_breadcrumb) => {
 
                                        self.memory_layout_breadcrumbs[cur_breadcrumb_idx] = breadcrumb;
 
                                        self.memory_layout_breadcrumbs.push(new_breadcrumb);
 
                                        continue 'breadcrumb_loop;
 
                                    }
 
                                }
 

	
 
                                breadcrumb.next_embedded += 1;
 
                            }
 
                        }
 

	
 
                        breadcrumb.next_member += 1;
 
                        breadcrumb.next_embedded = 0;
 
                    }
 

	
 
                    // If here then we can at least compute the stack size of
 
                    // the type; we'll have to come back at the very end to
 
                    // fill in the heap size/alignment/offset of each heap-
 
                    // allocated variant.
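                    // Illustration (assuming a 4-byte, 4-aligned u32): with a
                    // 1-byte tag, a stack variant embedding a single u32
                    // starts at offset 1, aligns up to 4, and ends at 8, so
                    // it contributes a stack size of 8 with alignment 4.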
 
                    let mut max_size = mono_type.tag_size;
 
                    let mut max_alignment = mono_type.tag_size;
 

	
 
                    let mono_type = &mut self.mono_types[breadcrumb.type_id.0 as usize];
 
                    let union_type = mono_type.variant.as_union_mut();
 
                    let mut size_alignment_idx = breadcrumb.first_size_alignment_idx as usize;
 

	
 
                    for variant in &mut union_type.variants {
 
                        // We're doing stack computations, so always start with
 
                        // the tag size/alignment.
 
                        let mut variant_offset = union_type.tag_size;
 
                        let mut variant_alignment = union_type.tag_size;
 

	
 
                        if variant.lives_on_heap {
 
                            // Variant lives on heap, so just a pointer
 
                            align_offset_to(&mut variant_offset, ptr_align);
 

	
 
                            variant_offset += ptr_size;
 
                            variant_alignment = variant_alignment.max(ptr_align);
 
                        } else {
 
                            // Variant lives on stack, so walk all embedded
 
                            // types.
 
                            for embedded in &mut variant.embedded {
 
                                let (size, alignment) = self.size_alignment_stack[size_alignment_idx];
 
                                embedded.size = size;
 
                                embedded.alignment = alignment;
 
                                size_alignment_idx += 1;
 

	
 
                                align_offset_to(&mut variant_offset, alignment);
 
                                embedded.offset = variant_offset;
 

	
 
                                variant_offset += size;
 
                                variant_alignment = variant_alignment.max(alignment);
 
                            }
 
                        };
 

	
 
                        max_size = max_size.max(variant_offset);
 
                        max_alignment = max_alignment.max(variant_alignment);
 
                    }
 

	
 
                    mono_type.size = max_size;
 
                    mono_type.alignment = max_alignment;
 
                    self.size_alignment_stack.truncate(breadcrumb.first_size_alignment_idx as usize);
 
                },
 
                MonoTypeVariant::Struct(mono_type) => {
 
                    // Retrieve size and alignment of each struct member. We'll
 
                    // compute the offsets once all of those are known
 
                    let num_fields = mono_type.fields.len() as u32;
 
                    while breadcrumb.next_member < num_fields {
 
                        let mono_field = &mono_type.fields[breadcrumb.next_member as usize];
 

	
 
                        let layout_result = Self::get_memory_layout_or_breadcrumb(
 
                            &self.definition_lookup, &self.mono_type_lookup, &self.mono_types,
 
                            &mut self.mono_search_key, arch, &mono_field.concrete_type.parts,
 
                            self.size_alignment_stack.len()
 
                        );
 
                        match layout_result {
 
                            MemoryLayoutResult::TypeExists(size, alignment) => {
 
                                self.size_alignment_stack.push((size, alignment))
 
                            },
 
                            MemoryLayoutResult::PushBreadcrumb(new_breadcrumb) => {
 
                                self.memory_layout_breadcrumbs[cur_breadcrumb_idx] = breadcrumb;
 
                                self.memory_layout_breadcrumbs.push(new_breadcrumb);
 
                                continue 'breadcrumb_loop;
 
                            },
 
                        }
 

	
 
                        breadcrumb.next_member += 1;
 
                    }
 

	
 
                    // Compute offsets and size of total type
 
                    let mut cur_offset = 0;
 
                    let mut max_alignment = 1;
 

	
 
                    let mono_type = &mut self.mono_types[breadcrumb.type_id.0 as usize];
 
                    let struct_type = mono_type.variant.as_struct_mut();
 
                    let mut size_alignment_idx = breadcrumb.first_size_alignment_idx as usize;
 

	
 
                    for field in &mut struct_type.fields {
 
                        let (size, alignment) = self.size_alignment_stack[size_alignment_idx];
 
                        field.size = size;
 
                        field.alignment = alignment;
 
                        size_alignment_idx += 1;
 

	
 
                        align_offset_to(&mut cur_offset, alignment);
 
                        field.offset = cur_offset;
 

	
 
                        cur_offset += size;
 
                        max_alignment = max_alignment.max(alignment);
 
                    }
 

	
 
                    mono_type.size = cur_offset;
 
                    mono_type.alignment = max_alignment;
 
                    self.size_alignment_stack.truncate(breadcrumb.first_size_alignment_idx as usize);
 
                },
 
                MonoTypeVariant::Procedure(_) => {
 
                    unreachable!();
 
                },
 
                MonoTypeVariant::Tuple(mono_type) => {
 
                    let num_members = mono_type.members.len() as u32;
 
                    while breadcrumb.next_member < num_members {
 
                        let mono_member = &mono_type.members[breadcrumb.next_member as usize];
 
                        let layout_result = Self::get_memory_layout_or_breadcrumb(
 
                            &self.definition_lookup, &self.mono_type_lookup, &self.mono_types,
 
                            &mut self.mono_search_key, arch, &mono_member.concrete_type.parts,
 
                            self.size_alignment_stack.len()
 
                        );
 
                        match layout_result {
 
                            MemoryLayoutResult::TypeExists(size, alignment) => {
 
                                self.size_alignment_stack.push((size, alignment));
 
                            },
 
                            MemoryLayoutResult::PushBreadcrumb(new_breadcrumb) => {
 
                                self.memory_layout_breadcrumbs[cur_breadcrumb_idx] = breadcrumb;
 
                                self.memory_layout_breadcrumbs.push(new_breadcrumb);
 
                                continue 'breadcrumb_loop;
 
                            },
 
                        }
 

	
 
                        breadcrumb.next_member += 1;
 
                    }
 

	
 
                    // If here then we can compute the memory layout of the tuple.
 
                    let mut cur_offset = 0;
 
                    let mut max_alignment = 1;
 

	
 
                    let mono_type = &mut self.mono_types[breadcrumb.type_id.0 as usize];
 
                    let mono_tuple = mono_type.variant.as_tuple_mut();
 
                    let mut size_alignment_index = breadcrumb.first_size_alignment_idx as usize;
 
                    for member_index in 0..num_members {
 
                        let (member_size, member_alignment) = self.size_alignment_stack[size_alignment_index];
 
                        align_offset_to(&mut cur_offset, member_alignment);
 
                        size_alignment_index += 1;
 

	
 
                        let member = &mut mono_tuple.members[member_index as usize];
 
                        member.size = member_size;
 
                        member.alignment = member_alignment;
 
                        member.offset = cur_offset;
 

	
 
                        cur_offset += member_size;
 
                        max_alignment = max_alignment.max(member_alignment);
 
                    }
 

	
 
                    mono_type.size = cur_offset;
 
                    mono_type.alignment = max_alignment;
 
                    self.size_alignment_stack.truncate(breadcrumb.first_size_alignment_idx as usize);
 
                },
 
            }
 

	
 
            // If here, then we have completely laid out the current type. So move
 
            // to the next breadcrumb
 
            self.memory_layout_breadcrumbs.pop();
 
        }
 

	
 
        debug_assert!(self.size_alignment_stack.is_empty());
 

	
 
        // If here then all types have been laid out. What remains is to
 
        // compute the sizes/alignment/offsets of the heap variants of the
 
        // unions we have encountered.
 
        for entry in &self.encountered_types {
 
            if !entry.is_union {
 
                continue;
 
            }
 

	
 
            // First pass, use buffer to store size/alignment to prevent
 
            // borrowing issues.
 
            let mono_type = self.mono_types[entry.type_id.0 as usize].variant.as_union();
 
            for variant in &mono_type.variants {
 
                if !variant.lives_on_heap {
 
                    continue;
 
                }
 

	
 
                debug_assert!(!variant.embedded.is_empty());
 

	
 
                for embedded in &variant.embedded {
 
                    let layout_result = Self::get_memory_layout_or_breadcrumb(
 
                        &self.definition_lookup, &self.mono_type_lookup, &self.mono_types,
 
                        &mut self.mono_search_key, arch, &embedded.concrete_type.parts,
 
                        self.size_alignment_stack.len()
 
                    );
 
                    match layout_result {
 
                        MemoryLayoutResult::TypeExists(size, alignment) => {
 
                            self.size_alignment_stack.push((size, alignment));
 
                        },
 
                        _ => unreachable!(), // type was not truly infinite, so type must have been found
 
                    }
 
                }
 
            }
 

	
 
            // Second pass, apply the size/alignment values in our buffer
 
            let mono_type = self.mono_types[entry.type_id.0 as usize].variant.as_union_mut();
 

	
 
            let mut max_size = 0;
 
            let mut max_alignment = 1;
 
            let mut size_alignment_idx = 0;
 

	
 
            for variant in &mut mono_type.variants {
 
                if !variant.lives_on_heap {
 
                    continue;
 
                }
 

	
 
                let mut variant_offset = 0;
 
                let mut variant_alignment = 1;
 

	
 
                for embedded in &mut variant.embedded {
 
                    let (size, alignment) = self.size_alignment_stack[size_alignment_idx];
 
                    embedded.size = size;
 
                    embedded.alignment = alignment;
 
                    size_alignment_idx += 1;
 

	
 
                    align_offset_to(&mut variant_offset, alignment);
 
                    embedded.offset = variant_offset;
 

	
 
                    variant_offset += size;
 
                    variant_alignment = variant_alignment.max(alignment);
 
                }
 

	
 
                max_size = max_size.max(variant_offset);
 
                max_alignment = max_alignment.max(variant_alignment);
 
            }
 

	
 
            if max_size != 0 {
 
                // At least one entry lives on the heap
 
                mono_type.heap_size = max_size;
 
                mono_type.heap_alignment = max_alignment;
 
            }

            // All size/alignment pairs for this union have now been consumed;
            // reset the scratch buffer so the next encountered union starts
            // reading from index 0 again.
            self.size_alignment_stack.clear();
 
        }
 

	
 
        // And now, we're actually, properly, done
 
        self.encountered_types.clear();
 
    }
 

	
 
    /// Attempts to compute size/alignment for the provided type. Note that this
 
    /// is called *after* type loops have been successfully resolved. Hence we
 
    /// may assume that all monomorph entries exist, but we may not assume that
 
    /// those entries already have their size/alignment computed.
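    /// For builtin parts the size/alignment is taken from the preinserted
    /// architecture entries (e.g. `arch.uint32_type_id`); tuples and
    /// user-defined instances are looked up through the mono search key.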
 
    // The passed parameters are messy, but we need to strike a balance between borrowing
 
    // and allocations in hot loops. So it is what it is.
 
    fn get_memory_layout_or_breadcrumb(
 
        definition_map: &DefinitionMap, mono_type_map: &MonoTypeMap, mono_types: &MonoTypeArray,
 
        search_key: &mut MonoSearchKey, arch: &TargetArch, parts: &[ConcreteTypePart],
 
        size_alignment_stack_len: usize,
 
    ) -> MemoryLayoutResult {
 
        use ConcreteTypePart as CTP;
 

	
 
        debug_assert!(!parts.is_empty());
 
        let type_id = match parts[0] {
 
            CTP::Void      => arch.void_type_id,
 
            CTP::Message   => arch.message_type_id,
 
            CTP::Bool      => arch.bool_type_id,
 
            CTP::UInt8     => arch.uint8_type_id,
 
            CTP::UInt16    => arch.uint16_type_id,
 
            CTP::UInt32    => arch.uint32_type_id,
 
            CTP::UInt64    => arch.uint64_type_id,
 
            CTP::SInt8     => arch.sint8_type_id,
 
            CTP::SInt16    => arch.sint16_type_id,
 
            CTP::SInt32    => arch.sint32_type_id,
 
            CTP::SInt64    => arch.sint64_type_id,
 
            CTP::Character => arch.char_type_id,
 
            CTP::String    => arch.string_type_id,
 
            CTP::Array     => arch.array_type_id,
 
            CTP::Slice     => arch.slice_type_id,
 
            CTP::Input     => arch.input_type_id,
 
            CTP::Output    => arch.output_type_id,
 
            CTP::Pointer   => arch.pointer_type_id,
 
            CTP::Tuple(_) => {
 
                Self::set_search_key_to_tuple(search_key, definition_map, parts);
 
                let type_id = mono_type_map.get(&search_key).copied().unwrap();
 

	
 
                type_id
 
            },
 
            CTP::Instance(definition_id, _) => {
 
                // Retrieve entry and the specific monomorph index by applying
 
                // the full concrete type.
 
                let definition_type = definition_map.get(&definition_id).unwrap();
 
                search_key.set(parts, &definition_type.poly_vars);
 
                let type_id = mono_type_map.get(&search_key).copied().unwrap();
 

	
 
                type_id
 
            },
 
            CTP::Function(_, _) | CTP::Component(_, _) => {
 
                todo!("storage for 'function pointers'");
 
            }
 
        };
 

	
 
        let mono_type = &mono_types[type_id.0 as usize];
 
        if let Some((size, alignment)) = mono_type.get_size_alignment() {
 
            return MemoryLayoutResult::TypeExists(size, alignment);
 
        } else {
 
            return MemoryLayoutResult::PushBreadcrumb(MemoryBreadcrumb{
 
                type_id,
 
                next_member: 0,
 
                next_embedded: 0,
 
                first_size_alignment_idx: size_alignment_stack_len as u32,
 
            });
 
        }
 
    }
 

	
 
    /// Returns the tag's concrete type (always a builtin integer type) and the size of
 
    /// that type in bytes (which is implicitly also its alignment).
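    /// For example, values in the range `0..=300` need a `UInt16` tag
    /// (2 bytes), while any range containing a negative value gets a signed tag.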
 
    fn variant_tag_type_from_values(min_val: i64, max_val: i64) -> (ConcreteType, usize) {
 
        debug_assert!(min_val <= max_val);
 

	
 
        let (part, size) = if min_val >= 0 {
 
            // Can be an unsigned integer
 
            if max_val <= (u8::MAX as i64) {
 
                (ConcreteTypePart::UInt8, 1)
 
            } else if max_val <= (u16::MAX as i64) {
 
                (ConcreteTypePart::UInt16, 2)
 
            } else if max_val <= (u32::MAX as i64) {
 
                (ConcreteTypePart::UInt32, 4)
 
            } else {
 
                (ConcreteTypePart::UInt64, 8)
 
            }
 
        } else {
 
            // Must be a signed integer
 
            if min_val >= (i8::MIN as i64) && max_val <= (i8::MAX as i64) {
 
                (ConcreteTypePart::SInt8, 1)
 
            } else if min_val >= (i16::MIN as i64) && max_val <= (i16::MAX as i64) {
 
                (ConcreteTypePart::SInt16, 2)
 
            } else if min_val >= (i32::MIN as i64) && max_val <= (i32::MAX as i64) {
 
                (ConcreteTypePart::SInt32, 4)
 
            } else {
 
                (ConcreteTypePart::SInt64, 8)
 
            }
 
        };
 

	
 
        return (ConcreteType{ parts: vec![part] }, size);
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Small utilities
 
    //--------------------------------------------------------------------------
 

	
 
    fn create_polymorphic_variables(variables: &[Identifier]) -> Vec<PolymorphicVariable> {
 
        let mut result = Vec::with_capacity(variables.len());
 
        for variable in variables.iter() {
 
            result.push(PolymorphicVariable{ identifier: variable.clone(), is_in_use: false });
 
        }
 

	
 
        result
 
    }
 

	
 
    fn mark_used_polymorphic_variables(poly_vars: &mut Vec<PolymorphicVariable>, parser_type: &ParserType) {
 
        for element in &parser_type.elements {
 
            if let ParserTypeVariant::PolymorphicArgument(_, idx) = &element.variant {
 
                poly_vars[*idx as usize].is_in_use = true;
 
            }
 
        }
 
    }
 

	
 
    /// Sets the search key to a specific concrete type, taking the definition's
    /// polymorphic-variable usage into account for user-defined types.
 
    fn set_search_key_to_type(search_key: &mut MonoSearchKey, definition_map: &DefinitionMap, type_parts: &[ConcreteTypePart]) {
 
        use ConcreteTypePart as CTP;
 

	
 
        match type_parts[0] {
 
            // Builtin types without any embedded types
 
            CTP::Void | CTP::Message | CTP::Bool |
 
            CTP::UInt8 | CTP::UInt16 | CTP::UInt32 | CTP::UInt64 |
 
            CTP::SInt8 | CTP::SInt16 | CTP::SInt32 | CTP::SInt64 |
 
            CTP::Character | CTP::String => {
 
                debug_assert_eq!(type_parts.len(), 1);
 
                search_key.set_top_type(type_parts[0]);
 
            },
 
            // Builtin types with a single nested type
 
            CTP::Array | CTP::Slice | CTP::Input | CTP::Output | CTP::Pointer => {
 
                debug_assert_eq!(type_parts[0].num_embedded(), 1);
 
                search_key.set(type_parts, &POLY_VARS_IN_USE[..1])
 
            },
 
            // Tuples and user-defined types
 
            CTP::Tuple(_) => {
 
                Self::set_search_key_to_tuple(search_key, definition_map, type_parts);
 
            },
 
            CTP::Instance(definition_id, _) => {
 
                let definition_type = definition_map.get(&definition_id).unwrap();
 
                search_key.set(type_parts, &definition_type.poly_vars);
 
            },
 
            CTP::Function(_, _) | CTP::Component(_, _) => {
 
                todo!("implement function pointers")
 
            },
 
        }
 
    }
 

	
 
    fn set_search_key_to_tuple(search_key: &mut MonoSearchKey, definition_map: &DefinitionMap, type_parts: &[ConcreteTypePart]) {
 
        dbg_code!({
 
            let is_tuple = if let ConcreteTypePart::Tuple(_) = type_parts[0] { true } else { false };
 
            assert!(is_tuple);
 
        });
 
        search_key.set_top_type(type_parts[0]);
 
        for subtree in ConcreteTypeIter::new(type_parts, 0) {
 
            if let Some(definition_id) = get_concrete_type_definition(subtree) {
 
                // A definition, so retrieve poly var usage info
 
                let definition_type = definition_map.get(&definition_id).unwrap();
 
                search_key.push_subtree(subtree, &definition_type.poly_vars);
 
            } else {
 
                // Not a definition, so all type information is important
 
                search_key.push_subtype(subtree, true);
 
            }
 
        }
 
    }
 
}
 

	
 
#[inline]
 
fn align_offset_to(offset: &mut usize, alignment: usize) {
 
    debug_assert!(alignment > 0);
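    // The mask trick below assumes `alignment` is a power of two. For example,
    // aligning offset 5 to alignment 4 computes (5 + 3) & !3 == 8, while an
    // already-aligned offset (e.g. 8) is left unchanged.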
 
    let alignment_min_1 = alignment - 1;
 
    *offset += alignment_min_1;
 
    *offset &= !(alignment_min_1);
 
}
 

	
 
#[inline]
 
fn get_concrete_type_definition(concrete_parts: &[ConcreteTypePart]) -> Option<DefinitionId> {
 
    match concrete_parts[0] {
 
        ConcreteTypePart::Instance(definition_id, _) => {
 
            return Some(definition_id)
 
        },
 
        ConcreteTypePart::Function(definition_id, _) |
 
        ConcreteTypePart::Component(definition_id, _) => {
 
            return Some(definition_id.upcast());
 
        },
 
        _ => {
 
            return None;
 
        },
 
    }
 
}
 
\ No newline at end of file
src/protocol/tests/utils.rs
Show inline comments
 
use crate::collections::StringPool;
 
use crate::protocol::{Module, ast::*, input_source::*, parser::{
 
    Parser,
 
    type_table::*,
 
    symbol_table::SymbolTable,
 
    token_parsing::*,
 
}, eval::*, RunContext};
 

	
 
// Carries information about the test into utility structures for builder-like
 
// assertions
 
#[derive(Clone, Copy)]
 
struct TestCtx<'a> {
 
    test_name: &'a str,
 
    heap: &'a Heap,
 
    modules: &'a Vec<Module>,
 
    types: &'a TypeTable,
 
    symbols: &'a SymbolTable,
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for parsing and compiling
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct Tester {
 
    test_name: String,
 
    sources: Vec<String>
 
}
 

	
 
impl Tester {
 
    /// Constructs a new tester, allows adding multiple sources before compiling
 
    pub(crate) fn new<S: ToString>(test_name: S) -> Self {
 
        Self{
 
            test_name: test_name.to_string(),
 
            sources: Vec::new()
 
        }
 
    }
 

	
 
    /// Utility for quick tests that use a single source file and expect the
 
    /// compilation to succeed.
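    ///
    /// Illustrative use (the source text is a placeholder, not real syntax):
    /// `Tester::new_single_source_expect_ok("my test", SOME_SOURCE)`
    /// `    .for_struct("Foo", |s| { s.assert_num_fields(1); });`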
 
    pub(crate) fn new_single_source_expect_ok<T: ToString, S: ToString>(test_name: T, source: S) -> AstOkTester {
 
        Self::new(test_name)
 
            .with_source(source)
 
            .compile()
 
            .expect_ok()
 
    }
 

	
 
    /// Utility for quick tests that use a single source file and expect the
 
    /// compilation to fail.
 
    pub(crate) fn new_single_source_expect_err<T: ToString, S: ToString>(test_name: T, source: S) -> AstErrTester {
 
        Self::new(test_name)
 
            .with_source(source)
 
            .compile()
 
            .expect_err()
 
    }
 

	
 
    pub(crate) fn with_source<S: ToString>(mut self, source: S) -> Self {
 
        self.sources.push(source.to_string());
 
        self
 
    }
 

	
 
    pub(crate) fn compile(self) -> AstTesterResult {
 
        let mut parser = Parser::new();
 
        let mut parser = Parser::new().unwrap();
 
        for source in self.sources.into_iter() {
 
            let source = source.into_bytes();
 
            let input_source = InputSource::new(String::from(""), source);
 

	
 
            if let Err(err) = parser.feed(input_source) {
 
                return AstTesterResult::Err(AstErrTester::new(self.test_name, err))
 
            }
 
        }
 

	
 
        if let Err(err) = parser.parse() {
 
            return AstTesterResult::Err(AstErrTester::new(self.test_name, err))
 
        }
 

	
 
        AstTesterResult::Ok(AstOkTester::new(self.test_name, parser))
 
    }
 
}
 

	
 
pub(crate) enum AstTesterResult {
 
    Ok(AstOkTester),
 
    Err(AstErrTester)
 
}
 

	
 
impl AstTesterResult {
 
    pub(crate) fn expect_ok(self) -> AstOkTester {
 
        match self {
 
            AstTesterResult::Ok(v) => v,
 
            AstTesterResult::Err(err) => {
 
                let wrapped = ErrorTester{ test_name: &err.test_name, error: &err.error };
 
                println!("DEBUG: Full error:\n{}", &err.error);
 
                assert!(
 
                    false,
 
                    "[{}] Expected compilation to succeed, but it failed with {}",
 
                    err.test_name, wrapped.assert_postfix()
 
                );
 
                unreachable!();
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn expect_err(self) -> AstErrTester {
 
        match self {
 
            AstTesterResult::Ok(ok) => {
 
                assert!(false, "[{}] Expected compilation to fail, but it succeeded", ok.test_name);
 
                unreachable!();
 
            },
 
            AstTesterResult::Err(err) => err,
 
        }
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for successful compilation
 
//------------------------------------------------------------------------------
 

	
 
#[allow(dead_code)]
 
pub(crate) struct AstOkTester {
 
    test_name: String,
 
    modules: Vec<Module>,
 
    heap: Heap,
 
    symbols: SymbolTable,
 
    types: TypeTable,
 
    pool: StringPool, // This is stored because if we drop it on the floor, we lose all our `StringRef<'static>`s
 
}
 

	
 
impl AstOkTester {
 
    fn new(test_name: String, parser: Parser) -> Self {
 
        Self {
 
            test_name,
 
            modules: parser.modules.into_iter().map(|module| Module{
 
                source: module.source,
 
                root_id: module.root_id,
 
                name: module.name.map(|(_, name)| name)
 
            }).collect(),
 
            heap: parser.heap,
 
            symbols: parser.symbol_table,
 
            types: parser.type_table,
 
            pool: parser.string_pool,
 
        }
 
    }
 

	
 
    pub(crate) fn for_struct<F: Fn(StructTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Struct(ast_definition) = definition {
 
                if ast_definition.identifier.value.as_str() != name {
 
                    continue;
 
                }
 

	
 
                // Found struct with the same name
 
                let definition_id = ast_definition.this.upcast();
 
                let type_entry = self.types.get_base_definition(&definition_id).unwrap();
 
                let type_definition = type_entry.definition.as_struct();
 

	
 
                let tester = StructTester::new(self.ctx(), ast_definition, type_definition);
 
                f(tester);
 
                found = true;
 
                break
 
            }
 
        }
 

	
 
        assert!(
 
            found, "[{}] Failed to find definition for struct '{}'",
 
            self.test_name, name
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn for_enum<F: Fn(EnumTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Enum(definition) = definition {
 
                if definition.identifier.value.as_str() != name {
 
                    continue;
 
                }
 

	
 
                // Found enum with the same name
 
                let tester = EnumTester::new(self.ctx(), definition);
 
                f(tester);
 
                found = true;
 
                break;
 
            }
 
        }
 

	
 
        assert!(
 
            found, "[{}] Failed to find definition for enum '{}'",
 
            self.test_name, name
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn for_union<F: Fn(UnionTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Union(definition) = definition {
 
                if definition.identifier.value.as_str() != name {
 
                    continue;
 
                }
 

	
 
                // Found union with the same name
 
                let definition_id = definition.this.upcast();
 
                let base_type = self.types.get_base_definition(&definition_id).unwrap();
 
                let tester = UnionTester::new(self.ctx(), definition, &base_type.definition.as_union());
 
                f(tester);
 
                found = true;
 
                break;
 
            }
 
        }
 

	
 
        assert!(
 
            found, "[{}] Failed to find definition for union '{}'",
 
            self.test_name, name
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn for_function<F: FnOnce(FunctionTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Procedure(definition) = definition {
 
                if definition.identifier.value.as_str() != name {
 
                    continue;
 
                }
 

	
 
                // Found function
 
                let tester = FunctionTester::new(self.ctx(), definition);
 
                f(tester);
 
                found = true;
 
                break;
 
            }
 
        }
 

	
 
        if found { return self }
 

	
 
        assert!(
 
            false, "[{}] failed to find definition for function '{}'",
 
            self.test_name, name
 
        );
 
        unreachable!();
 
    }
 

	
 
    fn ctx(&self) -> TestCtx {
 
        TestCtx{
 
            test_name: &self.test_name,
 
            modules: &self.modules,
 
            heap: &self.heap,
 
            types: &self.types,
 
            symbols: &self.symbols,
 
        }
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Utilities for successful compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct StructTester<'a> {
 
    ctx: TestCtx<'a>,
 
    ast_def: &'a StructDefinition,
 
    type_def: &'a StructType,
 
}
 

	
 
impl<'a> StructTester<'a> {
 
    fn new(ctx: TestCtx<'a>, ast_def: &'a StructDefinition, type_def: &'a StructType) -> Self {
 
        Self{ ctx, ast_def, type_def }
 
    }
 

	
 
    pub(crate) fn assert_num_fields(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.ast_def.fields.len(),
 
            "[{}] Expected {} struct fields, but found {} for {}",
 
            self.ctx.test_name, num, self.ast_def.fields.len(), self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_num_monomorphs(self, num: usize) -> Self {
 
        let (is_equal, num_encountered) = has_equal_num_monomorphs(self.ctx, num, self.ast_def.this.upcast());
 
        assert!(
 
            is_equal, "[{}] Expected {} monomorphs, but got {} for {}",
 
            self.ctx.test_name, num, num_encountered, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_has_monomorph(self, serialized_monomorph: &str) -> Self {
 
        let (has_monomorph, serialized) = has_monomorph(self.ctx, self.ast_def.this.upcast(), serialized_monomorph);
 
        assert!(
 
            has_monomorph.is_some(), "[{}] Expected to find monomorph {}, but got {} for {}",
 
            self.ctx.test_name, serialized_monomorph, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_size_alignment(mut self, monomorph: &str, size: usize, alignment: usize) -> Self {
 
        self = self.assert_has_monomorph(monomorph);
 
        let (mono_idx, _) = has_monomorph(self.ctx, self.ast_def.this.upcast(), monomorph);
 
        let type_id = mono_idx.unwrap();
 
        let mono = self.ctx.types.get_monomorph(type_id);
 

	
 
        assert!(
 
            mono.size == size && mono.alignment == alignment,
 
            "[{}] Expected (size,alignment) of ({}, {}), but got ({}, {}) for {}",
 
            self.ctx.test_name, size, alignment, mono.size, mono.alignment, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn for_field<F: Fn(StructFieldTester)>(self, name: &str, f: F) -> Self {
 
        // Find field with specified name
 
        for field in &self.ast_def.fields {
 
            if field.field.value.as_str() == name {
 
                let tester = StructFieldTester::new(self.ctx, field);
 
                f(tester);
 
                return self;
 
            }
 
        }
 

	
 
        assert!(
 
            false, "[{}] Could not find struct field '{}' for {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 
        unreachable!();
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("Struct{ name: ");
 
        v.push_str(self.ast_def.identifier.value.as_str());
 
        v.push_str(", fields: [");
 
        for (field_idx, field) in self.ast_def.fields.iter().enumerate() {
 
            if field_idx != 0 { v.push_str(", "); }
 
            v.push_str(field.field.value.as_str());
 
        }
 
        v.push_str("] }");
 
        v
 
    }
 
}
 

	
 
pub(crate) struct StructFieldTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a StructFieldDefinition,
 
}
 

	
 
impl<'a> StructFieldTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a StructFieldDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_parser_type(self, expected: &str) -> Self {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, &self.def.parser_type);
 
        assert_eq!(
 
            expected, &serialized_type,
 
            "[{}] Expected type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized_type, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, &self.def.parser_type);
 
        format!("StructField{{ name: {}, parser_type: {} }}", self.def.field.value.as_str(), serialized_type)
 
    }
 
}
 

	
 
pub(crate) struct EnumTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a EnumDefinition,
 
}
 

	
 
impl<'a> EnumTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a EnumDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_num_variants(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.def.variants.len(),
 
            "[{}] Expected {} enum variants, but found {} for {}",
 
            self.ctx.test_name, num, self.def.variants.len(), self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_num_monomorphs(self, num: usize) -> Self {
 
        let (is_equal, num_encountered) = has_equal_num_monomorphs(self.ctx, num, self.def.this.upcast());
 
        assert!(
 
            is_equal, "[{}] Expected {} monomorphs, but got {} for {}",
 
            self.ctx.test_name, num, num_encountered, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_has_monomorph(self, serialized_monomorph: &str) -> Self {
 
        let (has_monomorph, serialized) = has_monomorph(self.ctx, self.def.this.upcast(), serialized_monomorph);
 
        assert!(
 
            has_monomorph.is_some(), "[{}] Expected to find monomorph {}, but got {} for {}",
 
            self.ctx.test_name, serialized_monomorph, serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_size_alignment(mut self, serialized_monomorph: &str, size: usize, alignment: usize) -> Self {
 
        self = self.assert_has_monomorph(serialized_monomorph);
 
        let (has_monomorph, _) = has_monomorph(self.ctx, self.def.this.upcast(), serialized_monomorph);
 
        let mono_index = has_monomorph.unwrap();
 
        let mono = self.ctx.types.get_monomorph(mono_index);
 

	
 
        assert!(
 
            mono.size == size && mono.alignment == alignment,
 
            "[{}] Expected (size,alignment) of ({}, {}), but got ({}, {}) for {}",
 
            self.ctx.test_name, size, alignment, mono.size, mono.alignment, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("Enum{ name: ");
 
        v.push_str(self.def.identifier.value.as_str());
 
        v.push_str(", variants: [");
 
        for (variant_idx, variant) in self.def.variants.iter().enumerate() {
 
            if variant_idx != 0 { v.push_str(", "); }
 
            v.push_str(variant.identifier.value.as_str());
 
        }
 
        v.push_str("] }");
 
        v
 
    }
 
}
 

	
 
pub(crate) struct UnionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    ast_def: &'a UnionDefinition,
 
    type_def: &'a UnionType,
 
}
 

	
 
impl<'a> UnionTester<'a> {
 
    fn new(ctx: TestCtx<'a>, ast_def: &'a UnionDefinition, type_def: &'a UnionType) -> Self {
 
        Self{ ctx, ast_def, type_def }
 
    }
 

	
 
    pub(crate) fn assert_num_variants(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.ast_def.variants.len(),
 
            "[{}] Expected {} union variants, but found {} for {}",
 
            self.ctx.test_name, num, self.ast_def.variants.len(), self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_num_monomorphs(self, num: usize) -> Self {
 
        let (is_equal, num_encountered) = has_equal_num_monomorphs(self.ctx, num, self.ast_def.this.upcast());
 
        assert!(
 
            is_equal, "[{}] Expected {} monomorphs, but got {} for {}",
 
            self.ctx.test_name, num, num_encountered, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_has_monomorph(self, serialized_monomorph: &str) -> Self {
 
        let (has_monomorph, serialized) = has_monomorph(self.ctx, self.ast_def.this.upcast(), serialized_monomorph);
 
        assert!(
 
            has_monomorph.is_some(), "[{}] Expected to find monomorph {}, but got {} for {}",
 
            self.ctx.test_name, serialized_monomorph, serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_size_alignment(
 
        mut self, serialized_monomorph: &str,
 
        stack_size: usize, stack_alignment: usize, heap_size: usize, heap_alignment: usize
 
    ) -> Self {
 
        self = self.assert_has_monomorph(serialized_monomorph);
 
        let (mono_idx, _) = has_monomorph(self.ctx, self.ast_def.this.upcast(), serialized_monomorph);
 
        let mono_idx = mono_idx.unwrap();
 
        let mono_base = self.ctx.types.get_monomorph(mono_idx);
 
        let mono_union = mono_base.variant.as_union();
 

	
 
        assert!(
 
            stack_size == mono_base.size && stack_alignment == mono_base.alignment &&
 
                heap_size == mono_union.heap_size && heap_alignment == mono_union.heap_alignment,
 
            "[{}] Expected (stack | heap) (size, alignment) of ({}, {} | {}, {}), but got ({}, {} | {}, {}) for {}",
 
            self.ctx.test_name,
 
            stack_size, stack_alignment, heap_size, heap_alignment,
 
            mono_base.size, mono_base.alignment, mono_union.heap_size, mono_union.heap_alignment,
 
            self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("Union{ name: ");
 
        v.push_str(self.ast_def.identifier.value.as_str());
 
        v.push_str(", variants: [");
 
        for (variant_idx, variant) in self.ast_def.variants.iter().enumerate() {
 
            if variant_idx != 0 { v.push_str(", "); }
 
            v.push_str(variant.identifier.value.as_str());
 
        }
 
        v.push_str("] }");
 
        v
 
    }
 
}
 

	
 
pub(crate) struct FunctionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a ProcedureDefinition,
 
}
 

	
 
impl<'a> FunctionTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a ProcedureDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn for_variable<F: Fn(VariableTester)>(self, name: &str, f: F) -> Self {
 
        // Seek through the blocks in order to find the variable
 
        let wrapping_scope = seek_scope(
 
            self.ctx.heap, self.def.scope,
 
            &|scope| {
 
                for variable_id in scope.variables.iter().copied() {
 
                    let var = &self.ctx.heap[variable_id];
 
                    if var.identifier.value.as_str() == name {
 
                        return true;
 
                    }
 
                }
 

	
 
                false
 
            }
 
        );
 

	
 
        let mut found_local_id = None;
 
        if let Some(scope_id) = wrapping_scope {
 
            // Found the right scope, find the variable inside the block again
 
            let scope = &self.ctx.heap[scope_id];
 
            for variable_id in scope.variables.iter().copied() {
 
                let variable = &self.ctx.heap[variable_id];
 
                if variable.identifier.value.as_str() == name {
 
                    found_local_id = Some(variable_id);
 
                }
 
            }
 
        }
 

	
 
        assert!(
 
            found_local_id.is_some(), "[{}] Failed to find variable '{}' in {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 

	
 
        let local = &self.ctx.heap[found_local_id.unwrap()];
 

	
 
        // Find an instance of the variable expression so we can determine its
 
        // type.
 
        let var_expr = seek_expr_in_stmt(
 
            self.ctx.heap, self.def.body.upcast(),
 
            &|expr| {
 
                if let Expression::Variable(variable_expr) = expr {
 
                    if variable_expr.identifier.value.as_str() == name {
 
                        return true;
 
                    }
 
                }
 

	
 
                false
 
            }
 
        );
 

	
 
        assert!(
 
            var_expr.is_some(), "[{}] Failed to find variable expression of '{}' in {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 

	
 
        let var_expr = &self.ctx.heap[var_expr.unwrap()];
 

	
 
        // Construct tester and pass to tester function
 
        let tester = VariableTester::new(
 
            self.ctx, self.def.this.upcast(), local,
 
            var_expr.as_variable()
 
        );
 

	
 
        f(tester);
 

	
 
        self
 
    }
 

	
 
    /// Finds a specific expression within a function. There are two matchers:
 
    /// one outer matcher (to find a rough indication of the expression) and an
 
    /// inner matcher to find the exact expression. 
 
    ///
 
    /// The reason being that, for example, a function's body might be littered
 
    /// with addition symbols, so we first match on "some_var + some_other_var",
 
    /// and then match exactly on "+".
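    /// A hypothetical usage sketch (the snippets "a + b" and "+" are made-up
    /// illustrations, not taken from an actual test):
    ///
    /// ```ignore
    /// func_tester.for_expression_by_source("a + b", "+", |expr| {
    ///     expr.assert_concrete_type("u32");
    /// });
    /// ```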
 
    pub(crate) fn for_expression_by_source<F: Fn(ExpressionTester)>(self, outer_match: &str, inner_match: &str, f: F) -> Self {
 
        // Seek the expression in the source code
 
        assert!(outer_match.contains(inner_match), "improper testing code");
 

	
 
        let module = seek_def_in_modules(
 
            &self.ctx.heap, &self.ctx.modules, self.def.this.upcast()
 
        ).unwrap();
 

	
 
        // Find the first occurrence of the expression after the definition of
 
        // the function, we'll check that it is included in the body later.
 
        let mut outer_match_idx = self.def.span.begin.offset as usize;
 
        while outer_match_idx < module.source.input.len() {
 
            if module.source.input[outer_match_idx..].starts_with(outer_match.as_bytes()) {
 
                break;
 
            }
 
            outer_match_idx += 1;
 
        }
 

	
 
        assert!(
 
            outer_match_idx < module.source.input.len(),
 
            "[{}] Failed to find '{}' within the source that contains {}",
 
            self.ctx.test_name, outer_match, self.assert_postfix()
 
        );
 
        let inner_match_idx = outer_match_idx + outer_match.find(inner_match).unwrap();
 

	
 
        // Use the inner match index to find the expression
 
        let expr_id = seek_expr_in_stmt(
 
            &self.ctx.heap, self.def.body.upcast(),
 
            &|expr| expr.operation_span().begin.offset as usize == inner_match_idx
 
        );
 
        assert!(
 
            expr_id.is_some(),
 
            "[{}] Failed to find '{}' within the source that contains {} \
 
            (note: expression was found, but not within the specified function)",
 
            self.ctx.test_name, outer_match, self.assert_postfix()
 
        );
 
        let expr_id = expr_id.unwrap();
 

	
 
        // We have the expression, call the testing function
 
        let tester = ExpressionTester::new(
 
            self.ctx, self.def.this.upcast(), &self.ctx.heap[expr_id]
 
        );
 
        f(tester);
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn call_ok(self, expected_result: Option<Value>) -> Self {
 
        use crate::protocol::*;
 

	
 
        let (prompt, result) = self.eval_until_end();
 
        match result {
 
            Ok(_) => {
 
                assert!(
 
                    prompt.store.stack.len() > 0, // note: stack never shrinks
 
                    "[{}] No value on stack after calling function for {}",
 
                    self.ctx.test_name, self.assert_postfix()
 
                );
 
            },
 
            Err(err) => {
 
                println!("DEBUG: Formatted evaluation error:\n{}", err);
 
                assert!(
 
                    false,
 
                    "[{}] Expected call to succeed, but got {:?} for {}",
 
                    self.ctx.test_name, err, self.assert_postfix()
 
                )
 
            }
 
        }
 

	
 
        if let Some(expected_result) = expected_result {
 
            debug_assert!(expected_result.get_heap_pos().is_none(), "comparing against heap thingamajigs is not yet implemented");
 
            assert!(
 
                value::apply_equality_operator(&prompt.store, &prompt.store.stack[0], &expected_result),
 
                "[{}] Result from call was {:?}, but expected {:?} for {}",
 
                self.ctx.test_name, &prompt.store.stack[0], &expected_result, self.assert_postfix()
 
            )
 
        }
 

	
 
        self
 
    }
 

	
 
    // Keeping this simple for now, will likely change
 
    pub(crate) fn call_err(self, expected_result: &str) -> Self {
 
        let (_, result) = self.eval_until_end();
 
        match result {
 
            Ok(_) => {
 
                assert!(
 
                    false,
 
                    "[{}] Expected an error, but evaluation finished successfully for {}",
 
                    self.ctx.test_name, self.assert_postfix()
 
                );
 
            },
 
            Err(err) => {
 
                println!("DEBUG: Formatted evaluation error:\n{}", err);
 
                debug_assert_eq!(err.statements.len(), 1);
 
                assert!(
 
                    err.statements[0].message.contains(&expected_result),
 
                    "[{}] Expected error message to contain '{}', but it was '{}' for {}",
 
                    self.ctx.test_name, expected_result, err.statements[0].message, self.assert_postfix()
 
                );
 
            }
 
        }
 

	
 
        self
 
    }
 

	
 
    fn eval_until_end(&self) -> (Prompt, Result<EvalContinuation, EvalError>) {
 
        use crate::protocol::*;
 

	
 
        // Assuming the function is not polymorphic
 
        let definition_id = self.def.this;
 
        let func_type = [ConcreteTypePart::Function(definition_id, 0)];
 
        let mono_index = self.ctx.types.get_procedure_monomorph_type_id(&definition_id.upcast(), &func_type).unwrap();
 

	
 
        let mut prompt = Prompt::new(&self.ctx.types, &self.ctx.heap, definition_id, mono_index, ValueGroup::new_stack(Vec::new()));
 
        let mut call_context = FakeRunContext{};
 
        loop {
 
            let result = prompt.step(&self.ctx.types, &self.ctx.heap, &self.ctx.modules, &mut call_context);
 
            match result {
 
                Ok(EvalContinuation::Stepping) => {},
 
                _ => return (prompt, result),
 
            }
 
        }
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!("Function{{ name: {} }}", self.def.identifier.value.as_str())
 
    }
 
}
 

	
 
pub(crate) struct VariableTester<'a> {
 
    ctx: TestCtx<'a>,
 
    definition_id: DefinitionId,
 
    variable: &'a Variable,
 
    var_expr: &'a VariableExpression,
 
}
 

	
 
impl<'a> VariableTester<'a> {
 
    fn new(
 
        ctx: TestCtx<'a>, definition_id: DefinitionId, variable: &'a Variable, var_expr: &'a VariableExpression
 
    ) -> Self {
 
        Self{ ctx, definition_id, variable, var_expr }
 
    }
 

	
 
    pub(crate) fn assert_parser_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_parser_type(&mut serialized, self.ctx.heap, &self.variable.parser_type);
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected parser type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_concrete_type(self, expected: &str) -> Self {
 
        // Lookup concrete type in type table
 
        let mono_proc = get_procedure_monomorph(&self.ctx.heap, &self.ctx.types, self.definition_id);
 
        let mono_index = mono_proc.monomorph_index;
 
        let mono_data = &self.ctx.heap[self.definition_id].as_procedure().monomorphs[mono_index as usize];
 
        let expr_info = &mono_data.expr_info[self.var_expr.type_index as usize];
 
        let concrete_type = &self.ctx.types.get_monomorph(expr_info.type_id).concrete_type;
 

	
 
        // Serialize and check
 
        let serialized = concrete_type.display_name(self.ctx.heap);
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected concrete type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!("Variable{{ name: {} }}", self.variable.identifier.value.as_str())
 
    }
 
}
 

	
 
pub(crate) struct ExpressionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    definition_id: DefinitionId, // of the enclosing function/component
 
    expr: &'a Expression
 
}
 

	
 
impl<'a> ExpressionTester<'a> {
 
    fn new(
 
        ctx: TestCtx<'a>, definition_id: DefinitionId, expr: &'a Expression
 
    ) -> Self {
 
        Self{ ctx, definition_id, expr }
 
    }
 

	
 
    pub(crate) fn assert_concrete_type(self, expected: &str) -> Self {
 
        // Lookup concrete type
 
        let mono_proc = get_procedure_monomorph(&self.ctx.heap, &self.ctx.types, self.definition_id);
 
        let mono_index = mono_proc.monomorph_index;
 
        let mono_data = &self.ctx.heap[self.definition_id].as_procedure().monomorphs[mono_index as usize];
 
        let expr_info = &mono_data.expr_info[self.expr.type_index() as usize];
 
        let concrete_type = &self.ctx.types.get_monomorph(expr_info.type_id).concrete_type;
 

	
 
        // Serialize and check type
 
        let serialized = concrete_type.display_name(self.ctx.heap);
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected concrete type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Expression{{ debug: {:?} }}",
 
            self.expr
 
        )
 
    }
 
}
 

	
 
fn get_procedure_monomorph<'a>(heap: &Heap, types: &'a TypeTable, definition_id: DefinitionId) -> &'a ProcedureMonomorph {
 
    let ast_definition = heap[definition_id].as_procedure();
 
    let func_type = if ast_definition.kind == ProcedureKind::Function {
 
        [ConcreteTypePart::Function(ast_definition.this, 0)]
 
    } else {
 
        [ConcreteTypePart::Component(ast_definition.this, 0)]
 
    };
 

	
 
    let mono_index = types.get_procedure_monomorph_type_id(&definition_id, &func_type).unwrap();
 
    let mono_data = types.get_monomorph(mono_index).variant.as_procedure();
 

	
 
    mono_data
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct AstErrTester {
 
    test_name: String,
 
    error: ParseError,
 
}
 

	
 
impl AstErrTester {
 
    fn new(test_name: String, error: ParseError) -> Self {
 
        Self{ test_name, error }
 
    }
 

	
 
    pub(crate) fn error<F: Fn(ErrorTester)>(&self, f: F) {
 
        // Maybe multiple errors will be supported in the future
 
        let tester = ErrorTester{ test_name: &self.test_name, error: &self.error };
 
        f(tester)
 
    }
 
}
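
// A hypothetical usage sketch of AstErrTester and the ErrorTester below (the
// pattern and message are made up for illustration):
//
// err_tester.error(|e| {
//     e.assert_num(1)
//      .assert_occurs_at(0, "++")
//      .assert_msg_has(0, "unexpected operator");
// });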
 

	
 
//------------------------------------------------------------------------------
 
// Utilities for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct ErrorTester<'a> {
 
    test_name: &'a str,
 
    error: &'a ParseError,
 
}
 

	
 
impl<'a> ErrorTester<'a> {
 
    pub(crate) fn assert_num(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.error.statements.len(),
 
            "[{}] expected error to consist of '{}' parts, but encountered '{}' for {}",
 
            self.test_name, num, self.error.statements.len(), self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_ctx_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].context.contains(msg),
 
            "[{}] expected error statement {}'s context to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_msg_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].message.contains(msg),
 
            "[{}] expected error statement {}'s message to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    /// Seeks the index of the pattern in the context message, then checks if
 
    /// the input position corresponds to that index.
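    /// For example (hypothetical values): with an error context of
    /// "let x = y ++ z;" and pattern "++", the reported error is expected to
    /// start at column 11, the 1-based position of "++" in that context line.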
 
    pub(crate) fn assert_occurs_at(self, idx: usize, pattern: &str) -> Self {
 
        let pos = self.error.statements[idx].context.find(pattern);
 
        assert!(
 
            pos.is_some(),
 
            "[{}] incorrect occurs_at: '{}' could not be found in the context for {}",
 
            self.test_name, pattern, self.assert_postfix()
 
        );
 
        let pos = pos.unwrap();
 
        let col = self.error.statements[idx].start_column as usize;
 
        assert_eq!(
 
            pos + 1, col,
 
            "[{}] Expected error to occur at column {}, but found it at {} for {}",
 
            self.test_name, pos + 1, col, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("error: [");
 
        for (idx, stmt) in self.error.statements.iter().enumerate() {
 
            if idx != 0 {
 
                v.push_str(", ");
 
            }
 

	
 
            v.push_str(&format!("{{ context: {}, message: {} }}", &stmt.context, stmt.message));
 
        }
 
        v.push(']');
 
        v
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Generic utilities
 
//------------------------------------------------------------------------------
 

	
 
fn has_equal_num_monomorphs(ctx: TestCtx, num: usize, definition_id: DefinitionId) -> (bool, usize) {
 
    // Again: inefficient, but it's testing code
 
    let mut num_on_type = 0;
 

	
 
    for mono in &ctx.types.mono_types {
 
        match &mono.concrete_type.parts[0] {
 
            ConcreteTypePart::Instance(def_id, _) => {
 
                if *def_id == definition_id {
 
                    num_on_type += 1;
 
                }
 
            }
 
            ConcreteTypePart::Function(def_id, _) |
 
            ConcreteTypePart::Component(def_id, _) => {
 
                if def_id.upcast() == definition_id {
 
                    num_on_type += 1;
 
                }
 
            },
 
            _ => {},
 
        };
 
    }
 

	
 
    (num_on_type == num, num_on_type)
 
}
 

	
 
fn has_monomorph(ctx: TestCtx, definition_id: DefinitionId, serialized_monomorph: &str) -> (Option<TypeId>, String) {
 
    // Note: full_buffer is just for error reporting
 
    let mut full_buffer = String::new();
 
    let mut has_match = None;
 

	
 
    full_buffer.push('[');
 
    let mut append_to_full_buffer = |concrete_type: &ConcreteType, type_id: TypeId| {
 
        if full_buffer.len() != 1 {
 
            full_buffer.push_str(", ");
 
        }
 
        full_buffer.push('"');
 

	
 
        let first_idx = full_buffer.len();
 
        full_buffer.push_str(concrete_type.display_name(ctx.heap).as_str());
 
        if &full_buffer[first_idx..] == serialized_monomorph {
 
            has_match = Some(type_id);
 
        }
 

	
 
        full_buffer.push('"');
 
    };
 

	
 
    // Bit wasteful, but this is (temporary?) testing code:
 
    for (_mono_idx, mono) in ctx.types.mono_types.iter().enumerate() {
 
        let got_definition_id = match &mono.concrete_type.parts[0] {
 
            ConcreteTypePart::Instance(v, _) => *v,
 
            ConcreteTypePart::Function(v, _) |
 
            ConcreteTypePart::Component(v, _) => v.upcast(),
 
            _ => DefinitionId::new_invalid(),
 
        };
 
        if got_definition_id == definition_id {
 
            append_to_full_buffer(&mono.concrete_type, mono.type_id);
 
        }
 
    }
 

	
 
    full_buffer.push(']');
 

	
 
    (has_match, full_buffer)
 
}
 

	
 
fn serialize_parser_type(buffer: &mut String, heap: &Heap, parser_type: &ParserType) {
 
    use ParserTypeVariant as PTV;
 

	
 
    fn serialize_variant(buffer: &mut String, heap: &Heap, parser_type: &ParserType, mut idx: usize) -> usize {
 
        match &parser_type.elements[idx].variant {
 
            PTV::Void => buffer.push_str("void"),
 
            PTV::InputOrOutput => {
 
                buffer.push_str("portlike<");
 
                idx = serialize_variant(buffer, heap, parser_type, idx + 1);
 
                buffer.push('>');
 
            },
 
            PTV::ArrayLike => {
 
                idx = serialize_variant(buffer, heap, parser_type, idx + 1);
 
                buffer.push_str("[???]");
 
            },
 
            PTV::IntegerLike => buffer.push_str("integerlike"),
 
            PTV::Message => buffer.push_str(KW_TYPE_MESSAGE_STR),
 
            PTV::Bool => buffer.push_str(KW_TYPE_BOOL_STR),
 
            PTV::UInt8 => buffer.push_str(KW_TYPE_UINT8_STR),
 
            PTV::UInt16 => buffer.push_str(KW_TYPE_UINT16_STR),
 
            PTV::UInt32 => buffer.push_str(KW_TYPE_UINT32_STR),
 
            PTV::UInt64 => buffer.push_str(KW_TYPE_UINT64_STR),
 
            PTV::SInt8 => buffer.push_str(KW_TYPE_SINT8_STR),
 
            PTV::SInt16 => buffer.push_str(KW_TYPE_SINT16_STR),
 
            PTV::SInt32 => buffer.push_str(KW_TYPE_SINT32_STR),
 
            PTV::SInt64 => buffer.push_str(KW_TYPE_SINT64_STR),
 
            PTV::Character => buffer.push_str(KW_TYPE_CHAR_STR),
 
            PTV::String => buffer.push_str(KW_TYPE_STRING_STR),
 
            PTV::IntegerLiteral => buffer.push_str("int_literal"),
 
            PTV::Inferred => buffer.push_str(KW_TYPE_INFERRED_STR),
 
            PTV::Array => {
 
                idx = serialize_variant(buffer, heap, parser_type, idx + 1);
 
                buffer.push_str("[]");
 
            },
 
            PTV::Input => {
 
                buffer.push_str(KW_TYPE_IN_PORT_STR);
 
                buffer.push('<');
 
                idx = serialize_variant(buffer, heap, parser_type, idx + 1);
 
                buffer.push('>');
 
            },
 
            PTV::Output => {
 
                buffer.push_str(KW_TYPE_OUT_PORT_STR);
 
                buffer.push('<');
 
                idx = serialize_variant(buffer, heap, parser_type, idx + 1);
 
                buffer.push('>');
 
            },
 
            PTV::Tuple(num_embedded) => {
 
                buffer.push('(');
 
                for embedded_idx in 0..*num_embedded {
 
                    if embedded_idx != 0 {
 
                        buffer.push(',');
 
                    }
 
                    idx = serialize_variant(buffer, heap, parser_type, idx + 1);
 
                }
 
                buffer.push(')');
 
            },
 
            PTV::PolymorphicArgument(definition_id, poly_idx) => {
 
                let definition = &heap[*definition_id];
 
                let poly_arg = &definition.poly_vars()[*poly_idx as usize];
 
                buffer.push_str(poly_arg.value.as_str());
 
            },
 
            PTV::Definition(definition_id, num_embedded) => {
 
                let definition = &heap[*definition_id];
 
                buffer.push_str(definition.identifier().value.as_str());
 

	
 
                let num_embedded = *num_embedded;
 
                if num_embedded != 0 {
 
                    buffer.push('<');
 
                    for embedded_idx in 0..num_embedded {
 
                        if embedded_idx != 0 {
 
                            buffer.push(',');
 
                        }
 
                        idx = serialize_variant(buffer, heap, parser_type, idx + 1);
 
                    }
 
                    buffer.push('>');
 
                }
 
            }
 
        }
 

	
 
        idx
 
    }
 

	
 
    serialize_variant(buffer, heap, parser_type, 0);
 
}
 

	
 
fn seek_def_in_modules<'a>(heap: &Heap, modules: &'a [Module], def_id: DefinitionId) -> Option<&'a Module> {
 
    for module in modules {
 
        let root = &heap.protocol_descriptions[module.root_id];
 
        for definition in &root.definitions {
 
            if *definition == def_id {
 
                return Some(module)
 
            }
 
        }
 
    }
 

	
 
    None
 
}
 

	
 
fn seek_stmt<F: Fn(&Statement) -> bool>(heap: &Heap, start: StatementId, f: &F) -> Option<StatementId> {
 
    let stmt = &heap[start];
 
    if f(stmt) { return Some(start); }
 

	
 
    // This statement wasn't it, try to recurse
 
    let matched = match stmt {
 
        Statement::Block(block) => {
 
            for sub_id in &block.statements {
 
                if let Some(id) = seek_stmt(heap, *sub_id, f) {
 
                    return Some(id);
 
                }
 
            }
 

	
 
            None
 
        },
 
        Statement::Labeled(stmt) => seek_stmt(heap, stmt.body, f),
 
        Statement::If(stmt) => {
 
            if let Some(id) = seek_stmt(heap, stmt.true_case.body, f) {
 
                return Some(id);
 
            } else if let Some(false_body) = stmt.false_case {
 
                if let Some(id) = seek_stmt(heap, false_body.body, f) {
 
                    return Some(id);
 
                }
 
            }
 
            None
 
        },
 
        Statement::While(stmt) => seek_stmt(heap, stmt.body, f),
 
        Statement::Synchronous(stmt) => seek_stmt(heap, stmt.body, f),
 
        _ => None
 
    };
 

	
 
    matched
 
}
 

	
 
fn seek_scope<F: Fn(&Scope) -> bool>(heap: &Heap, start: ScopeId, f: &F) -> Option<ScopeId> {
 
    let scope = &heap[start];
 
    if f(scope) { return Some(start); }
 

	
 
    for child_scope_id in scope.nested.iter().copied() {
 
        if let Some(result) = seek_scope(heap, child_scope_id, f) {
 
            return Some(result);
 
        }
 
    }
 

	
 
    return None;
 
}
 

	
 
fn seek_expr_in_expr<F: Fn(&Expression) -> bool>(heap: &Heap, start: ExpressionId, f: &F) -> Option<ExpressionId> {
 
    let expr = &heap[start];
 
    if f(expr) { return Some(start); }
 

	
 
    match expr {
 
        Expression::Assignment(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.left, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.right, f))
 
        },
 
        Expression::Binding(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.bound_to, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.bound_from, f))
 
        }
 
        Expression::Conditional(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.test, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.true_expression, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.false_expression, f))
 
        },
 
        Expression::Binary(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.left, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.right, f))
 
        },
 
        Expression::Unary(expr) => {
 
            seek_expr_in_expr(heap, expr.expression, f)
 
        },
 
        Expression::Indexing(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.subject, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.index, f))
 
        },
 
        Expression::Slicing(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.subject, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.from_index, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.to_index, f))
 
        },
 
        Expression::Select(expr) => {
 
            seek_expr_in_expr(heap, expr.subject, f)
 
        },
 
        Expression::Literal(expr) => {
 
            if let Literal::Struct(lit) = &expr.value {
 
                for field in &lit.fields {
 
                    if let Some(id) = seek_expr_in_expr(heap, field.value, f) {
 
                        return Some(id)
 
                    }
 
                }
 
            } else if let Literal::Array(elements) = &expr.value {
 
                for element in elements {
 
                    if let Some(id) = seek_expr_in_expr(heap, *element, f) {
 
                        return Some(id)
 
                    }
 
                }
 
            }
 
            None
 
        },
 
        Expression::Cast(expr) => {
 
            seek_expr_in_expr(heap, expr.subject, f)
 
        }
 
        Expression::Call(expr) => {
 
            for arg in &expr.arguments {
 
                if let Some(id) = seek_expr_in_expr(heap, *arg, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Expression::Variable(_expr) => {
 
            None
 
        }
 
    }
 
}
 

	
 
fn seek_expr_in_stmt<F: Fn(&Expression) -> bool>(heap: &Heap, start: StatementId, f: &F) -> Option<ExpressionId> {
 
    let stmt = &heap[start];
 

	
 
    match stmt {
 
        Statement::Local(stmt) => {
 
            match stmt {
 
                LocalStatement::Memory(stmt) => seek_expr_in_expr(heap, stmt.initial_expr.upcast(), f),
 
                LocalStatement::Channel(_) => None
 
            }
 
        }
 
        Statement::Block(stmt) => {
 
            for stmt_id in &stmt.statements {
 
                if let Some(id) = seek_expr_in_stmt(heap, *stmt_id, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Statement::Labeled(stmt) => {
 
            seek_expr_in_stmt(heap, stmt.body, f)
 
        },
 
        Statement::If(stmt) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, stmt.test, f))
 
            .or_else(|| seek_expr_in_stmt(heap, stmt.true_case.body, f))
 
            .or_else(|| if let Some(false_body) = stmt.false_case {
 
                seek_expr_in_stmt(heap, false_body.body, f)
 
            } else {
 
                None
 
            })
 
        },
 
        Statement::While(stmt) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, stmt.test, f))
 
            .or_else(|| seek_expr_in_stmt(heap, stmt.body, f))
 
        },
 
        Statement::Synchronous(stmt) => {
 
            seek_expr_in_stmt(heap, stmt.body, f)
 
        },
 
        Statement::Return(stmt) => {
 
            for expr_id in &stmt.expressions {
 
                if let Some(id) = seek_expr_in_expr(heap, *expr_id, f) {
 
                    return Some(id);
 
                }
 
            }
 
            None
 
        },
 
        Statement::New(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression.upcast(), f)
 
        },
 
        Statement::Expression(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression, f)
 
        },
 
        _ => None
 
    }
 
}
 

	
 
struct FakeRunContext{}
 
impl RunContext for FakeRunContext {
 
    fn performed_put(&mut self, _port: PortId) -> bool { unreachable!() }
 
    fn performed_get(&mut self, _port: PortId) -> Option<ValueGroup> { unreachable!() }
 
    fn fires(&mut self, _port: PortId) -> Option<Value> { unreachable!() }
 
    fn performed_fork(&mut self) -> Option<bool> { unreachable!() }
 
    fn created_channel(&mut self) -> Option<(Value, Value)> { unreachable!() }
 
    fn performed_select_wait(&mut self) -> Option<u32> { unreachable!() }
 
}
 
\ No newline at end of file
src/runtime2/component/component.rs
Show inline comments
 
use crate::protocol::eval::EvalError;
 
use crate::protocol::eval::*;
 
use crate::protocol::*;
 
use crate::runtime2::*;
 
use super::CompCtx;
 
use super::{CompCtx, CompPDL};
 

	
 
pub enum CompScheduling {
 
    Immediate,
 
    Requeue,
 
    Sleep,
 
    Exit,
 
}
 

	
 
/// Generic representation of a component (as viewed by a scheduler).
 
pub(crate) trait Component {
 
    /// Called if the component is created by another component and the messages
 
    /// are being transferred between the two.
 
    fn adopt_message(&mut self, comp_ctx: &mut CompCtx, message: DataMessage);
 

	
 
    /// Called if the component receives a new message. The component is
 
    /// responsible for deciding where that message goes.
 
    fn handle_message(&mut self, sched_ctx: &mut SchedulerCtx, comp_ctx: &mut CompCtx, message: Message);
 

	
 
    /// Called if the component's routine should be executed. The return value
 
    /// can be used to indicate when the routine should be run again.
 
    fn run(&mut self, sched_ctx: &mut SchedulerCtx, comp_ctx: &mut CompCtx) -> Result<CompScheduling, EvalError>;
 
}
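
// A minimal sketch of a trait implementor (a hypothetical `NopComponent`, not
// part of the runtime): it ignores adopted and incoming messages and asks the
// scheduler to exit the first time it is run.
struct NopComponent;

impl Component for NopComponent {
    fn adopt_message(&mut self, _comp_ctx: &mut CompCtx, _message: DataMessage) {}

    fn handle_message(&mut self, _sched_ctx: &mut SchedulerCtx, _comp_ctx: &mut CompCtx, _message: Message) {}

    fn run(&mut self, _sched_ctx: &mut SchedulerCtx, _comp_ctx: &mut CompCtx) -> Result<CompScheduling, EvalError> {
        Ok(CompScheduling::Exit)
    }
}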
 

	
 
/// Creates a new component based on its definition. Meaning that if it is a
 
/// user-defined component then we set up the PDL code state. Otherwise we
 
/// construct a custom component. This does NOT take care of port and message
 
/// management.
 
pub(crate) fn create_component(
 
    protocol: &ProtocolDescription,
 
    definition_id: ProcedureDefinitionId, type_id: TypeId,
 
    arguments: ValueGroup, num_ports: usize
 
) -> Box<dyn Component> {
 
    let definition = &protocol.heap[definition_id];
 
    debug_assert!(definition.kind == ProcedureKind::Primitive || definition.kind == ProcedureKind::Composite);
 

	
 
    if definition.source.is_builtin() {
 
        // Builtin component
 
        todo!("implement")
 
    } else {
 
        // User-defined component
 
        let prompt = Prompt::new(
 
            &protocol.types, &protocol.heap,
 
            definition_id, type_id, arguments
 
        );
 
        let component = CompPDL::new(prompt, num_ports);
 
        return Box::new(component);
 
    }
 
}
 
\ No newline at end of file
src/runtime2/component/component_pdl.rs
Show inline comments
 
use crate::random::Random;
 
use crate::protocol::*;
 
use crate::protocol::ast::ProcedureDefinitionId;
 
use crate::protocol::eval::{
 
    PortId as EvalPortId, Prompt,
 
    ValueGroup, Value,
 
    EvalContinuation, EvalResult, EvalError
 
};
 

	
 
use crate::runtime2::scheduler::SchedulerCtx;
 
use crate::runtime2::communication::*;
 

	
 
use super::component::*;
 
use super::component_context::*;
 
use super::control_layer::*;
 
use super::consensus::Consensus;
 

	
 
pub enum ExecStmt {
 
    CreatedChannel((Value, Value)),
 
    PerformedPut,
 
    PerformedGet(ValueGroup),
 
    PerformedSelectWait(u32),
 
    None,
 
}
 

	
 
impl ExecStmt {
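    // Takes the stored statement out, leaving `ExecStmt::None` in its place.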
 
    fn take(&mut self) -> ExecStmt {
 
        let mut value = ExecStmt::None;
 
        std::mem::swap(self, &mut value);
 
        return value;
 
    }
 

	
 
    fn is_none(&self) -> bool {
 
        match self {
 
            ExecStmt::None => return true,
 
            _ => return false,
 
        }
 
    }
 
}
 

	
 
pub struct ExecCtx {
 
    stmt: ExecStmt,
 
}
 

	
 
impl RunContext for ExecCtx {
 
    fn performed_put(&mut self, _port: EvalPortId) -> bool {
 
        match self.stmt.take() {
 
            ExecStmt::None => return false,
 
            ExecStmt::PerformedPut => return true,
 
            _ => unreachable!(),
 
        }
 
    }
 

	
 
    fn performed_get(&mut self, _port: EvalPortId) -> Option<ValueGroup> {
 
        match self.stmt.take() {
 
            ExecStmt::None => return None,
 
            ExecStmt::PerformedGet(value) => return Some(value),
 
            _ => unreachable!(),
 
        }
 
    }
 

	
 
    fn fires(&mut self, _port: EvalPortId) -> Option<Value> {
 
        todo!("remove fires")
 
    }
 

	
 
    fn performed_fork(&mut self) -> Option<bool> {
 
        todo!("remove fork")
 
    }
 

	
 
    fn created_channel(&mut self) -> Option<(Value, Value)> {
 
        match self.stmt.take() {
 
            ExecStmt::None => return None,
 
            ExecStmt::CreatedChannel(ports) => return Some(ports),
 
            _ => unreachable!(),
 
        }
 
    }
 

	
 
    fn performed_select_wait(&mut self) -> Option<u32> {
 
        match self.stmt.take() {
 
            ExecStmt::None => return None,
 
            ExecStmt::PerformedSelectWait(selected_case) => Some(selected_case),
 
            _v => unreachable!(),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
 
pub(crate) enum Mode {
 
    NonSync, // not in sync mode
 
    Sync, // in sync mode, can interact with other components
 
    SyncEnd, // awaiting a solution, i.e. encountered the end of the sync block
 
    BlockedGet, // blocked because we need to receive a message on a particular port
 
    BlockedPut, // component is blocked because the port is blocked
 
    BlockedSelect, // waiting on message to complete the select statement
 
    StartExit, // temporary state: if encountered then we start the shutdown process
 
    BusyExit, // temporary state: waiting for Acks for all the closed ports
 
    Exit, // exiting: shutdown process started, now waiting until the reference count drops to 0
 
}
 

	
 
impl Mode {
 
    fn is_in_sync_block(&self) -> bool {
 
        use Mode::*;
 

	
 
        match self {
 
            Sync | SyncEnd | BlockedGet | BlockedPut | BlockedSelect => true,
 
            NonSync | StartExit | BusyExit | Exit => false,
 
        }
 
    }
 
}
 

	
 
struct SelectCase {
 
    involved_ports: Vec<LocalPortHandle>,
 
}
 

	
 
// TODO: @Optimize, flatten cases into single array, have index-pointers to next case
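// Bookkeeping for the select statement currently being executed: the ports
// each registered case waits on, plus a scratch buffer used to randomly pick
// one of the cases whose guard is satisfied.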
 
struct SelectState {
 
    cases: Vec<SelectCase>,
 
    next_case: u32,
 
    num_cases: u32,
 
    random: Random,
 
    candidates_workspace: Vec<usize>,
 
}
 

	
 
enum SelectDecision {
 
    None,
 
    Case(u32), // contains case index, should be passed along to PDL code
 
}
 

	
 
type InboxMain = Vec<Option<DataMessage>>;
 

	
 
impl SelectState {
 
    fn new() -> Self {
 
        return Self{
 
            cases: Vec::new(),
 
            next_case: 0,
 
            num_cases: 0,
 
            random: Random::new(),
 
            candidates_workspace: Vec::new(),
 
        }
 
    }
 

	
 
    fn handle_select_start(&mut self, num_cases: u32) {
 
        self.cases.clear();
 
        self.next_case = 0;
 
        self.num_cases = num_cases;
 
    }
 

	
 
    /// Register a port as belonging to a particular case. Because correct PDL
 
    /// code may not register the same port twice within one select guard, this
 
    /// function may return an error.
 
    fn register_select_case_port(&mut self, comp_ctx: &CompCtx, case_index: u32, _port_index: u32, port_id: PortId) -> Result<(), PortId> {
 
        // Retrieve case and port handle
 
        self.ensure_at_case(case_index);
 
        let cur_case = &mut self.cases[case_index as usize];
 
        let port_handle = comp_ctx.get_port_handle(port_id);
 
        debug_assert_eq!(cur_case.involved_ports.len(), _port_index as usize);
 

	
 
        // Make sure port wasn't added before, we disallow having the same port
 
        // in the same select guard twice.
 
        if cur_case.involved_ports.contains(&port_handle) {
 
            return Err(port_id);
 
        }
 

	
 
        cur_case.involved_ports.push(port_handle);
 
        return Ok(());
 
    }
 

	
 
    /// Notification that all ports have been registered and we should now wait
 
    /// until the appropriate messages have come in.
 
    fn handle_select_waiting_point(&mut self, inbox: &InboxMain, comp_ctx: &CompCtx) -> SelectDecision {
 
        if self.num_cases != self.next_case {
 
            // This happens when one or more cases at the end of the select
 
            // block did not register any ports.
 
            self.ensure_at_case(self.num_cases - 1);
 
        }
 

	
 
        return self.has_decision(inbox, comp_ctx);
 
    }
 

	
 
    fn handle_updated_inbox(&mut self, inbox: &InboxMain, comp_ctx: &CompCtx) -> SelectDecision {
 
        return self.has_decision(inbox, comp_ctx);
 
    }
 

	
 
    /// Internal helper, pushes empty cases in between the last case and the provided new
 
    /// case index.
 
    fn ensure_at_case(&mut self, new_case_index: u32) {
 
        // Push an empty case for all intermediate cases that were not
 
        // registered with a port.
 
        debug_assert!(new_case_index >= self.next_case && new_case_index < self.num_cases);
 
        for _ in self.next_case..new_case_index + 1 {
 
            self.cases.push(SelectCase{ involved_ports: Vec::new() });
 
        }
 
        self.next_case = new_case_index + 1;
 
    }
 

	
 
    /// Checks if a decision can be reached
 
    fn has_decision(&mut self, inbox: &InboxMain, comp_ctx: &CompCtx) -> SelectDecision {
 
        self.candidates_workspace.clear();
 
        if self.cases.is_empty() {
 
            // If there are no cases then we can immediately reach a "bogus
 
            // decision".
 
            return SelectDecision::Case(0);
 
        }
 

	
 
        // Need to check for valid case
 
        'case_loop: for (case_index, case) in self.cases.iter().enumerate() {
 
            for port_handle in case.involved_ports.iter().copied() {
 
                let port_index = comp_ctx.get_port_index(port_handle);
 
                if inbox[port_index].is_none() {
 
                    // Condition not satisfied
 
                    continue 'case_loop;
 
                }
 
            }
 

	
 
            // If here then the case guard is satisfied
 
            self.candidates_workspace.push(case_index);
 
        }
 

	
 
        if self.candidates_workspace.is_empty() {
 
            return SelectDecision::None;
 
        } else {
 
            let candidate_index = self.random.get_u64() as usize % self.candidates_workspace.len();
 
            return SelectDecision::Case(self.candidates_workspace[candidate_index] as u32);
 
        }
 
    }
 
}
 

	
 
pub(crate) struct CompPDL {
 
    pub mode: Mode,
 
    pub mode_port: PortId, // when blocked on a port
 
    pub mode_value: ValueGroup, // when blocked on a put
 
    select: SelectState,
 
    pub prompt: Prompt,
 
    pub control: ControlLayer,
 
    pub consensus: Consensus,
 
    pub sync_counter: u32,
 
    pub exec_ctx: ExecCtx,
 
    // TODO: Temporary field, simulates future plans of having one storage place
 
    //  reserved per port.
 
    // Should be same length as the number of ports. Corresponding indices imply
 
    // message is intended for that port.
 
    pub inbox_main: InboxMain,
 
    pub inbox_backup: Vec<DataMessage>,
 
}
 

	
 
impl Component for CompPDL {
 
    fn adopt_message(&mut self, comp_ctx: &mut CompCtx, message: DataMessage) {
 
        let port_handle = comp_ctx.get_port_handle(message.data_header.target_port);
 
        let port_index = comp_ctx.get_port_index(port_handle);
 
        if self.inbox_main[port_index].is_none() {
 
            self.inbox_main[port_index] = Some(message);
 
        } else {
 
            self.inbox_backup.push(message);
 
        }
 
    }
 

	
 
    fn handle_message(&mut self, sched_ctx: &mut SchedulerCtx, comp_ctx: &mut CompCtx, mut message: Message) {
 
        sched_ctx.log(&format!("handling message: {:#?}", message));
 
        if let Some(new_target) = self.control.should_reroute(&mut message) {
 
            let mut target = sched_ctx.runtime.get_component_public(new_target);
 
            target.send_message(sched_ctx, message, false); // not waking up: we schedule once we've received all PortPeerChanged Acks
 
            let _should_remove = target.decrement_users();
 
            debug_assert!(_should_remove.is_none());
 
            return;
 
        }
 

	
 
        match message {
 
            Message::Data(message) => {
 
                self.handle_incoming_data_message(sched_ctx, comp_ctx, message);
 
            },
 
            Message::Control(message) => {
 
                self.handle_incoming_control_message(sched_ctx, comp_ctx, message);
 
            },
 
            Message::Sync(message) => {
 
                self.handle_incoming_sync_message(sched_ctx, comp_ctx, message);
 
            }
 
        }
 
    }
 

	
 
    fn run(&mut self, sched_ctx: &mut SchedulerCtx, comp_ctx: &mut CompCtx) -> Result<CompScheduling, EvalError> {
 
        use EvalContinuation as EC;
 

	
 
        sched_ctx.log(&format!("Running component (mode: {:?})", self.mode));
 

	
 
        // Depending on the mode don't do anything at all, take some special
 
        // actions, or fall through and run the PDL code.
 
        match self.mode {
 
            Mode::NonSync | Mode::Sync => {
 
                // continue and run PDL code
 
            },
 
            Mode::SyncEnd | Mode::BlockedGet | Mode::BlockedPut | Mode::BlockedSelect => {
 
                return Ok(CompScheduling::Sleep);
 
            }
 
            Mode::StartExit => {
 
                self.handle_component_exit(sched_ctx, comp_ctx);
 
                return Ok(CompScheduling::Immediate);
 
            },
 
            Mode::BusyExit => {
 
                if self.control.has_acks_remaining() {
 
                    return Ok(CompScheduling::Sleep);
 
                } else {
 
                    self.mode = Mode::Exit;
 
                    return Ok(CompScheduling::Exit);
 
                }
 
            },
 
            Mode::Exit => {
 
                return Ok(CompScheduling::Exit);
 
            }
 
        }
 

	
 
        let run_result = self.execute_prompt(&sched_ctx)?;
 

	
 
        match run_result {
 
            EC::Stepping => unreachable!(), // execute_prompt runs until this is no longer returned
 
            EC::BranchInconsistent | EC::NewFork | EC::BlockFires(_) => todo!("remove these"),
 
            // Results that can be returned in sync mode
 
            EC::SyncBlockEnd => {
 
                debug_assert_eq!(self.mode, Mode::Sync);
 
                self.handle_sync_end(sched_ctx, comp_ctx);
 
                return Ok(CompScheduling::Immediate);
 
            },
 
            EC::BlockGet(port_id) => {
 
                debug_assert_eq!(self.mode, Mode::Sync);
 
                debug_assert!(self.exec_ctx.stmt.is_none());
 

	
 
                let port_id = port_id_from_eval(port_id);
 
                let port_handle = comp_ctx.get_port_handle(port_id);
 
                let port_index = comp_ctx.get_port_index(port_handle);
 
                if let Some(message) = &self.inbox_main[port_index] {
 
                    // Check if we can actually receive the message
 
                    if self.consensus.try_receive_data_message(sched_ctx, comp_ctx, message) {
 
                        // Message was received. Make sure any blocked peers and
 
                        // pending messages are handled.
 
                        let message = self.inbox_main[port_index].take().unwrap();
 
                        self.handle_received_data_message(sched_ctx, comp_ctx, port_handle);
 

	
 
                        self.exec_ctx.stmt = ExecStmt::PerformedGet(message.content);
 
                        return Ok(CompScheduling::Immediate);
 
                    } else {
 
                        todo!("handle sync failure due to message deadlock");
 
                        return Ok(CompScheduling::Sleep);
 
                    }
 
                } else {
 
                    // We need to wait
 
                    self.mode = Mode::BlockedGet;
 
                    self.mode_port = port_id;
 
                    return Ok(CompScheduling::Sleep);
 
                }
 
            },
 
            EC::Put(port_id, value) => {
 
                debug_assert_eq!(self.mode, Mode::Sync);
 
                sched_ctx.log(&format!("Putting value {:?}", value));
 
                let port_id = port_id_from_eval(port_id);
 
                let port_handle = comp_ctx.get_port_handle(port_id);
 
                let port_info = comp_ctx.get_port(port_handle);
 
                if port_info.state.is_blocked() {
 
                    self.mode = Mode::BlockedPut;
 
                    self.mode_port = port_id;
 
                    self.mode_value = value;
 
                    self.exec_ctx.stmt = ExecStmt::PerformedPut; // prepare for when we become unblocked
 
                    return Ok(CompScheduling::Sleep);
 
                } else {
 
                    self.send_data_message_and_wake_up(sched_ctx, comp_ctx, port_handle, value);
 
                    self.exec_ctx.stmt = ExecStmt::PerformedPut;
 
                    return Ok(CompScheduling::Immediate);
 
                }
 
            },
 
            EC::SelectStart(num_cases, _num_ports) => {
 
                debug_assert_eq!(self.mode, Mode::Sync);
 
                self.select.handle_select_start(num_cases);
 
                return Ok(CompScheduling::Requeue);
 
            },
 
            EC::SelectRegisterPort(case_index, port_index, port_id) => {
 
                debug_assert_eq!(self.mode, Mode::Sync);
 
                let port_id = port_id_from_eval(port_id);
 
                if let Err(_err) = self.select.register_select_case_port(comp_ctx, case_index, port_index, port_id) {
 
                    todo!("handle registering a port multiple times");
 
                }
 
                return Ok(CompScheduling::Immediate);
 
            },
 
            EC::SelectWait => {
 
                debug_assert_eq!(self.mode, Mode::Sync);
 
                let select_decision = self.select.handle_select_waiting_point(&self.inbox_main, comp_ctx);
 
                if let SelectDecision::Case(case_index) = select_decision {
 
                    // Reached a conclusion, so we can continue immediately
 
                    self.exec_ctx.stmt = ExecStmt::PerformedSelectWait(case_index);
 
                    self.mode = Mode::Sync;
 
                    return Ok(CompScheduling::Immediate);
 
                } else {
 
                    // No decision yet
 
                    self.mode = Mode::BlockedSelect;
 
                    return Ok(CompScheduling::Sleep);
 
                }
 
            },
 
            // Results that can be returned outside of sync mode
 
            EC::ComponentTerminated => {
 
                self.mode = Mode::StartExit; // next call we'll take care of the exit
 
                return Ok(CompScheduling::Immediate);
 
            },
 
            EC::SyncBlockStart => {
 
                debug_assert_eq!(self.mode, Mode::NonSync);
 
                self.handle_sync_start(sched_ctx, comp_ctx);
 
                return Ok(CompScheduling::Immediate);
 
            },
 
            EC::NewComponent(definition_id, type_id, arguments) => {
 
                debug_assert_eq!(self.mode, Mode::NonSync);
 
                self.create_component_and_transfer_ports(
 
                    sched_ctx, comp_ctx,
 
                    definition_id, type_id, arguments
 
                );
 
                return Ok(CompScheduling::Requeue);
 
            },
 
            EC::NewChannel => {
 
                debug_assert_eq!(self.mode, Mode::NonSync);
 
                debug_assert!(self.exec_ctx.stmt.is_none());
 
                let channel = comp_ctx.create_channel();
 
                self.exec_ctx.stmt = ExecStmt::CreatedChannel((
 
                    Value::Output(port_id_to_eval(channel.putter_id)),
 
                    Value::Input(port_id_to_eval(channel.getter_id))
 
                ));
 
                self.inbox_main.push(None);
 
                self.inbox_main.push(None);
 
                return Ok(CompScheduling::Immediate);
 
            }
 
        }
 
    }
 
}
 

	
 
impl CompPDL {
 
    pub(crate) fn new(initial_state: Prompt, num_ports: usize) -> Self {
 
        let mut inbox_main = Vec::new();
 
        inbox_main.reserve(num_ports);
 
        for _ in 0..num_ports {
 
            inbox_main.push(None);
 
        }
 

	
 
        return Self{
 
            mode: Mode::NonSync,
 
            mode_port: PortId::new_invalid(),
 
            mode_value: ValueGroup::default(),
 
            select: SelectState::new(),
 
            prompt: initial_state,
 
            control: ControlLayer::default(),
 
            consensus: Consensus::new(),
 
            sync_counter: 0,
 
            exec_ctx: ExecCtx{
 
                stmt: ExecStmt::None,
 
            },
 
            inbox_main,
 
            inbox_backup: Vec::new(),
 
        }
 
    }
 

	
 
    // -------------------------------------------------------------------------
 
    // Running component and handling changes in global component state
 
    // -------------------------------------------------------------------------
 

	
 
    fn execute_prompt(&mut self, sched_ctx: &SchedulerCtx) -> EvalResult {
 
        let mut step_result = EvalContinuation::Stepping;
 
        while let EvalContinuation::Stepping = step_result {
 
            step_result = self.prompt.step(
 
                &sched_ctx.runtime.protocol.types, &sched_ctx.runtime.protocol.heap,
 
                &sched_ctx.runtime.protocol.modules, &mut self.exec_ctx,
 
            )?;
 
        }
 

	
 
        return Ok(step_result)
 
    }
 

	
 
    fn handle_sync_start(&mut self, sched_ctx: &SchedulerCtx, comp_ctx: &mut CompCtx) {
 
        sched_ctx.log("Component starting sync mode");
 
        self.consensus.notify_sync_start(comp_ctx);
 
        for message in self.inbox_main.iter() {
 
            if let Some(message) = message {
 
                self.consensus.handle_new_data_message(comp_ctx, message);
 
            }
 
        }
 
        debug_assert_eq!(self.mode, Mode::NonSync);
 
        self.mode = Mode::Sync;
 
    }
 

	
 
    /// Handles end of sync. The conclusion to the sync round might arise
 
    /// immediately (and be handled immediately), or might come later through
 
    /// messaging. In any case the component should be scheduled again
 
    /// immediately.
 
    fn handle_sync_end(&mut self, sched_ctx: &SchedulerCtx, comp_ctx: &mut CompCtx) {
 
        sched_ctx.log("Component ending sync mode (now waiting for solution)");
 
        let decision = self.consensus.notify_sync_end(sched_ctx, comp_ctx);
 
        self.mode = Mode::SyncEnd;
 
        self.handle_sync_decision(sched_ctx, comp_ctx, decision);
 
    }
 

	
 
    /// Handles decision from the consensus round. This will cause a change in
 
    /// the internal `Mode`, such that the next call to `run` can take the
 
    /// appropriate next steps.
 
    fn handle_sync_decision(&mut self, sched_ctx: &SchedulerCtx, _comp_ctx: &mut CompCtx, decision: SyncRoundDecision) {
 
        sched_ctx.log(&format!("Handling sync decision: {:?} (in mode {:?})", decision, self.mode));
 
        let is_success = match decision {
 
            SyncRoundDecision::None => {
 
                // No decision yet
 
                return;
 
            },
 
            SyncRoundDecision::Solution => true,
 
            SyncRoundDecision::Failure => false,
 
        };
 

	
 
        // If here then we've reached a decision
 
        debug_assert_eq!(self.mode, Mode::SyncEnd);
 
        if is_success {
 
            self.mode = Mode::NonSync;
 
            self.consensus.notify_sync_decision(decision);
 
        } else {
 
            self.mode = Mode::StartExit;
 
        }
 
    }
 

	
 
    fn handle_component_exit(&mut self, sched_ctx: &SchedulerCtx, comp_ctx: &mut CompCtx) {
 
        sched_ctx.log("Component exiting");
 
        debug_assert_eq!(self.mode, Mode::StartExit);
 
        self.mode = Mode::BusyExit;
 

	
 
        // Doing this by index, then retrieving the handle is a bit ridiculous,
 
        // but Rust is being Rust with its borrowing rules.
 
        for port_index in 0..comp_ctx.num_ports() {
 
            let port = comp_ctx.get_port_by_index_mut(port_index);
 
            if port.state == PortState::Closed {
 
                // Already closed, or in the process of being closed
 
                continue;
 
            }
 

	
 
            // Mark as closed
 
            let port_id = port.self_id;
 
            port.state = PortState::Closed;
 

	
 
            // Notify peer of closing
 
            let port_handle = comp_ctx.get_port_handle(port_id);
 
            let (peer, message) = self.control.initiate_port_closing(port_handle, comp_ctx);
 
            let peer_info = comp_ctx.get_peer(peer);
 
            peer_info.handle.send_message(sched_ctx, Message::Control(message), true);
 
        }
 
    }
 

	
 
    // -------------------------------------------------------------------------
 
    // Handling messages
 
    // -------------------------------------------------------------------------
 

	
 
    fn send_data_message_and_wake_up(&mut self, sched_ctx: &SchedulerCtx, comp_ctx: &CompCtx, source_port_handle: LocalPortHandle, value: ValueGroup) {
 
        let port_info = comp_ctx.get_port(source_port_handle);
 
        let peer_handle = comp_ctx.get_peer_handle(port_info.peer_comp_id);
 
        let peer_info = comp_ctx.get_peer(peer_handle);
 
        let annotated_message = self.consensus.annotate_data_message(comp_ctx, port_info, value);
 
        peer_info.handle.send_message(sched_ctx, Message::Data(annotated_message), true);
 
    }
 

	
 
    /// Handles a message that came in through the public inbox. This function
 
    /// will handle putting it in the correct place, and potentially blocking
 
    /// the port in case too many messages are being received.
 
    fn handle_incoming_data_message(&mut self, sched_ctx: &SchedulerCtx, comp_ctx: &mut CompCtx, message: DataMessage) {
 
        // Whatever we do, glean information from headers in message
 
        if self.mode.is_in_sync_block() {
 
            self.consensus.handle_new_data_message(comp_ctx, &message);
 
        }
 

	
 
        // Check if we can insert it directly into the storage associated with
 
        // the port
 
        let target_port_id = message.data_header.target_port;
 
        let port_handle = comp_ctx.get_port_handle(target_port_id);
 
        let port_index = comp_ctx.get_port_index(port_handle);
 
        if self.inbox_main[port_index].is_none() {
 
            self.inbox_main[port_index] = Some(message);
 

	
 
            // After direct insertion, check if this component's execution is 
 
            // blocked on receiving a message on that port
 
            debug_assert!(!comp_ctx.get_port(port_handle).state.is_blocked()); // because we could insert directly
 
            if self.mode == Mode::BlockedGet && self.mode_port == target_port_id {
 
                // We were indeed blocked
 
                self.mode = Mode::Sync;
 
                self.mode_port = PortId::new_invalid();
 
            } else if self.mode == Mode::BlockedSelect {
 
                let select_decision = self.select.handle_updated_inbox(&self.inbox_main, comp_ctx);
 
                if let SelectDecision::Case(case_index) = select_decision {
 
                    self.exec_ctx.stmt = ExecStmt::PerformedSelectWait(case_index);
 
                    self.mode = Mode::Sync;
 
                }
 
            }
 
            
 
            return;
 
        }
 

	
 
        // The direct inbox is full, so the port will become (or was already) blocked
 
        let port_info = comp_ctx.get_port_mut(port_handle);
 
        debug_assert!(port_info.state == PortState::Open || port_info.state.is_blocked());
 

	
 
        if port_info.state == PortState::Open {
 
            comp_ctx.set_port_state(port_handle, PortState::BlockedDueToFullBuffers);
 
            let (peer_handle, message) =
 
                self.control.initiate_port_blocking(comp_ctx, port_handle);
 

	
 
            let peer = comp_ctx.get_peer(peer_handle);
 
            peer.handle.send_message(sched_ctx, Message::Control(message), true);
 
        }
 

	
 
        // But we still need to remember the message, so:
 
        self.inbox_backup.push(message);
 
    }
 

	
 
    /// Called after a message has been handed off from the inbox to the PDL
 
    /// code. We check whether more messages are waiting for the port and, if
 
    /// not, handle the case where the port might have been blocked previously.
 
    fn handle_received_data_message(&mut self, sched_ctx: &SchedulerCtx, comp_ctx: &mut CompCtx, port_handle: LocalPortHandle) {
 
        let port_index = comp_ctx.get_port_index(port_handle);
 
        debug_assert!(self.inbox_main[port_index].is_none()); // this function should be called after the message is taken out
 

	
 
        // Check for any more messages
 
        let port_info = comp_ctx.get_port(port_handle);
 
        for message_index in 0..self.inbox_backup.len() {
 
            let message = &self.inbox_backup[message_index];
 
            if message.data_header.target_port == port_info.self_id {
 
                // One more message for this port
 
                let message = self.inbox_backup.remove(message_index);
 
                debug_assert!(comp_ctx.get_port(port_handle).state.is_blocked()); // since we had >1 message on the port
 
                self.inbox_main[port_index] = Some(message);
 

	
 
                return;
 
            }
 
        }
 

	
 
        // Did not have any more messages. So if we were blocked, then we need
 
        // to send the "unblock" message.
 
        if port_info.state == PortState::BlockedDueToFullBuffers {
 
            comp_ctx.set_port_state(port_handle, PortState::Open);
 
            let (peer_handle, message) = self.control.cancel_port_blocking(comp_ctx, port_handle);
 
            let peer_info = comp_ctx.get_peer(peer_handle);
 
            peer_info.handle.send_message(sched_ctx, Message::Control(message), true);
 
        }
 
    }
 

	
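 
    /// Handles a control message from the public inbox: an `Ack` is forwarded
 
    /// to the control layer, while the remaining variants block, unblock,
 
    /// close or re-peer the targeted port, sending an `Ack` back where required.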
 
    fn handle_incoming_control_message(&mut self, sched_ctx: &SchedulerCtx, comp_ctx: &mut CompCtx, message: ControlMessage) {
 
        // Little local utility to send an Ack
 
        fn send_control_ack_message(sched_ctx: &SchedulerCtx, comp_ctx: &CompCtx, causer_id: ControlId, peer_handle: LocalPeerHandle) {
 
            let peer_info = comp_ctx.get_peer(peer_handle);
 
            peer_info.handle.send_message(sched_ctx, Message::Control(ControlMessage{
 
                id: causer_id,
 
                sender_comp_id: comp_ctx.id,
 
                target_port_id: None,
 
                content: ControlMessageContent::Ack,
 
            }), true);
 
        }
 

	
 
        // Handle the content of the control message, and optionally Ack it
 
        match message.content {
 
            ControlMessageContent::Ack => {
 
                self.handle_ack(sched_ctx, comp_ctx, message.id);
 
            },
 
            ControlMessageContent::BlockPort(port_id) => {
 
                // One of our messages was accepted, but the port should be
 
                // blocked.
 
                let port_handle = comp_ctx.get_port_handle(port_id);
 
                let port_info = comp_ctx.get_port(port_handle);
 
                debug_assert_eq!(port_info.kind, PortKind::Putter);
 
                if port_info.state == PortState::Open {
 
                    // only when open: we don't do this when the port is closed, nor when it is blocked due to a peer change
 
                    comp_ctx.set_port_state(port_handle, PortState::BlockedDueToFullBuffers);
 
                }
 
            },
 
            ControlMessageContent::ClosePort(port_id) => {
 
                // Request to close the port. We immediately comply and remove
 
                // the component handle as well
 
                let port_handle = comp_ctx.get_port_handle(port_id);
 
                let peer_comp_id = comp_ctx.get_port(port_handle).peer_comp_id;
 
                let peer_handle = comp_ctx.get_peer_handle(peer_comp_id);
 

	
 
                // One exception to sending an `Ack` is if we just closed the
 
                // port ourselves, meaning that the two `ClosePort` messages
 
                // crossed each other in transit.
 
                if let Some(control_id) = self.control.has_close_port_entry(port_handle, comp_ctx) {
 
                    self.handle_ack(sched_ctx, comp_ctx, control_id);
 
                } else {
 
                    send_control_ack_message(sched_ctx, comp_ctx, message.id, peer_handle);
 
                    comp_ctx.remove_peer(sched_ctx, port_handle, peer_comp_id, false); // do not remove if closed
 
                    comp_ctx.set_port_state(port_handle, PortState::Closed); // now set to closed
 
                }
 
            },
 
            ControlMessageContent::UnblockPort(port_id) => {
 
                // We were previously blocked (or already closed)
 
                let port_handle = comp_ctx.get_port_handle(port_id);
 
                let port_info = comp_ctx.get_port(port_handle);
 
                debug_assert_eq!(port_info.kind, PortKind::Putter);
 
                if port_info.state == PortState::BlockedDueToFullBuffers {
 
                    self.handle_unblock_port_instruction(sched_ctx, comp_ctx, port_handle);
 
                }
 
            },
 
            ControlMessageContent::PortPeerChangedBlock(port_id) => {
 
                // The peer of our port has just changed. So we are asked to
 
                // temporarily block the port (while our original recipient is
 
                // potentially rerouting some of the in-flight messages) and
 
                // Ack. Then we wait for the `unblock` call.
 
                debug_assert_eq!(message.target_port_id, Some(port_id));
 
                let port_handle = comp_ctx.get_port_handle(port_id);
 
                comp_ctx.set_port_state(port_handle, PortState::BlockedDueToPeerChange);
 

	
 
                let port_info = comp_ctx.get_port(port_handle);
 
                let peer_handle = comp_ctx.get_peer_handle(port_info.peer_comp_id);
 

	
 
                send_control_ack_message(sched_ctx, comp_ctx, message.id, peer_handle);
 
            },
 
            ControlMessageContent::PortPeerChangedUnblock(new_port_id, new_comp_id) => {
 
                let port_handle = comp_ctx.get_port_handle(message.target_port_id.unwrap());
 
                let port_info = comp_ctx.get_port(port_handle);
 
                debug_assert!(port_info.state == PortState::BlockedDueToPeerChange);
 
                let old_peer_id = port_info.peer_comp_id;
 

	
 
                comp_ctx.remove_peer(sched_ctx, port_handle, old_peer_id, false);
 

	
 
                let port_info = comp_ctx.get_port_mut(port_handle);
 
                port_info.peer_comp_id = new_comp_id;
 
                port_info.peer_port_id = new_port_id;
 
                comp_ctx.add_peer(port_handle, sched_ctx, new_comp_id, None);
 
                self.handle_unblock_port_instruction(sched_ctx, comp_ctx, port_handle);
 
            }
 
        }
 
    }
 

	
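 
    /// Feeds a synchronization message to the consensus algorithm and acts on
 
    /// the resulting round decision (which may still be `None`).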
 
    fn handle_incoming_sync_message(&mut self, sched_ctx: &SchedulerCtx, comp_ctx: &mut CompCtx, message: SyncMessage) {
 
        let decision = self.consensus.receive_sync_message(sched_ctx, comp_ctx, message);
 
        self.handle_sync_decision(sched_ctx, comp_ctx, decision);
 
    }
 

	
 
    /// Little helper that notifies the control layer of an `Ack`, and takes the
 
    /// appropriate subsequent action
 
    fn handle_ack(&mut self, sched_ctx: &SchedulerCtx, comp_ctx: &mut CompCtx, control_id: ControlId) {
 
        let mut to_ack = control_id;
 
        loop {
 
            let (action, new_to_ack) = self.control.handle_ack(to_ack, sched_ctx, comp_ctx);
 
            match action {
 
                AckAction::SendMessage(target_comp, message) => {
 
                    // FIX @NoDirectHandle
 
                    let mut handle = sched_ctx.runtime.get_component_public(target_comp);
 
                    handle.send_message(sched_ctx, Message::Control(message), true);
 
                    let _should_remove = handle.decrement_users();
 
                    debug_assert!(_should_remove.is_none());
 
                },
 
                AckAction::ScheduleComponent(to_schedule) => {
 
                    // FIX @NoDirectHandle
 
                    let mut handle = sched_ctx.runtime.get_component_public(to_schedule);
 

	
 
                    // Note that the component was intentionally created in a
 
                    // non-sleeping state, so we can enqueue it directly
 
                    debug_assert!(!handle.sleeping.load(std::sync::atomic::Ordering::Acquire));
 
                    let key = unsafe{ to_schedule.upgrade() };
 
                    sched_ctx.runtime.enqueue_work(key);
 
                    let _should_remove = handle.decrement_users();
 
                    debug_assert!(_should_remove.is_none());
 
                },
 
                AckAction::None => {}
 
            }
 

	
 
            match new_to_ack {
 
                Some(new_to_ack) => to_ack = new_to_ack,
 
                None => break,
 
            }
 
        }
 
    }
 

	
 
    // -------------------------------------------------------------------------
 
    // Handling ports
 
    // -------------------------------------------------------------------------
 

	
 
    /// Unblocks a port, potentially continuing execution of the component, in
 
    /// response to a message that told us to unblock a previously blocked port.
 
    fn handle_unblock_port_instruction(&mut self, sched_ctx: &SchedulerCtx, comp_ctx: &mut CompCtx, port_handle: LocalPortHandle) {
 
        let port_info = comp_ctx.get_port_mut(port_handle);
 
        let port_id = port_info.self_id;
 
        debug_assert!(port_info.state.is_blocked());
 
        port_info.state = PortState::Open;
 

	
 
        if self.mode == Mode::BlockedPut && port_id == self.mode_port {
 
            // We were blocked on the port that just became unblocked, so
 
            // send the message.
 
            debug_assert_eq!(port_info.kind, PortKind::Putter);
 
            let mut replacement = ValueGroup::default();
 
            std::mem::swap(&mut replacement, &mut self.mode_value);
 
            self.send_data_message_and_wake_up(sched_ctx, comp_ctx, port_handle, replacement);
 

	
 
            self.mode = Mode::Sync;
 
            self.mode_port = PortId::new_invalid();
 
        }
 
    }
 

	
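 
    /// Creates a new component from the given procedure and arguments. All
 
    /// ports appearing in the arguments are transferred from the creator to
 
    /// the created component: they receive new IDs, their pending messages
 
    /// are moved along, and peers are updated. If any peer is a remote
 
    /// component the control layer first reroutes the affected channels;
 
    /// otherwise the new component is scheduled immediately.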
 
    fn create_component_and_transfer_ports(
 
        &mut self,
 
        sched_ctx: &SchedulerCtx, creator_ctx: &mut CompCtx,
 
        definition_id: ProcedureDefinitionId, type_id: TypeId, mut arguments: ValueGroup
 
    ) {
 
        struct PortPair{
 
            creator_handle: LocalPortHandle,
 
            creator_id: PortId,
 
            created_handle: LocalPortHandle,
 
            created_id: PortId,
 
        }
 
        let mut port_id_pairs = Vec::new();
 

	
 
        let reservation = sched_ctx.runtime.start_create_pdl_component();
 
        let mut created_ctx = CompCtx::new(&reservation);
 

	
 
        // Take all the port IDs that are in the `args` (and currently belong to
 
        // the creator component) and translate them into new IDs that are
 
        // associated with the component we're about to create
 
        let mut arg_iter = ValueGroupIter::new(&mut arguments);
 
        let mut arg_iter = ValueGroupPortIter::new(&mut arguments);
 
        while let Some(port_reference) = arg_iter.next() {
 
            // Create port entry for new component
 
            let creator_port_id = port_reference.id;
 
            let creator_port_handle = creator_ctx.get_port_handle(creator_port_id);
 
            let creator_port = creator_ctx.get_port(creator_port_handle);
 
            let created_port_handle = created_ctx.add_port(
 
                creator_port.peer_comp_id, creator_port.peer_port_id,
 
                creator_port.kind, creator_port.state
 
            );
 
            let created_port = created_ctx.get_port(created_port_handle);
 
            let created_port_id = created_port.self_id;
 

	
 
            port_id_pairs.push(PortPair{
 
                creator_handle: creator_port_handle,
 
                creator_id: creator_port_id,
 
                created_handle: created_port_handle,
 
                created_id: created_port_id,
 
            });
 

	
 
            // Modify value in arguments (bit dirty, but double vec in ValueGroup causes lifetime issues)
 
            let arg_value = if let Some(heap_pos) = port_reference.heap_pos {
 
                &mut arg_iter.group.regions[heap_pos][port_reference.index]
 
            } else {
 
                &mut arg_iter.group.values[port_reference.index]
 
            };
 
            match arg_value {
 
                Value::Input(id) => *id = port_id_to_eval(created_port_id),
 
                Value::Output(id) => *id = port_id_to_eval(created_port_id),
 
                _ => unreachable!(),
 
            }
 
        }
 

	
 
        // For each transferred port pair set their peer components to the
 
        // correct values. This will only change the values for the ports of
 
        // the new component.
 
        let mut created_component_has_remote_peers = false;
 

	
 
        for pair in port_id_pairs.iter() {
 
            let creator_port_info = creator_ctx.get_port(pair.creator_handle);
 
            let created_port_info = created_ctx.get_port_mut(pair.created_handle);
 

	
 
            if created_port_info.peer_comp_id == creator_ctx.id {
 
                // Port peer is owned by the creator as well
 
                let created_peer_port_index = port_id_pairs
 
                    .iter()
 
                    .position(|v| v.creator_id == creator_port_info.peer_port_id);
 
                match created_peer_port_index {
 
                    Some(created_peer_port_index) => {
 
                        // Peer port moved to the new component as well. So
 
                        // adjust IDs appropriately.
 
                        let peer_pair = &port_id_pairs[created_peer_port_index];
 
                        created_port_info.peer_port_id = peer_pair.created_id;
 
                        created_port_info.peer_comp_id = reservation.id();
 
                        todo!("either add 'self peer', or remove that idea from Ctx altogether")
 
                    },
 
                    None => {
 
                        // Peer port remains with creator component.
 
                        created_port_info.peer_comp_id = creator_ctx.id;
 
                        created_ctx.add_peer(pair.created_handle, sched_ctx, creator_ctx.id, None);
 
                    }
 
                }
 
            } else {
 
                // Peer is a different component. We'll deal with sending the
 
                // appropriate messages later
 
                let peer_handle = creator_ctx.get_peer_handle(created_port_info.peer_comp_id);
 
                let peer_info = creator_ctx.get_peer(peer_handle);
 
                created_ctx.add_peer(pair.created_handle, sched_ctx, peer_info.id, Some(&peer_info.handle));
 
                created_component_has_remote_peers = true;
 
            }
 
        }
 

	
 
        // We'll now actually turn our reservation for a new component into an
 
        // actual component. Note that we initialize it as "not sleeping" as
 
        // its initial scheduling might be performed based on `Ack`s in response
 
        // to message exchanges between remote peers.
 
        let prompt = Prompt::new(
 
            &sched_ctx.runtime.protocol.types, &sched_ctx.runtime.protocol.heap,
 
            definition_id, type_id, arguments,
 
        );
 
        let component = CompPDL::new(prompt, port_id_pairs.len());
 
        let (created_key, component) = sched_ctx.runtime.finish_create_pdl_component(
 
            reservation, component, created_ctx, false,
 
        );
 

	
 
        // Now modify the creator's ports: remove every transferred port and
 
        // potentially remove the peer component.
 
        for pair in port_id_pairs.iter() {
 
            // Remove peer if appropriate
 
            let creator_port_info = creator_ctx.get_port(pair.creator_handle);
 
            let creator_port_index = creator_ctx.get_port_index(pair.creator_handle);
 
            let creator_peer_comp_id = creator_port_info.peer_comp_id;
 
            creator_ctx.remove_peer(sched_ctx, pair.creator_handle, creator_peer_comp_id, false);
 
            creator_ctx.remove_port(pair.creator_handle);
 

	
 
            // Transfer any messages
 
            if let Some(mut message) = self.inbox_main.remove(creator_port_index) {
 
                message.data_header.target_port = pair.created_id;
 
                component.component.adopt_message(&mut component.ctx, message)
 
            }
 

	
 
            let mut message_index = 0;
 
            while message_index < self.inbox_backup.len() {
 
                let message = &self.inbox_backup[message_index];
 
                if message.data_header.target_port == pair.creator_id {
 
                    // transfer message
 
                    let mut message = self.inbox_backup.remove(message_index);
 
                    message.data_header.target_port = pair.created_id;
 
                    component.component.adopt_message(&mut component.ctx, message);
 
                } else {
 
                    message_index += 1;
 
                }
 
            }
 

	
 
            // Handle potential channel between creator and created component
 
            let created_port_info = component.ctx.get_port(pair.created_handle);
 

	
 
            if created_port_info.peer_comp_id == creator_ctx.id {
 
                let peer_port_handle = creator_ctx.get_port_handle(created_port_info.peer_port_id);
 
                let peer_port_info = creator_ctx.get_port_mut(peer_port_handle);
 
                peer_port_info.peer_comp_id = component.ctx.id;
 
                peer_port_info.peer_port_id = created_port_info.self_id;
 
                creator_ctx.add_peer(peer_port_handle, sched_ctx, component.ctx.id, None);
 
            }
 
        }
 

	
 
        // By now all ports and messages have been transferred. If there are any
 
        // peers that need to be notified about this new component, then we
 
        // initiate the protocol that will notify everyone here.
 
        if created_component_has_remote_peers {
 
            let created_ctx = &component.ctx;
 
            let schedule_entry_id = self.control.add_schedule_entry(created_ctx.id);
 
            for pair in port_id_pairs.iter() {
 
                let port_info = created_ctx.get_port(pair.created_handle);
 
                if port_info.peer_comp_id != creator_ctx.id && port_info.peer_comp_id != created_ctx.id {
 
                    let message = self.control.add_reroute_entry(
 
                        creator_ctx.id, port_info.peer_port_id, port_info.peer_comp_id,
 
                        pair.creator_id, pair.created_id, created_ctx.id,
 
                        schedule_entry_id
 
                    );
 
                    let peer_handle = created_ctx.get_peer_handle(port_info.peer_comp_id);
 
                    let peer_info = created_ctx.get_peer(peer_handle);
 
                    peer_info.handle.send_message(sched_ctx, message, true);
 
                }
 
            }
 
        } else {
 
            // No remote peers, so the new component can be scheduled immediately
 
            sched_ctx.runtime.enqueue_work(created_key);
 
        }
 
    }
 
}
 

	
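 
// Conversion helpers between the runtime's `PortId` and the evaluator's
 
// `EvalPortId` representation of the same identifier.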
 
#[inline]
 
fn port_id_from_eval(port_id: EvalPortId) -> PortId {
 
    return PortId(port_id.id);
 
}
 

	
 
#[inline]
 
fn port_id_to_eval(port_id: PortId) -> EvalPortId {
 
    return EvalPortId{ id: port_id.0 };
 
}
 

	
 
/// Recursively goes through the value group, attempting to find ports.
 
/// Duplicates will only be added once.
 
pub(crate) fn find_ports_in_value_group(value_group: &ValueGroup, ports: &mut Vec<PortId>) {
 
    // Helper to check a value for a port and recurse if needed.
 
    fn find_port_in_value(group: &ValueGroup, value: &Value, ports: &mut Vec<PortId>) {
 
        match value {
 
            Value::Input(port_id) | Value::Output(port_id) => {
 
                // This is an actual port
 
                let cur_port = PortId(port_id.id);
 
                for prev_port in ports.iter() {
 
                    if *prev_port == cur_port {
 
                        // Already added
 
                        return;
 
                    }
 
                }
 

	
 
                ports.push(cur_port);
 
            },
 
            Value::Array(heap_pos) |
 
            Value::Message(heap_pos) |
 
            Value::String(heap_pos) |
 
            Value::Struct(heap_pos) |
 
            Value::Union(_, heap_pos) => {
 
                // Reference to some dynamic thing which might contain ports,
 
                // so recurse
 
                let heap_region = &group.regions[*heap_pos as usize];
 
                for embedded_value in heap_region {
 
                    find_port_in_value(group, embedded_value, ports);
 
                }
 
            },
 
            _ => {}, // values we don't care about
 
        }
 
    }
 

	
 
    // Clear the ports, then scan all the available values
 
    ports.clear();
 
    for value in &value_group.values {
 
        find_port_in_value(value_group, value, ports);
 
    }
 
}
 

	
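 
/// Iterator over all port values (`Value::Input`/`Value::Output`) stored in a
 
/// `ValueGroup`, visiting both the stack values and any referenced heap
 
/// regions. Yields a `ValueGroupPortRef` describing where each port was found.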
 
struct ValueGroupIter<'a> {
 
struct ValueGroupPortIter<'a> {
 
    group: &'a mut ValueGroup,
 
    heap_stack: Vec<(usize, usize)>,
 
    index: usize,
 
}
 

	
 
impl<'a> ValueGroupIter<'a> {
 
impl<'a> ValueGroupPortIter<'a> {
 
    fn new(group: &'a mut ValueGroup) -> Self {
 
        return Self{ group, heap_stack: Vec::new(), index: 0 }
 
    }
 
}
 

	
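 
/// Location of a single port value within a `ValueGroup`: either on the value
 
/// stack (`heap_pos` is `None`) or at `index` within heap region `heap_pos`.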
 
struct ValueGroupPortRef {
 
    id: PortId,
 
    heap_pos: Option<usize>, // otherwise: on stack
 
    index: usize,
 
}
 

	
 
impl<'a> Iterator for ValueGroupIter<'a> {
 
impl<'a> Iterator for ValueGroupPortIter<'a> {
 
    type Item = ValueGroupPortRef;
 

	
 
    fn next(&mut self) -> Option<Self::Item> {
 
        // Enter loop that keeps iterating until a port is found
 
        loop {
 
            if let Some(pos) = self.heap_stack.last() {
 
                let (heap_pos, region_index) = *pos;
 
                if region_index >= self.group.regions[heap_pos].len() {
 
                    self.heap_stack.pop();
 
                    continue;
 
                }
 

	
 
                let value = &self.group.regions[heap_pos][region_index];
 
                self.heap_stack.last_mut().unwrap().1 += 1;
 

	
 
                match value {
 
                    Value::Input(id) | Value::Output(id) => {
 
                        let id = PortId(id.id);
 
                        return Some(ValueGroupPortRef{
 
                            id,
 
                            heap_pos: Some(heap_pos),
 
                            index: region_index,
 
                        });
 
                    },
 
                    _ => {},
 
                }
 

	
 
                if let Some(heap_pos) = value.get_heap_pos() {
 
                    self.heap_stack.push((heap_pos as usize, 0));
 
                }
 
            } else {
 
                if self.index >= self.group.values.len() {
 
                    return None;
 
                }
 

	
 
                let value = &mut self.group.values[self.index];
 
                self.index += 1;
 

	
 
                match value {
 
                    Value::Input(id) | Value::Output(id) => {
 
                        let id = PortId(id.id);
 
                        return Some(ValueGroupPortRef{
 
                            id,
 
                            heap_pos: None,
 
                            index: self.index - 1
 
                        });
 
                    },
 
                    _ => {},
 
                }
 

	
 
                // Not a port, check if we need to enter a heap region
 
                if let Some(heap_pos) = value.get_heap_pos() {
 
                    self.heap_stack.push((heap_pos as usize, 0));
 
                } // else: just consider the next value
 
            }
 
        }
 
    }
 
}
 
\ No newline at end of file
std/std.global.pdl
Show inline comments
 
new file 100644
 
#module std.global
 

	
 
func get<T>(in<T>) -> T #builtin
 
func put<T>(out<T>, T value) -> #type_void #builtin
 
func fires<T>(#type_portlike<T>) -> bool #builtin
 
func create<T>(#type_integerlike length) -> T[] #builtin
 
func length<T>(#type_arraylike<T> array) -> u32 #builtin
 
func assert(bool condition) -> #type_void #builtin
 
func print(string message) -> #type_void #builtin
 
\ No newline at end of file