Changeset - f7c20c8ac012
[Not reviewed]
MH - 2021-04-02 19:04:34
contact@maxhenger.nl
WIP on enum inference and monomorph testing
8 files changed:
src/protocol/ast.rs
 
@@ -882,431 +882,440 @@ pub enum ParserTypeVariant {
 
    Input(ParserTypeId), // typed input endpoint of a channel
 
    Output(ParserTypeId), // typed output endpoint of a channel
 
    Symbolic(SymbolicParserType), // symbolic type (definition or polyarg)
 
}
 

	
 
impl ParserTypeVariant {
 
    pub(crate) fn supports_polymorphic_args(&self) -> bool {
 
        use ParserTypeVariant::*;
 
        match self {
 
            Message | Bool | Byte | Short | Int | Long | String | IntegerLiteral | Inferred => false,
 
            _ => true
 
        }
 
    }
 
}
 

	
 
/// ParserType is a specification of a type during the parsing phase and initial
 
/// linker/validator phase of the compilation process. These types may be
 
/// (partially) inferred or represent literals (e.g. an integer whose byte size is
 
/// not yet determined).
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ParserType {
 
    pub this: ParserTypeId,
 
    pub pos: InputPosition,
 
    pub variant: ParserTypeVariant,
 
}
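
A minimal sketch of how these nodes nest, assuming `heap.alloc_parser_type(..)` stands in for whatever allocation helper the parser actually uses (hypothetical, not shown in this changeset): a declaration such as `in<auto> rx` becomes an outer `Input` node whose child id points at an `Inferred` node.

// Hedged sketch (hypothetical allocation helper): the nodes behind `in<auto>`.
let inner: ParserTypeId = heap.alloc_parser_type(ParserTypeVariant::Inferred);
let outer: ParserTypeId = heap.alloc_parser_type(ParserTypeVariant::Input(inner));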
 

	
 
/// SymbolicParserType is the specification of a symbolic type. During the
 
/// parsing phase we will only store the identifier of the type. During the
 
/// validation phase we will determine whether it refers to a user-defined type,
 
/// or a polymorphic argument. After the validation phase it may still be the
 
/// case that the resulting `variant` will not pass the typechecker.
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct SymbolicParserType {
 
    // Phase 1: parser
 
    pub identifier: NamespacedIdentifier,
 
    // Phase 2: validation/linking (for types in function/component bodies) and
 
    //  type table construction (for embedded types of structs/unions)
 
    pub poly_args2: Vec<ParserTypeId>, // taken from identifier or inferred
 
    pub variant: Option<SymbolicParserTypeVariant>
 
}
 

	
 
/// Specifies whether the symbolic type points to an actual user-defined type,
 
/// or whether it points to a polymorphic argument within the definition (e.g.
 
/// a defined variable `T var` within a function `int func<T>()`).
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum SymbolicParserTypeVariant {
 
    Definition(DefinitionId),
 
    // TODO: figure out if I need the DefinitionId here
 
    PolyArg(DefinitionId, usize), // index of polyarg in the definition
 
}
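
To make the two outcomes concrete, a resolved symbolic type can be read back roughly like this (a minimal sketch, not part of this changeset; the `None` case simply means validation/linking has not run yet):

// Hedged sketch: interpreting the result of symbol resolution.
fn describe_symbolic(sym: &SymbolicParserType) -> &'static str {
    match &sym.variant {
        Some(SymbolicParserTypeVariant::Definition(_def_id)) =>
            "a user-defined type",
        Some(SymbolicParserTypeVariant::PolyArg(_def_id, _poly_idx)) =>
            "a polymorphic variable of the enclosing definition",
        None =>
            "not yet resolved",
    }
}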
 

	
 
/// ConcreteType is the representation of a type after resolving symbolic types
 
/// and performing type inference
 
#[derive(Debug, Clone, Copy, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
 
pub enum ConcreteTypePart {
 
    // Markers for the use of polymorphic types within a procedure's body that
 
    // refer to polymorphic variables on the procedure's definition. Different
 
    // from markers in the `InferenceType`, these will not contain nested types.
 
    Marker(usize),
 
    // Special types (cannot be explicitly constructed by the programmer)
 
    Void,
 
    // Builtin types without nested types
 
    Message,
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // Builtin types with one nested type
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // User defined type with any number of nested types
 
    Instance(DefinitionId, usize),
 
}
 

	
 
#[derive(Debug, Clone, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
 
pub struct ConcreteType {
 
    pub(crate) parts: Vec<ConcreteTypePart>
 
}
 

	
 
impl Default for ConcreteType {
 
    fn default() -> Self {
 
        Self{ parts: Vec::new() }
 
    }
 
}
 

	
 
impl ConcreteType {
 
    pub(crate) fn has_marker(&self) -> bool {
 
        self.parts
 
            .iter()
 
            .any(|p| matches!(p, ConcreteTypePart::Marker(_)))
 
    }
 
}
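
The `parts` vector stores the type tree in prefix order: a part with nested types is immediately followed by its children (one child for Array/Slice/Input/Output, the given number of children for Instance), which is also how `write_concrete_type` in the printer walks it. A minimal within-crate sketch:

// Hedged sketch of the prefix encoding used by `ConcreteType::parts`.
fn concrete_type_examples() -> (ConcreteType, ConcreteType) {
    use ConcreteTypePart::*;
    // `int[]`: the Array part is immediately followed by its element type.
    let int_array = ConcreteType{ parts: vec![Array, Int] };
    // `in<msg>`: an input port part followed by its payload type.
    let input_of_msg = ConcreteType{ parts: vec![Input, Message] };
    debug_assert!(!int_array.has_marker());
    (int_array, input_of_msg)
}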
 

	
 
// TODO: Remove at some point
 
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
 
pub enum PrimitiveType {
 
    Unassigned,
 
    Input,
 
    Output,
 
    Message,
 
    Boolean,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
 
pub struct Type {
 
    pub primitive: PrimitiveType,
 
    pub array: bool,
 
}
 

	
 
#[allow(dead_code)]
 
impl Type {
 
    pub const UNASSIGNED: Type = Type { primitive: PrimitiveType::Unassigned, array: false };
 

	
 
    pub const INPUT: Type = Type { primitive: PrimitiveType::Input, array: false };
 
    pub const OUTPUT: Type = Type { primitive: PrimitiveType::Output, array: false };
 
    pub const MESSAGE: Type = Type { primitive: PrimitiveType::Message, array: false };
 
    pub const BOOLEAN: Type = Type { primitive: PrimitiveType::Boolean, array: false };
 
    pub const BYTE: Type = Type { primitive: PrimitiveType::Byte, array: false };
 
    pub const SHORT: Type = Type { primitive: PrimitiveType::Short, array: false };
 
    pub const INT: Type = Type { primitive: PrimitiveType::Int, array: false };
 
    pub const LONG: Type = Type { primitive: PrimitiveType::Long, array: false };
 

	
 
    pub const INPUT_ARRAY: Type = Type { primitive: PrimitiveType::Input, array: true };
 
    pub const OUTPUT_ARRAY: Type = Type { primitive: PrimitiveType::Output, array: true };
 
    pub const MESSAGE_ARRAY: Type = Type { primitive: PrimitiveType::Message, array: true };
 
    pub const BOOLEAN_ARRAY: Type = Type { primitive: PrimitiveType::Boolean, array: true };
 
    pub const BYTE_ARRAY: Type = Type { primitive: PrimitiveType::Byte, array: true };
 
    pub const SHORT_ARRAY: Type = Type { primitive: PrimitiveType::Short, array: true };
 
    pub const INT_ARRAY: Type = Type { primitive: PrimitiveType::Int, array: true };
 
    pub const LONG_ARRAY: Type = Type { primitive: PrimitiveType::Long, array: true };
 
}
 

	
 
impl Display for Type {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        match &self.primitive {
 
            PrimitiveType::Unassigned => {
 
                write!(f, "unassigned")?;
 
            }
 
            PrimitiveType::Input => {
 
                write!(f, "in")?;
 
            }
 
            PrimitiveType::Output => {
 
                write!(f, "out")?;
 
            }
 
            PrimitiveType::Message => {
 
                write!(f, "msg")?;
 
            }
 
            PrimitiveType::Boolean => {
 
                write!(f, "boolean")?;
 
            }
 
            PrimitiveType::Byte => {
 
                write!(f, "byte")?;
 
            }
 
            PrimitiveType::Short => {
 
                write!(f, "short")?;
 
            }
 
            PrimitiveType::Int => {
 
                write!(f, "int")?;
 
            }
 
            PrimitiveType::Long => {
 
                write!(f, "long")?;
 
            }
 
        }
 
        if self.array {
 
            write!(f, "[]")
 
        } else {
 
            Ok(())
 
        }
 
    }
 
}
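
Illustrative outputs of this Display impl, derived directly from the constants above:

// Hedged sketch: expected Display formatting.
fn type_display_examples() {
    assert_eq!(format!("{}", Type::BYTE), "byte");
    assert_eq!(format!("{}", Type::BYTE_ARRAY), "byte[]");
    assert_eq!(format!("{}", Type::INPUT_ARRAY), "in[]");
}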
 

	
 
type LiteralCharacter = Vec<u8>;
 
type LiteralInteger = i64; // TODO: @int_literal
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Literal {
 
    Null, // message
 
    True,
 
    False,
 
    Character(LiteralCharacter),
 
    Integer(LiteralInteger),
 
    Struct(LiteralStruct),
 
    Enum(LiteralEnum),
 
}
 

	
 
impl Literal {
 
    pub(crate) fn as_struct(&self) -> &LiteralStruct {
 
        if let Literal::Struct(literal) = self{
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Struct", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_struct_mut(&mut self) -> &mut LiteralStruct {
 
        if let Literal::Struct(literal) = self{
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Struct", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_enum(&self) -> &LiteralEnum {
 
        if let Literal::Enum(literal) = self {
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Enum", self)
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LiteralStructField {
 
    // Phase 1: parser
 
    pub(crate) identifier: Identifier,
 
    pub(crate) value: ExpressionId,
 
    // Phase 2: linker
 
    pub(crate) field_idx: usize, // in struct definition
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LiteralStruct {
 
    // Phase 1: parser
 
    pub(crate) identifier: NamespacedIdentifier,
 
    pub(crate) fields: Vec<LiteralStructField>,
 
    // Phase 2: linker
 
    pub(crate) poly_args2: Vec<ParserTypeId>, // taken from identifier once linked to a definition
 
    pub(crate) definition: Option<DefinitionId>
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LiteralEnum {
 
    // Phase 1: parser
 
    pub(crate) identifier: NamespacedIdentifier,
 
    // Phase 2: linker
 
    pub(crate) poly_args2: Vec<ParserTypeId>, // taken from identifier once linked to a definition
 
    pub(crate) definition: Option<DefinitionId>,
 
    pub(crate) variant_idx: usize, // as present in the type table
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Method {
 
    Get,
 
    Put,
 
    Fires,
 
    Create,
 
    Symbolic(MethodSymbolic)
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct MethodSymbolic {
 
    pub(crate) identifier: NamespacedIdentifier,
 
    pub(crate) definition: Option<DefinitionId>
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Field {
 
    Length,
 
    Symbolic(FieldSymbolic),
 
}
 
impl Field {
 
    pub fn is_length(&self) -> bool {
 
        match self {
 
            Field::Length => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    pub fn as_symbolic(&self) -> &FieldSymbolic {
 
        match self {
 
            Field::Symbolic(v) => v,
 
            _ => unreachable!("attempted to get Field::Symbolic from {:?}", self)
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct FieldSymbolic {
 
    // Phase 1: Parser
 
    pub(crate) identifier: Identifier,
 
    // Phase 3: Typing
 
    pub(crate) definition: Option<DefinitionId>,
 
    pub(crate) field_idx: usize,
 
}
 

	
 
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
 
pub enum Scope {
 
    Definition(DefinitionId),
 
    Regular(BlockStatementId),
 
    Synchronous((SynchronousStatementId, BlockStatementId)),
 
}
 

	
 
impl Scope {
 
    pub fn is_block(&self) -> bool {
 
        match &self {
 
            Scope::Definition(_) => false,
 
            Scope::Regular(_) => true,
 
            Scope::Synchronous(_) => true,
 
        }
 
    }
 
    pub fn to_block(&self) -> BlockStatementId {
 
        match &self {
 
            Scope::Regular(id) => *id,
 
            Scope::Synchronous((_, id)) => *id,
 
            _ => panic!("unable to get BlockStatement from Scope")
 
        }
 
    }
 
}
 

	
 
pub trait VariableScope {
 
    fn parent_scope(&self, h: &Heap) -> Option<Scope>;
 
    fn get_variable(&self, h: &Heap, id: &Identifier) -> Option<VariableId>;
 
}
 

	
 
impl VariableScope for Scope {
 
    fn parent_scope(&self, h: &Heap) -> Option<Scope> {
 
        match self {
 
            Scope::Definition(def) => h[*def].parent_scope(h),
 
            Scope::Regular(stmt) => h[*stmt].parent_scope(h),
 
            Scope::Synchronous((stmt, _)) => h[*stmt].parent_scope(h),
 
        }
 
    }
 
    fn get_variable(&self, h: &Heap, id: &Identifier) -> Option<VariableId> {
 
        match self {
 
            Scope::Definition(def) => h[*def].get_variable(h, id),
 
            Scope::Regular(stmt) => h[*stmt].get_variable(h, id),
 
            Scope::Synchronous((stmt, _)) => h[*stmt].get_variable(h, id),
 
        }
 
    }
 
}
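
Together these two methods support the usual lexical lookup: check the current scope, then keep walking parent scopes until a variable is found or the definition scope is exhausted. A minimal sketch of such a lookup (not part of this changeset):

// Hedged sketch: resolving an identifier by walking enclosing scopes.
fn find_variable(h: &Heap, mut scope: Scope, id: &Identifier) -> Option<VariableId> {
    loop {
        if let Some(var_id) = scope.get_variable(h, id) {
            return Some(var_id);
        }
        match scope.parent_scope(h) {
            Some(parent) => scope = parent,
            None => return None,
        }
    }
}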
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Variable {
 
    Parameter(Parameter),
 
    Local(Local),
 
}
 

	
 
impl Variable {
 
    pub fn identifier(&self) -> &Identifier {
 
        match self {
 
            Variable::Parameter(var) => &var.identifier,
 
            Variable::Local(var) => &var.identifier,
 
        }
 
    }
 
    pub fn is_parameter(&self) -> bool {
 
        match self {
 
            Variable::Parameter(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub fn as_parameter(&self) -> &Parameter {
 
        match self {
 
            Variable::Parameter(result) => result,
 
            _ => panic!("Unable to cast `Variable` to `Parameter`"),
 
        }
 
    }
 
    pub fn as_local(&self) -> &Local {
 
        match self {
 
            Variable::Local(result) => result,
 
            _ => panic!("Unable to cast `Variable` to `Local`"),
 
        }
 
    }
 
    pub fn as_local_mut(&mut self) -> &mut Local {
 
        match self {
 
            Variable::Local(result) => result,
 
            _ => panic!("Unable to cast 'Variable' to 'Local'"),
 
        }
 
    }
 
}
 

	
 
impl SyntaxElement for Variable {
 
    fn position(&self) -> InputPosition {
 
        match self {
 
            Variable::Parameter(decl) => decl.position(),
 
            Variable::Local(decl) => decl.position(),
 
        }
 
    }
 
}
 

	
 
/// TODO: Remove distinction between parameter/local and add an enum to indicate
 
///     the distinction between the two
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct Parameter {
 
    pub this: ParameterId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub parser_type: ParserTypeId,
 
    pub identifier: Identifier,
 
}
 

	
 
impl SyntaxElement for Parameter {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct Local {
 
    pub this: LocalId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub parser_type: ParserTypeId,
 
    pub identifier: Identifier,
 
    // Phase 2: linker
 
    pub relative_pos_in_block: u32,
 
}
 
impl SyntaxElement for Local {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Definition {
 
    Struct(StructDefinition),
 
    Enum(EnumDefinition),
 
    Component(Component),
 
    Function(Function),
 
}
 

	
 
impl Definition {
 
    pub fn is_struct(&self) -> bool {
 
        match self {
 
            Definition::Struct(_) => true,
 
            _ => false
 
        }
 
    }
 
    pub fn as_struct(&self) -> &StructDefinition {
 
        match self {
 
            Definition::Struct(result) => result,
src/protocol/ast_printer.rs
 
@@ -87,753 +87,780 @@ impl<'a> KV<'a> {
 
        self
 
    }
 

	
 
    fn with_s_val(self, val: &str) -> Self {
 
        self.temp_val.push_str(val);
 
        self
 
    }
 

	
 
    fn with_disp_val<D: Display>(self, val: &D) -> Self {
 
        self.temp_val.push_str(&format!("{}", val));
 
        self
 
    }
 

	
 
    fn with_debug_val<D: Debug>(self, val: &D) -> Self {
 
        self.temp_val.push_str(&format!("{:?}", val));
 
        self
 
    }
 

	
 
    fn with_ascii_val(self, val: &[u8]) -> Self {
 
        self.temp_val.push_str(&*String::from_utf8_lossy(val));
 
        self
 
    }
 

	
 
    fn with_opt_disp_val<D: Display>(self, val: Option<&D>) -> Self {
 
        match val {
 
            Some(v) => { self.temp_val.push_str(&format!("Some({})", v)); },
 
            None => { self.temp_val.push_str("None"); }
 
        }
 
        self
 
    }
 

	
 
    fn with_opt_ascii_val(self, val: Option<&[u8]>) -> Self {
 
        match val {
 
            Some(v) => {
 
                self.temp_val.push_str("Some(");
 
                self.temp_val.push_str(&*String::from_utf8_lossy(v));
 
                self.temp_val.push(')');
 
            },
 
            None => {
 
                self.temp_val.push_str("None");
 
            }
 
        }
 
        self
 
    }
 

	
 
    fn with_custom_val<F: Fn(&mut String)>(mut self, val_fn: F) -> Self {
 
        val_fn(&mut self.temp_val);
 
        self
 
    }
 
}
 

	
 
impl<'a> Drop for KV<'a> {
 
    fn drop(&mut self) {
 
        // Prefix and indent
 
        if let Some((prefix, id)) = &self.prefix {
 
            self.buffer.push_str(&format!("{}[{:04}]", prefix, id));
 
        } else {
 
            self.buffer.push_str("           ");
 
        }
 

	
 
        for _ in 0..self.indent * INDENT {
 
            self.buffer.push(' ');
 
        }
 

	
 
        // Leading dash
 
        self.buffer.push_str("- ");
 

	
 
        // Key and value
 
        self.buffer.push_str(self.temp_key);
 
        if self.temp_val.is_empty() {
 
            self.buffer.push(':');
 
        } else {
 
            self.buffer.push_str(": ");
 
            self.buffer.push_str(&self.temp_val);
 
        }
 
        self.buffer.push('\n');
 
    }
 
}
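
For orientation, dropping a fully built KV appends exactly one line: the prefix with a zero-padded four-digit id (or an eleven-space filler when there is no prefix), the indentation, a leading dash, and then `key` or `key: value`. Illustrative rendering with a made-up prefix and id:

// Hypothetical output lines (exact spacing depends on INDENT), not code from this changeset:
//   Stmt[0007]    - Next: None
//                 - Parameters: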
 

	
 
pub(crate) struct ASTWriter {
 
    cur_definition: Option<DefinitionId>,
 
    buffer: String,
 
    temp1: String,
 
    temp2: String,
 
}
 

	
 
impl ASTWriter {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            cur_definition: None,
 
            buffer: String::with_capacity(4096),
 
            temp1: String::with_capacity(256),
 
            temp2: String::with_capacity(256),
 
        }
 
    }
 
    pub(crate) fn write_ast<W: IOWrite>(&mut self, w: &mut W, heap: &Heap) {
 
        for root_id in heap.protocol_descriptions.iter().map(|v| v.this) {
 
            self.write_module(heap, root_id);
 
            w.write_all(self.buffer.as_bytes()).expect("flush buffer");
 
            self.buffer.clear();
 
        }
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level module writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_module(&mut self, heap: &Heap, root_id: RootId) {
 
        self.kv(0).with_id(PREFIX_ROOT_ID, root_id.index)
 
            .with_s_key("Module");
 

	
 
        let root = &heap[root_id];
 
        self.kv(1).with_s_key("Pragmas");
 
        for pragma_id in &root.pragmas {
 
            self.write_pragma(heap, *pragma_id, 2);
 
        }
 

	
 
        self.kv(1).with_s_key("Imports");
 
        for import_id in &root.imports {
 
            self.write_import(heap, *import_id, 2);
 
        }
 

	
 
        self.kv(1).with_s_key("Definitions");
 
        for def_id in &root.definitions {
 
            self.write_definition(heap, *def_id, 2);
 
        }
 
    }
 

	
 
    fn write_pragma(&mut self, heap: &Heap, pragma_id: PragmaId, indent: usize) {
 
        match &heap[pragma_id] {
 
            Pragma::Version(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaVersion")
 
                    .with_disp_val(&pragma.version);
 
            },
 
            Pragma::Module(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaModule")
 
                    .with_ascii_val(&pragma.value);
 
            }
 
        }
 
    }
 

	
 
    fn write_import(&mut self, heap: &Heap, import_id: ImportId, indent: usize) {
 
        let import = &heap[import_id];
 
        let indent2 = indent + 1;
 

	
 
        match import {
 
            Import::Module(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportModule");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&import.module_name);
 
                self.kv(indent2).with_s_key("Alias").with_ascii_val(&import.alias.value);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(import.module_id.as_ref().map(|v| &v.index));
 
            },
 
            Import::Symbols(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportSymbol");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&import.module_name);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(import.module_id.as_ref().map(|v| &v.index));
 

	
 
                self.kv(indent2).with_s_key("Symbols");
 

	
 
                let indent3 = indent2 + 1;
 
                let indent4 = indent3 + 1;
 
                for symbol in &import.symbols {
 
                    self.kv(indent3).with_s_key("AliasedSymbol");
 
                    self.kv(indent4).with_s_key("Name").with_ascii_val(&symbol.name.value);
 
                    self.kv(indent4).with_s_key("Alias").with_ascii_val(&symbol.alias.value);
 
                    self.kv(indent4).with_s_key("Definition")
 
                        .with_opt_disp_val(symbol.definition_id.as_ref().map(|v| &v.index));
 
                }
 
            }
 
        }
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level definition writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_definition(&mut self, heap: &Heap, def_id: DefinitionId, indent: usize) {
 
        self.cur_definition = Some(def_id);
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 
        let indent4 = indent3 + 1;
 

	
 
        match &heap[def_id] {
 
            Definition::Struct(_) => todo!("implement Definition::Struct"),
 
            Definition::Enum(def) => {
 
                self.kv(indent).with_id(PREFIX_ENUM_ID, def.this.0.index)
 
                    .with_s_key("DefinitionEnum");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_ascii_val(&poly_var_id.value);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Variants");
 
                for variant in &def.variants {
 
                    self.kv(indent3).with_s_key("Variant");
 
                    self.kv(indent4).with_s_key("Name")
 
                        .with_ascii_val(&variant.identifier.value);
 
                    // TODO: Attached value
 
                }
 
            },
 
            Definition::Function(def) => {
 
                self.kv(indent).with_id(PREFIX_FUNCTION_ID, def.this.0.index)
 
                    .with_s_key("DefinitionFunction");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_ascii_val(&poly_var_id.value);
 
                }
 

	
 
                self.kv(indent2).with_s_key("ReturnParserType").with_custom_val(|s| write_parser_type(s, heap, &heap[def.return_type]));
 

	
 
                self.kv(indent2).with_s_key("Parameters");
 
                for param_id in &def.parameters {
 
                    self.write_parameter(heap, *param_id, indent3);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, def.body, indent3);
 
            },
 
            Definition::Component(def) => {
 
                self.kv(indent).with_id(PREFIX_COMPONENT_ID,def.this.0.index)
 
                    .with_s_key("DefinitionComponent");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                self.kv(indent2).with_s_key("Variant").with_debug_val(&def.variant);
 

	
 
                self.kv(indent2).with_s_key("PolymorphicVariables");
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar").with_ascii_val(&poly_var_id.value);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parameters");
 
                for param_id in &def.parameters {
 
                    self.write_parameter(heap, *param_id, indent3)
 
                }
 

	
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, def.body, indent3);
 
            }
 
        }
 
    }
 

	
 
    fn write_parameter(&mut self, heap: &Heap, param_id: ParameterId, indent: usize) {
 
        let indent2 = indent + 1;
 
        let param = &heap[param_id];
 

	
 
        self.kv(indent).with_id(PREFIX_PARAMETER_ID, param_id.0.index)
 
            .with_s_key("Parameter");
 
        self.kv(indent2).with_s_key("Name").with_ascii_val(&param.identifier.value);
 
        self.kv(indent2).with_s_key("ParserType").with_custom_val(|w| write_parser_type(w, heap, &heap[param.parser_type]));
 
    }
 

	
 
    fn write_stmt(&mut self, heap: &Heap, stmt_id: StatementId, indent: usize) {
 
        let stmt = &heap[stmt_id];
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 

	
 
        match stmt {
 
            Statement::Block(stmt) => {
 
                self.kv(indent).with_id(PREFIX_BLOCK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Block");
 

	
 
                for stmt_id in &stmt.statements {
 
                    self.write_stmt(heap, *stmt_id, indent2);
 
                }
 
            },
 
            Statement::Local(stmt) => {
 
                match stmt {
 
                    LocalStatement::Channel(stmt) => {
 
                        self.kv(indent).with_id(PREFIX_CHANNEL_STMT_ID, stmt.this.0.0.index)
 
                            .with_s_key("LocalChannel");
 

	
 
                        self.kv(indent2).with_s_key("From");
 
                        self.write_local(heap, stmt.from, indent3);
 
                        self.kv(indent2).with_s_key("To");
 
                        self.write_local(heap, stmt.to, indent3);
 
                        self.kv(indent2).with_s_key("Next")
 
                            .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
                    },
 
                    LocalStatement::Memory(stmt) => {
 
                        self.kv(indent).with_id(PREFIX_MEM_STMT_ID, stmt.this.0.0.index)
 
                            .with_s_key("LocalMemory");
 

	
 
                        self.kv(indent2).with_s_key("Variable");
 
                        self.write_local(heap, stmt.variable, indent3);
 
                        self.kv(indent2).with_s_key("Next")
 
                            .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
                    }
 
                }
 
            },
 
            Statement::Skip(stmt) => {
 
                self.kv(indent).with_id(PREFIX_SKIP_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Skip");
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Labeled(stmt) => {
 
                self.kv(indent).with_id(PREFIX_LABELED_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Labeled");
 

	
 
                self.kv(indent2).with_s_key("Label").with_ascii_val(&stmt.label.value);
 
                self.kv(indent2).with_s_key("Statement");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::If(stmt) => {
 
                self.kv(indent).with_id(PREFIX_IF_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("If");
 

	
 
                self.kv(indent2).with_s_key("EndIf")
 
                    .with_opt_disp_val(stmt.end_if.as_ref().map(|v| &v.0.index));
 

	
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, stmt.test, indent3);
 

	
 
                self.kv(indent2).with_s_key("TrueBody");
 
                self.write_stmt(heap, stmt.true_body, indent3);
 

	
 
                self.kv(indent2).with_s_key("FalseBody");
 
                self.write_stmt(heap, stmt.false_body, indent3);
 
            },
 
            Statement::EndIf(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDIF_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndIf");
 
                self.kv(indent2).with_s_key("StartIf").with_disp_val(&stmt.start_if.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::While(stmt) => {
 
                self.kv(indent).with_id(PREFIX_WHILE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("While");
 

	
 
                self.kv(indent2).with_s_key("EndWhile")
 
                    .with_opt_disp_val(stmt.end_while.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("InSync")
 
                    .with_opt_disp_val(stmt.in_sync.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, stmt.test, indent3);
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::EndWhile(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDWHILE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndWhile");
 
                self.kv(indent2).with_s_key("StartWhile").with_disp_val(&stmt.start_while.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Break(stmt) => {
 
                self.kv(indent).with_id(PREFIX_BREAK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Break");
 
                self.kv(indent2).with_s_key("Label")
 
                    .with_opt_ascii_val(stmt.label.as_ref().map(|v| v.value.as_slice()));
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::Continue(stmt) => {
 
                self.kv(indent).with_id(PREFIX_CONTINUE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Continue");
 
                self.kv(indent2).with_s_key("Label")
 
                    .with_opt_ascii_val(stmt.label.as_ref().map(|v| v.value.as_slice()));
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::Synchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_SYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Synchronous");
 
                self.kv(indent2).with_s_key("EndSync")
 
                    .with_opt_disp_val(stmt.end_sync.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::EndSynchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDSYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndSynchronous");
 
                self.kv(indent2).with_s_key("StartSync").with_disp_val(&stmt.start_sync.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Return(stmt) => {
 
                self.kv(indent).with_id(PREFIX_RETURN_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Return");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression, indent3);
 
            },
 
            Statement::Assert(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ASSERT_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Assert");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression, indent3);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Goto(stmt) => {
 
                self.kv(indent).with_id(PREFIX_GOTO_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Goto");
 
                self.kv(indent2).with_s_key("Label").with_ascii_val(&stmt.label.value);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::New(stmt) => {
 
                self.kv(indent).with_id(PREFIX_NEW_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("New");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression.upcast(), indent3);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Expression(stmt) => {
 
                self.kv(indent).with_id(PREFIX_EXPR_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("ExpressionStatement");
 
                self.write_expr(heap, stmt.expression, indent2);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            }
 
        }
 
    }
 

	
 
    fn write_expr(&mut self, heap: &Heap, expr_id: ExpressionId, indent: usize) {
 
        let expr = &heap[expr_id];
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 
        let def_id = self.cur_definition.unwrap();
 

	
 
        match expr {
 
            Expression::Assignment(expr) => {
 
                self.kv(indent).with_id(PREFIX_ASSIGNMENT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("AssignmentExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Conditional(expr) => {
 
                self.kv(indent).with_id(PREFIX_CONDITIONAL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ConditionalExpr");
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, expr.test, indent3);
 
                self.kv(indent2).with_s_key("TrueExpression");
 
                self.write_expr(heap, expr.true_expression, indent3);
 
                self.kv(indent2).with_s_key("FalseExpression");
 
                self.write_expr(heap, expr.false_expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Binary(expr) => {
 
                self.kv(indent).with_id(PREFIX_BINARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("BinaryExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Unary(expr) => {
 
                self.kv(indent).with_id(PREFIX_UNARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("UnaryExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Argument");
 
                self.write_expr(heap, expr.expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Indexing(expr) => {
 
                self.kv(indent).with_id(PREFIX_INDEXING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("IndexingExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("Index");
 
                self.write_expr(heap, expr.index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Slicing(expr) => {
 
                self.kv(indent).with_id(PREFIX_SLICING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SlicingExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("FromIndex");
 
                self.write_expr(heap, expr.from_index, indent3);
 
                self.kv(indent2).with_s_key("ToIndex");
 
                self.write_expr(heap, expr.to_index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Select(expr) => {
 
                self.kv(indent).with_id(PREFIX_SELECT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SelectExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 

	
 
                match &expr.field {
 
                    Field::Length => {
 
                        self.kv(indent2).with_s_key("Field").with_s_val("length");
 
                    },
 
                    Field::Symbolic(field) => {
 
                        self.kv(indent2).with_s_key("Field").with_ascii_val(&field.identifier.value);
 
                        self.kv(indent3).with_s_key("Definition").with_opt_disp_val(field.definition.as_ref().map(|v| &v.index));
 
                        self.kv(indent3).with_s_key("Index").with_disp_val(&field.field_idx);
 
                    }
 
                }
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Array(expr) => {
 
                self.kv(indent).with_id(PREFIX_ARRAY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ArrayExpr");
 
                self.kv(indent2).with_s_key("Elements");
 
                for expr_id in &expr.elements {
 
                    self.write_expr(heap, *expr_id, indent3);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Literal(expr) => {
 
                self.kv(indent).with_id(PREFIX_CONST_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ConstantExpr");
 

	
 
                let val = self.kv(indent2).with_s_key("Value");
 
                match &expr.value {
 
                    Literal::Null => { val.with_s_val("null"); },
 
                    Literal::True => { val.with_s_val("true"); },
 
                    Literal::False => { val.with_s_val("false"); },
 
                    Literal::Character(data) => { val.with_ascii_val(data); },
 
                    Literal::Integer(data) => { val.with_disp_val(data); },
 
                    Literal::Struct(data) => {
 
                        val.with_s_val("Struct");
 
                        let indent4 = indent3 + 1;
 

	
 
                        // Polymorphic arguments
 
                        if !data.poly_args2.is_empty() {
 
                            self.kv(indent3).with_s_key("PolymorphicArguments");
 
                            for poly_arg in &data.poly_args2 {
 
                                self.kv(indent4).with_s_key("Argument")
 
                                    .with_custom_val(|v| write_parser_type(v, heap, &heap[*poly_arg]));
 
                            }
 
                        }
 

	
 
                        for field in &data.fields {
 
                            self.kv(indent3).with_s_key("Field");
 
                            self.kv(indent4).with_s_key("Name").with_ascii_val(&field.identifier.value);
 
                            self.kv(indent4).with_s_key("Index").with_disp_val(&field.field_idx);
 
                            self.kv(indent4).with_s_key("ParserType");
 
                            self.write_expr(heap, field.value, indent4 + 1);
 
                        }
 
                    },
 
                    Literal::Enum(data) => {
 
                        val.with_s_val("Enum");
 
                        let indent4 = indent3 + 1;
 

	
 
                        // Polymorphic arguments
 
                        if !data.poly_args2.is_empty() {
 
                            self.kv(indent3).with_s_key("PolymorphicArguments");
 
                            for poly_arg in &data.poly_args2 {
 
                                self.kv(indent4).with_s_key("Argument")
 
                                    .with_custom_val(|v| write_parser_type(v, heap, &heap[*poly_arg]));
 
                            }
 
                        }
 
                    }
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Call(expr) => {
 
                self.kv(indent).with_id(PREFIX_CALL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("CallExpr");
 

	
 
                // Method
 
                let method = self.kv(indent2).with_s_key("Method");
 
                match &expr.method {
 
                    Method::Get => { method.with_s_val("get"); },
 
                    Method::Put => { method.with_s_val("put"); },
 
                    Method::Fires => { method.with_s_val("fires"); },
 
                    Method::Create => { method.with_s_val("create"); },
 
                    Method::Symbolic(symbolic) => {
 
                        method.with_s_val("symbolic");
 
                        self.kv(indent3).with_s_key("Name").with_ascii_val(&symbolic.identifier.value);
 
                        self.kv(indent3).with_s_key("Definition")
 
                            .with_opt_disp_val(symbolic.definition.as_ref().map(|v| &v.index));
 
                    }
 
                }
 

	
 
                // Polymorphic arguments
 
                if !expr.poly_args.is_empty() {
 
                    self.kv(indent2).with_s_key("PolymorphicArguments");
 
                    for poly_arg in &expr.poly_args {
 
                        self.kv(indent3).with_s_key("Argument")
 
                            .with_custom_val(|v| write_parser_type(v, heap, &heap[*poly_arg]));
 
                    }
 
                }
 

	
 
                // Arguments
 
                self.kv(indent2).with_s_key("Arguments");
 
                for arg_id in &expr.arguments {
 
                    self.write_expr(heap, *arg_id, indent3);
 
                }
 

	
 
                // Parent
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Variable(expr) => {
 
                self.kv(indent).with_id(PREFIX_VARIABLE_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("VariableExpr");
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&expr.identifier.value);
 
                self.kv(indent2).with_s_key("Definition")
 
                    .with_opt_disp_val(expr.declaration.as_ref().map(|v| &v.index));
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            }
 
        }
 
    }
 

	
 
    fn write_local(&mut self, heap: &Heap, local_id: LocalId, indent: usize) {
 
        let local = &heap[local_id];
 
        let indent2 = indent + 1;
 

	
 
        self.kv(indent).with_id(PREFIX_LOCAL_ID, local_id.0.index)
 
            .with_s_key("Local");
 

	
 
        self.kv(indent2).with_s_key("Name").with_ascii_val(&local.identifier.value);
 
        self.kv(indent2).with_s_key("ParserType")
 
            .with_custom_val(|w| write_parser_type(w, heap, &heap[local.parser_type]));
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Printing Utilities
 
    //--------------------------------------------------------------------------
 

	
 
    fn kv(&mut self, indent: usize) -> KV {
 
        KV::new(&mut self.buffer, &mut self.temp1, &mut self.temp2, indent)
 
    }
 

	
 
    fn flush<W: IOWrite>(&mut self, w: &mut W) {
 
        w.write_all(self.buffer.as_bytes()).unwrap();
 
        self.buffer.clear()
 
    }
 
}
 

	
 
fn write_option<V: Display>(target: &mut String, value: Option<V>) {
 
    target.clear();
 
    match &value {
 
        Some(v) => target.push_str(&format!("Some({})", v)),
 
        None => target.push_str("None")
 
    };
 
}
 

	
 
fn write_parser_type(target: &mut String, heap: &Heap, t: &ParserType) {
 
    use ParserTypeVariant as PTV;
 

	
 
    let mut embedded = Vec::new();
 
    match &t.variant {
 
        PTV::Input(id) => { target.push_str("in"); embedded.push(*id); }
 
        PTV::Output(id) => { target.push_str("out"); embedded.push(*id) }
 
        PTV::Array(id) => { target.push_str("array"); embedded.push(*id) }
 
        PTV::Message => { target.push_str("msg"); }
 
        PTV::Bool => { target.push_str("bool"); }
 
        PTV::Byte => { target.push_str("byte"); }
 
        PTV::Short => { target.push_str("short"); }
 
        PTV::Int => { target.push_str("int"); }
 
        PTV::Long => { target.push_str("long"); }
 
        PTV::String => { target.push_str("str"); }
 
        PTV::IntegerLiteral => { target.push_str("int_lit"); }
 
        PTV::Inferred => { target.push_str("auto"); }
 
        PTV::Symbolic(symbolic) => {
 
            target.push_str(&String::from_utf8_lossy(&symbolic.identifier.value));
 
            match symbolic.variant {
 
                Some(SymbolicParserTypeVariant::PolyArg(def_id, idx)) => {
 
                    target.push_str(&format!("{{def: {}, idx: {}}}", def_id.index, idx));
 
                },
 
                Some(SymbolicParserTypeVariant::Definition(def_id)) => {
 
                    target.push_str(&format!("{{def: {}}}", def_id.index));
 
                },
 
                None => {
 
                    target.push_str("{None}");
 
                }
 
            }
 
            embedded.extend(&symbolic.poly_args2);
 
        }
 
    };
 

	
 
    if !embedded.is_empty() {
 
        target.push_str("<");
 
        for (idx, embedded_id) in embedded.into_iter().enumerate() {
 
            if idx != 0 { target.push_str(", "); }
 
            write_parser_type(target, heap, &heap[embedded_id]);
 
        }
 
        target.push_str(">");
 
    }
 
}
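
So a parser type renders as its keyword or identifier, a resolved symbolic target in braces, and any embedded/polymorphic arguments in angle brackets. Illustrative outputs with made-up definition ids:

// Hedged examples of write_parser_type output (ids are invented):
//   in<msg>                   Input around Message
//   T{def: 3, idx: 0}         Symbolic type resolved to a polymorphic variable
//   Pair{def: 7}<byte, auto>  Symbolic type resolved to a definition with two poly args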
 

	
 
fn write_concrete_type(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType) {
 
    use ConcreteTypePart as CTP;
 

	
 
    fn write_concrete_part(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType, mut idx: usize) -> usize {
 
        if idx >= t.parts.len() {
 
            return idx;
 
        }
 

	
 
        match &t.parts[idx] {
 
            CTP::Marker(marker) => {
 
                // Marker points to polymorphic variable index
 
                let definition = &heap[def_id];
 
                let poly_var_ident = match definition {
 
                    Definition::Struct(_) | Definition::Enum(_) => unreachable!(),
 
                    Definition::Function(definition) => &definition.poly_vars[*marker].value,
 
                    Definition::Component(definition) => &definition.poly_vars[*marker].value,
 
                };
 
                target.push_str(&String::from_utf8_lossy(&poly_var_ident));
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
            },
 
            CTP::Void => target.push_str("void"),
 
            CTP::Message => target.push_str("msg"),
 
            CTP::Bool => target.push_str("bool"),
 
            CTP::Byte => target.push_str("byte"),
 
            CTP::Short => target.push_str("short"),
 
            CTP::Int => target.push_str("int"),
 
            CTP::Long => target.push_str("long"),
 
            CTP::String => target.push_str("string"),
 
            CTP::Array => {
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[]");
 
            },
 
            CTP::Slice => {
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[..]");
 
            }
 
            CTP::Input => {
 
                target.push_str("in<");
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>');
 
            },
 
            CTP::Output => {
 
                target.push_str("out<");
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>')
 
            },
 
            CTP::Instance(definition_id, num_embedded) => {
 
                let identifier = heap[*definition_id].identifier();
 
                target.push_str(&String::from_utf8_lossy(&identifier.value));
 
                target.push('<');
 
                for idx_embedded in 0..*num_embedded {
 
                    if idx_embedded != 0 {
src/protocol/eval.rs
 
use std::collections::HashMap;
 
use std::fmt;
 
use std::fmt::{Debug, Display, Formatter};
 
use std::{i16, i32, i64, i8};
 

	
 
use crate::common::*;
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::EvalContext;
 

	
 
// const MAX_RECURSION: usize = 1024;
 

	
 
const BYTE_MIN: i64 = i8::MIN as i64;
 
const BYTE_MAX: i64 = i8::MAX as i64;
 
const SHORT_MIN: i64 = i16::MIN as i64;
 
const SHORT_MAX: i64 = i16::MAX as i64;
 
const INT_MIN: i64 = i32::MIN as i64;
 
const INT_MAX: i64 = i32::MAX as i64;
 

	
 
const MESSAGE_MAX_LENGTH: i64 = SHORT_MAX;
 

	
 
const ONE: Value = Value::Byte(ByteValue(1));
 

	
 
// TODO: This all goes away eventually, so this is a dirty typechecking hack
 
trait ValueImpl {
 
    fn exact_type(&self) -> Type;
 
    fn is_type_compatible(&self, h: &Heap, t: &ParserType) -> bool {
 
        Self::is_type_compatible_hack(h, t)
 
    }
 
    fn is_type_compatible_hack(h: &Heap, t: &ParserType) -> bool;
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Value {
 
    Unassigned,
 
    Input(InputValue),
 
    Output(OutputValue),
 
    Message(MessageValue),
 
    Boolean(BooleanValue),
 
    Byte(ByteValue),
 
    Short(ShortValue),
 
    Int(IntValue),
 
    Long(LongValue),
 
    InputArray(InputArrayValue),
 
    OutputArray(OutputArrayValue),
 
    MessageArray(MessageArrayValue),
 
    BooleanArray(BooleanArrayValue),
 
    ByteArray(ByteArrayValue),
 
    ShortArray(ShortArrayValue),
 
    IntArray(IntArrayValue),
 
    LongArray(LongArrayValue),
 
}
 
impl Value {
 
    pub fn receive_message(buffer: &Payload) -> Value {
 
        Value::Message(MessageValue(Some(buffer.clone())))
 
    }
 
    fn create_message(length: Value) -> Value {
 
        match length {
 
            Value::Byte(_) | Value::Short(_) | Value::Int(_) | Value::Long(_) => {
 
                let length: i64 = i64::from(length);
 
                if length < 0 || length > MESSAGE_MAX_LENGTH {
 
                    // Only messages within the expected length are allowed
 
                    Value::Message(MessageValue(None))
 
                } else {
 
                    Value::Message(MessageValue(Some(Payload::new(length as usize))))
 
                }
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn from_constant(constant: &Literal) -> Value {
 
        match constant {
 
            Literal::Null => Value::Message(MessageValue(None)),
 
            Literal::True => Value::Boolean(BooleanValue(true)),
 
            Literal::False => Value::Boolean(BooleanValue(false)),
 
            Literal::Integer(val) => {
 
                // Store the integer literal in the smallest value type that can hold it
 
                let val = *val;
 
                if val >= BYTE_MIN && val <= BYTE_MAX {
 
                    Value::Byte(ByteValue(val as i8))
 
                } else if val >= SHORT_MIN && val <= SHORT_MAX {
 
                    Value::Short(ShortValue(val as i16))
 
                } else if val >= INT_MIN && val <= INT_MAX {
 
                    Value::Int(IntValue(val as i32))
 
                } else {
 
                    Value::Long(LongValue(val))
 
                }
 
            }
 
            Literal::Character(_data) => unimplemented!(),
 
            Literal::Struct(_data) => unimplemented!(),
 
            Literal::Enum(_data) => unimplemented!(),
 
        }
 
    }
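
Note that `from_constant` stores an integer literal in the narrowest value type whose range contains it; a small sketch of the expected mapping:

// Hedged sketch: smallest-fitting-width selection for integer literals.
fn integer_literal_width_examples() {
    assert!(matches!(Value::from_constant(&Literal::Integer(100)), Value::Byte(_)));
    assert!(matches!(Value::from_constant(&Literal::Integer(1_000)), Value::Short(_)));
    assert!(matches!(Value::from_constant(&Literal::Integer(70_000)), Value::Int(_)));
}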
 
    fn set(&mut self, index: &Value, value: &Value) -> Option<Value> {
 
        // The index must be of integer type, and non-negative
 
        let the_index: usize;
 
        match index {
 
            Value::Byte(_) | Value::Short(_) | Value::Int(_) | Value::Long(_) => {
 
                let index = i64::from(index);
 
                if index < 0 || index >= MESSAGE_MAX_LENGTH {
 
                    // It is inconsistent to update out of bounds
 
                    return None;
 
                }
 
                the_index = index.try_into().unwrap();
 
            }
 
            _ => unreachable!(),
 
        }
 
        // The subject must be either a message or an array
 
        // And the value and the subject must be compatible
 
        match (self, value) {
 
            (Value::Message(MessageValue(None)), _) => {
 
                // It is inconsistent to update the null message
 
                None
 
            }
 
            (Value::Message(MessageValue(Some(payload))), Value::Byte(ByteValue(b))) => {
 
                if *b < 0 {
 
                    // It is inconsistent to update with a negative value
 
                    return None;
 
                }
 
                if let Some(slot) = payload.as_mut_vec().get_mut(the_index) {
 
                    *slot = (*b).try_into().unwrap();
 
                    Some(value.clone())
 
                } else {
 
                    // It is inconsistent to update out of bounds
 
                    None
 
                }
 
            }
 
            (Value::Message(MessageValue(Some(payload))), Value::Short(ShortValue(b))) => {
 
                if *b < 0 || *b > BYTE_MAX as i16 {
 
                    // It is inconsistent to update with a negative or too-large value
 
                    return None;
 
                }
 
                if let Some(slot) = payload.as_mut_vec().get_mut(the_index) {
 
                    *slot = (*b).try_into().unwrap();
 
                    Some(value.clone())
 
                } else {
 
                    // It is inconsistent to update out of bounds
 
                    None
 
                }
 
            }
 
            (Value::InputArray(_), Value::Input(_)) => todo!(),
 
            (Value::OutputArray(_), Value::Output(_)) => todo!(),
 
            (Value::MessageArray(_), Value::Message(_)) => todo!(),
 
            (Value::BooleanArray(_), Value::Boolean(_)) => todo!(),
 
            (Value::ByteArray(_), Value::Byte(_)) => todo!(),
 
            (Value::ShortArray(_), Value::Short(_)) => todo!(),
 
            (Value::IntArray(_), Value::Int(_)) => todo!(),
 
            (Value::LongArray(_), Value::Long(_)) => todo!(),
 
            _ => unreachable!(),
 
        }
 
    }
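    // Some illustrative cases for `set` on a non-null message: an in-bounds
    // index with ByteValue(5) or ShortValue(100) succeeds, while a negative
    // value, a ShortValue above BYTE_MAX, or an out-of-bounds index all
    // return None (i.e. the update is inconsistent).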
 
    fn get(&self, index: &Value) -> Option<Value> {
 
        // The index must be of integer type, and non-negative
 
        let the_index: usize;
 
        match index {
 
            Value::Byte(_) | Value::Short(_) | Value::Int(_) | Value::Long(_) => {
 
                let index = i64::from(index);
 
                if index < 0 || index >= MESSAGE_MAX_LENGTH {
 
                    // It is inconsistent to read out of bounds
 
                    return None;
 
                }
 
                the_index = index.try_into().unwrap();
 
            }
 
            _ => unreachable!(),
 
        }
 
        // The subject must be either a message or an array
 
        match self {
 
            Value::Message(MessageValue(None)) => {
 
                // It is inconsistent to read from the null message
 
                None
 
            }
 
            Value::Message(MessageValue(Some(payload))) => {
 
                if let Some(slot) = payload.as_slice().get(the_index) {
 
                    Some(Value::Short(ShortValue((*slot).try_into().unwrap())))
 
                } else {
 
                    // It is inconsistent to read out of bounds
 
                    None
 
                }
 
            }
 
            _ => panic!("Can only get from port value"),
 
        }
 
    }
 
    fn length(&self) -> Option<Value> {
 
        // The subject must be either a message or an array
 
        match self {
 
            Value::Message(MessageValue(None)) => {
 
                // It is inconsistent to get length from the null message
 
                None
 
            }
 
            Value::Message(MessageValue(Some(buffer))) => {
 
                Some(Value::Int(IntValue((buffer.len()).try_into().unwrap())))
 
            }
 
            Value::InputArray(InputArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::OutputArray(OutputArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::MessageArray(MessageArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::BooleanArray(BooleanArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::ByteArray(ByteArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::ShortArray(ShortArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::IntArray(IntArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            Value::LongArray(LongArrayValue(vec)) => {
 
                Some(Value::Int(IntValue((vec.len()).try_into().unwrap())))
 
            }
 
            _ => unreachable!(),
 
        }
 
    }
 
    fn plus(&self, other: &Value) -> Value {
 
        match (self, other) {
 
            (Value::Byte(ByteValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Byte(ByteValue(*s + *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s as i16 + *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 + *o))
 
            }
 
            (Value::Byte(ByteValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 + *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Short(ShortValue(*s + *o as i16))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Short(ShortValue(*s + *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Int(IntValue(*s as i32 + *o))
 
            }
 
            (Value::Short(ShortValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 + *o))
 
            }
 
            (Value::Int(IntValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Int(IntValue(*s + *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Int(IntValue(*s + *o as i32))
 
            }
 
            (Value::Int(IntValue(s)), Value::Int(IntValue(o))) => Value::Int(IntValue(*s + *o)),
 
            (Value::Int(IntValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s as i64 + *o))
 
            }
 
            (Value::Long(LongValue(s)), Value::Byte(ByteValue(o))) => {
 
                Value::Long(LongValue(*s + *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Short(ShortValue(o))) => {
 
                Value::Long(LongValue(*s + *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Int(IntValue(o))) => {
 
                Value::Long(LongValue(*s + *o as i64))
 
            }
 
            (Value::Long(LongValue(s)), Value::Long(LongValue(o))) => {
 
                Value::Long(LongValue(*s + *o))
 
            }
 

	
 
            (Value::Message(MessageValue(s)), Value::Message(MessageValue(o))) => {
 
                let payload = if let [Some(s), Some(o)] = [s, o] {
 
                    let mut payload = s.clone();
 
                    payload.concatenate_with(o);
 
                    Some(payload)
 
                } else {
 
                    None
 
                };
 
                Value::Message(MessageValue(payload))
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
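    // Illustrative examples of the promotion rules above:
    //   Value::Byte(ByteValue(1)).plus(&Value::Short(ShortValue(2)))
    //     -> Value::Short(ShortValue(3))
    //   Value::Int(IntValue(1)).plus(&Value::Long(LongValue(2)))
    //     -> Value::Long(LongValue(3))
    // Message + Message concatenates the payloads, and yields the null
    // message if either side is null.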
 
    fn minus(&self, other: &Value) -> Value {
 
        match (self, other) {
src/protocol/lexer.rs
Show inline comments
 
@@ -325,415 +325,419 @@ impl Lexer<'_> {
 
        next = self.source.next();
 
        while is_ident_rest(next) {
 
            result.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 
        Ok(result)
 
    }
 
    fn has_integer(&mut self) -> bool {
 
        is_integer_start(self.source.next())
 
    }
 
    fn consume_integer(&mut self) -> Result<i64, ParseError> {
 
        let position = self.source.pos();
 
        let mut data = Vec::new();
 
        let mut next = self.source.next();
 
        while is_integer_rest(next) {
 
            data.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 

	
 
        let data_len = data.len();
 
        debug_assert_ne!(data_len, 0);
 
        if data_len == 1 {
 
            debug_assert!(data[0] >= b'0' && data[0] <= b'9');
 
            return Ok((data[0] - b'0') as i64);
 
        } else {
 
            // TODO: Fix, u64 should be supported as well
 
            let parsed = if data[1] == b'b' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 2)
 
            } else if data[1] == b'o' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 8)
 
            } else if data[1] == b'x' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 16)
 
            } else {
 
                // Assume decimal
 
                let data = String::from_utf8_lossy(&data);
 
                i64::from_str_radix(&data, 10)
 
            };
 

	
 
            if let Err(_err) = parsed {
 
                return Err(ParseError::new_error(&self.source, position, "Invalid integer constant"));
 
            }
 

	
 
            Ok(parsed.unwrap())
 
        }
 
    }
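    // Illustrative inputs for the branches above: "7" parses directly to 7,
    // while multi-character literals dispatch on the second byte: "0b101"
    // parses to 5, "0o17" to 15, "0x1F" to 31, and anything else (e.g.
    // "1234") is treated as decimal.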
 

	
 
    // Statement keywords
 
    // TODO: Clean up these functions
 
    fn has_statement_keyword(&self) -> bool {
 
        self.has_keyword(b"channel")
 
            || self.has_keyword(b"skip")
 
            || self.has_keyword(b"if")
 
            || self.has_keyword(b"while")
 
            || self.has_keyword(b"break")
 
            || self.has_keyword(b"continue")
 
            || self.has_keyword(b"synchronous")
 
            || self.has_keyword(b"return")
 
            || self.has_keyword(b"assert")
 
            || self.has_keyword(b"goto")
 
            || self.has_keyword(b"new")
 
    }
 
    fn has_type_keyword(&self) -> bool {
 
        self.has_keyword(b"in")
 
            || self.has_keyword(b"out")
 
            || self.has_keyword(b"msg")
 
            || self.has_keyword(b"boolean")
 
            || self.has_keyword(b"byte")
 
            || self.has_keyword(b"short")
 
            || self.has_keyword(b"int")
 
            || self.has_keyword(b"long")
 
            || self.has_keyword(b"auto")
 
    }
 
    fn has_builtin_keyword(&self) -> bool {
 
        self.has_keyword(b"get")
 
            || self.has_keyword(b"fires")
 
            || self.has_keyword(b"create")
 
            || self.has_keyword(b"length")
 
    }
 
    fn has_reserved(&self) -> bool {
 
        self.has_statement_keyword()
 
            || self.has_type_keyword()
 
            || self.has_builtin_keyword()
 
            || self.has_keyword(b"let")
 
            || self.has_keyword(b"struct")
 
            || self.has_keyword(b"enum")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
            || self.has_keyword(b"null")
 
    }
 

	
 
    // Identifiers
 

	
 
    fn has_identifier(&self) -> bool {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return false;
 
        }
 
        let next = self.source.next();
 
        is_ident_start(next)
 
    }
 
    fn consume_identifier(&mut self) -> Result<Identifier, ParseError> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let position = self.source.pos();
 
        let value = self.consume_ident()?;
 
        Ok(Identifier{ position, value })
 
    }
 
    fn consume_identifier_spilled(&mut self) -> Result<(), ParseError> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        self.consume_ident()?;
 
        Ok(())
 
    }
 

	
 
    fn consume_namespaced_identifier(&mut self, h: &mut Heap) -> Result<NamespacedIdentifier, ParseError> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        // Consumes a part of the namespaced identifier, returns a boolean
 
        // indicating whether polymorphic arguments were specified.
 
        // TODO: Continue here: if we fail to properly parse the polymorphic
 
        //  arguments, assume we have reached the end of the namespaced 
 
        //  identifier and are instead dealing with a less-than operator. Ugly?
 
        //  Yes. Needs tokenizer? Yes. 
 
        fn consume_part(
 
            l: &mut Lexer, h: &mut Heap, ident: &mut NamespacedIdentifier,
 
            backup_pos: &mut InputPosition
 
        ) -> Result<(), ParseError> {
 
            // Consume identifier
 
            if !ident.value.is_empty() {
 
                ident.value.extend(b"::");
 
            }
 
            let ident_start = ident.value.len();
 
            ident.value.extend(l.consume_ident()?);
 
            ident.parts.push(NamespacedIdentifierPart::Identifier{
 
                start: ident_start as u16,
 
                end: ident.value.len() as u16
 
            });
 

	
 
            // Maybe consume polymorphic args.
 
            *backup_pos = l.source.pos();
 
            l.consume_whitespace(false)?;
 
            match l.consume_polymorphic_args(h, true)? {
 
                Some(args) => {
 
                    let poly_start = ident.poly_args.len();
 
                    ident.poly_args.extend(args);
 

	
 
                    ident.parts.push(NamespacedIdentifierPart::PolyArgs{
 
                        start: poly_start as u16,
 
                        end: ident.poly_args.len() as u16,
 
                    });
 

	
 
                    *backup_pos = l.source.pos();
 
                },
 
                None => {}
 
            };
 

	
 
            Ok(())
 
        }
 

	
 
        let mut ident = NamespacedIdentifier{
 
            position: self.source.pos(),
 
            value: Vec::new(),
 
            poly_args: Vec::new(),
 
            parts: Vec::new(),
 
        };
 

	
 
        // Keep consuming parts separated by "::". We don't consume the trailing
 
        // whitespace, hence we keep a backup position at the end of the last
 
        // valid part of the namespaced identifier (i.e. the last ident, or the
 
        // last encountered polymorphic arguments).
 
        let mut backup_pos = self.source.pos();
 
        consume_part(self, h, &mut ident, &mut backup_pos)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 
            consume_part(self, h, &mut ident, &mut backup_pos)?;
 
            self.consume_whitespace(false)?;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(ident)
 
    }
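    // As an illustrative example (hypothetical names), parsing
    // `some_module::SomeEnum<byte>::Variant` pushes an Identifier part for
    // each of the three identifiers and a PolyArgs part for `<byte>`, while
    // `backup_pos` ensures that whitespace after the last valid part is not
    // consumed.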
 

	
 
    // Consumes a spilled namespaced identifier and returns the number of
    // "::"-separated parts that were encountered.
 
    fn consume_namespaced_identifier_spilled(&mut self) -> Result<usize, ParseError> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        debug_log!("consume_nsident2_spilled: {}", debug_line!(self.source));
 

	
 
        fn consume_part_spilled(l: &mut Lexer, backup_pos: &mut InputPosition) -> Result<(), ParseError> {
 
            l.consume_ident()?;
 
            *backup_pos = l.source.pos();
 
            l.consume_whitespace(false)?;
 
            match l.maybe_consume_poly_args_spilled_without_pos_recovery() {
 
                Ok(true) => { *backup_pos = l.source.pos(); },
 
                Ok(false) => {},
 
                Err(_) => { return Err(l.error_at_pos("Failed to parse poly args (spilled)")) },
 
            }
 
            Ok(())
 
        }
 

	
 
        let mut backup_pos = self.source.pos();
 
        let mut num_namespaces = 1;
 
        consume_part_spilled(self, &mut backup_pos)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 
            consume_part_spilled(self, &mut backup_pos)?;
 
            self.consume_whitespace(false)?;
 
            num_namespaces += 1;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(num_namespaces)
 
    }
 

	
 
    // Types and type annotations
 

	
 
    /// Consumes a type definition. When called, the input position should be at
    /// the type specification. When done, the input position will be at the end
    /// of the type specification (hence it may be at whitespace).
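    ///
    /// Illustrative (non-normative) examples of the grammar handled below:
    /// `in<msg>[]` parses as an array of typed input ports, `byte[][]` yields
    /// a nested array type, and `auto` is only accepted when `allow_inference`
    /// is true.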
 
    fn consume_type(&mut self, h: &mut Heap, allow_inference: bool) -> Result<ParserTypeId, ParseError> {
 
        // Small helper function to convert in/out polymorphic arguments. Not
 
        // pretty, but the returned error boolean is true if the error is due
        // to inference not being allowed
 
        let reduce_port_poly_args = |
 
            heap: &mut Heap,
 
            port_pos: &InputPosition,
 
            args: Vec<ParserTypeId>,
 
        | -> Result<ParserTypeId, bool> {
 
            match args.len() {
 
                0 => if allow_inference {  
 
                    Ok(heap.alloc_parser_type(|this| ParserType{
 
                        this,
 
                        pos: port_pos.clone(),
 
                        variant: ParserTypeVariant::Inferred
 
                    }))
 
                } else {
 
                    Err(true)
 
                },
 
                1 => Ok(args[0]),
 
                _ => Err(false)
 
            }
 
        };
 

	
 
        // Consume the type
 
        debug_log!("consume_type: {}", debug_line!(self.source));
 
        let pos = self.source.pos();
 
        let parser_type_variant = if self.has_keyword(b"msg") {
 
            self.consume_keyword(b"msg")?;
 
            ParserTypeVariant::Message
 
        } else if self.has_keyword(b"boolean") {
 
            self.consume_keyword(b"boolean")?;
 
            ParserTypeVariant::Bool
 
        } else if self.has_keyword(b"byte") {
 
            self.consume_keyword(b"byte")?;
 
            ParserTypeVariant::Byte
 
        } else if self.has_keyword(b"short") {
 
            self.consume_keyword(b"short")?;
 
            ParserTypeVariant::Short
 
        } else if self.has_keyword(b"int") {
 
            self.consume_keyword(b"int")?;
 
            ParserTypeVariant::Int
 
        } else if self.has_keyword(b"long") {
 
            self.consume_keyword(b"long")?;
 
            ParserTypeVariant::Long
 
        } else if self.has_keyword(b"str") {
 
            self.consume_keyword(b"str")?;
 
            ParserTypeVariant::String
 
        } else if self.has_keyword(b"auto") {
 
            if !allow_inference {
 
                return Err(ParseError::new_error(
 
                        &self.source, pos,
 
                        "Type inference is not allowed here"
 
                ));
 
            }
 

	
 
            self.consume_keyword(b"auto")?;
 
            ParserTypeVariant::Inferred
 
        } else if self.has_keyword(b"in") {
 
            // TODO: @cleanup: not particularly neat to have this special case
 
            //  where we enforce polyargs in the parser-phase
 
            self.consume_keyword(b"in")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?.unwrap_or_default();
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error|  {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'in' only allows for 1 polymorphic argument"
 
                    };
 
                    ParseError::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Input(poly_arg)
 
        } else if self.has_keyword(b"out") {
 
            self.consume_keyword(b"out")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?.unwrap_or_default();
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error| {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'out' only allows for 1 polymorphic argument, but {} were specified"
 
                    };
 
                    ParseError::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Output(poly_arg)
 
        } else {
 
            // Must be a symbolic type
 
            let identifier = self.consume_namespaced_identifier(h)?;
 
            ParserTypeVariant::Symbolic(SymbolicParserType{identifier, variant: None, poly_args2: Vec::new()})
 
        };
 

	
 
        // If the type was a basic type (not supporting polymorphic type
 
        // arguments), then we make sure the user did not specify any of them.
 
        let mut backup_pos = self.source.pos();
 
        if !parser_type_variant.supports_polymorphic_args() {
 
            self.consume_whitespace(false)?;
 
            if let Some(b'<') = self.source.next() {
 
                return Err(ParseError::new_error(
 
                    &self.source, self.source.pos(),
 
                    "This type does not allow polymorphic arguments"
 
                ));
 
            }
 

	
 
            self.source.seek(backup_pos);
 
        }
 

	
 
        let mut parser_type_id = h.alloc_parser_type(|this| ParserType{
 
            this, pos, variant: parser_type_variant
 
        });
 

	
 
        // If we're dealing with arrays, then we need to wrap the currently
 
        // parsed type in array types
 
        self.consume_whitespace(false)?;
 
        while let Some(b'[') = self.source.next() {
 
            let pos = self.source.pos();
 
            self.source.consume();
 
            self.consume_whitespace(false)?;
 
            if let Some(b']') = self.source.next() {
 
                // Type is wrapped in an array
 
                self.source.consume();
 
                parser_type_id = h.alloc_parser_type(|this| ParserType{
 
                    this, pos, variant: ParserTypeVariant::Array(parser_type_id)
 
                });
 
                backup_pos = self.source.pos();
 

	
 
                // In case we're dealing with another array
 
                self.consume_whitespace(false)?;
 
            } else {
 
                return Err(ParseError::new_error(
 
                    &self.source, pos,
 
                    "Expected a closing ']'"
 
                ));
 
            }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(parser_type_id)
 
    }
 

	
 
    /// Attempts to consume a type without returning it. If it doesn't encounter
 
    /// a well-formed type, then the input position is left at a "random"
 
    /// position.
 
    fn maybe_consume_type_spilled_without_pos_recovery(&mut self) -> bool {
 
        // Consume type identifier
 
        debug_log!("maybe_consume_type_spilled_...: {}", debug_line!(self.source));
 
        if self.has_type_keyword() {
 
            self.consume_any_chars();
 
        } else {
 
            let ident = self.consume_namespaced_identifier_spilled();
 
            if ident.is_err() { return false; }
 
        }
 

	
 
        // Consume any polymorphic arguments that follow the type identifier
 
        let mut backup_pos = self.source.pos();
 
        if self.consume_whitespace(false).is_err() { return false; }
 
        
 
        // Consume any array specifiers. Make sure we always leave the input
 
        // position at the end of the last array specifier if we do find a
 
        // valid type
 
        if self.consume_whitespace(false).is_err() { return false; }
 
        while let Some(b'[') = self.source.next() {
 
            self.source.consume();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
            if self.source.next() != Some(b']') { return false; }
 
            self.source.consume();
 
            backup_pos = self.source.pos();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return true;
 
    }
 

	
 
    fn maybe_consume_type_spilled(&mut self) -> bool {
 
        let backup_pos = self.source.pos();
 
        if !self.maybe_consume_type_spilled_without_pos_recovery() {
 
            self.source.seek(backup_pos);
 
            return false;
 
        }
 

	
 
        return true;
 
    }
 

	
 
    /// Attempts to consume polymorphic arguments without returning them. If it
 
@@ -1247,462 +1251,495 @@ impl Lexer<'_> {
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_prefix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.has_string(b"+")
 
            || self.has_string(b"-")
 
            || self.has_string(b"~")
 
            || self.has_string(b"!")
 
        {
 
            let position = self.source.pos();
 
            let operation;
 
            if self.has_string(b"+") {
 
                self.consume_string(b"+")?;
 
                if self.has_string(b"+") {
 
                    self.consume_string(b"+")?;
 
                    operation = UnaryOperation::PreIncrement;
 
                } else {
 
                    operation = UnaryOperation::Positive;
 
                }
 
            } else if self.has_string(b"-") {
 
                self.consume_string(b"-")?;
 
                if self.has_string(b"-") {
 
                    self.consume_string(b"-")?;
 
                    operation = UnaryOperation::PreDecrement;
 
                } else {
 
                    operation = UnaryOperation::Negative;
 
                }
 
            } else if self.has_string(b"~") {
 
                self.consume_string(b"~")?;
 
                operation = UnaryOperation::BitwiseNot;
 
            } else {
 
                self.consume_string(b"!")?;
 
                operation = UnaryOperation::LogicalNot;
 
            }
 
            self.consume_whitespace(false)?;
 
            if self.level >= MAX_LEVEL {
 
                return Err(self.error_at_pos("Too deeply nested expression"));
 
            }
 
            self.level += 1;
 
            let result = self.consume_prefix_expression(h);
 
            self.level -= 1;
 
            let expression = result?;
 
            return Ok(h
 
                .alloc_unary_expression(|this| UnaryExpression {
 
                    this,
 
                    position,
 
                    operation,
 
                    expression,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast());
 
        }
 
        self.consume_postfix_expression(h)
 
    }
 
    fn consume_postfix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_primary_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"++")
 
            || self.has_string(b"--")
 
            || self.has_string(b"[")
 
            || (self.has_string(b".") && !self.has_string(b".."))
 
        {
 
            let mut position = self.source.pos();
 
            if self.has_string(b"++") {
 
                self.consume_string(b"++")?;
 
                let operation = UnaryOperation::PostIncrement;
 
                let expression = result;
 
                self.consume_whitespace(false)?;
 
                result = h
 
                    .alloc_unary_expression(|this| UnaryExpression {
 
                        this,
 
                        position,
 
                        operation,
 
                        expression,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            } else if self.has_string(b"--") {
 
                self.consume_string(b"--")?;
 
                let operation = UnaryOperation::PostDecrement;
 
                let expression = result;
 
                self.consume_whitespace(false)?;
 
                result = h
 
                    .alloc_unary_expression(|this| UnaryExpression {
 
                        this,
 
                        position,
 
                        operation,
 
                        expression,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            } else if self.has_string(b"[") {
 
                self.consume_string(b"[")?;
 
                self.consume_whitespace(false)?;
 
                let subject = result;
 
                let index = self.consume_expression(h)?;
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b"..") || self.has_string(b":") {
 
                    position = self.source.pos();
 
                    if self.has_string(b"..") {
 
                        self.consume_string(b"..")?;
 
                    } else {
 
                        self.consume_string(b":")?;
 
                    }
 
                    self.consume_whitespace(false)?;
 
                    let to_index = self.consume_expression(h)?;
 
                    self.consume_whitespace(false)?;
 
                    result = h
 
                        .alloc_slicing_expression(|this| SlicingExpression {
 
                            this,
 
                            position,
 
                            subject,
 
                            from_index: index,
 
                            to_index,
 
                            parent: ExpressionParent::None,
 
                            concrete_type: ConcreteType::default(),
 
                        })
 
                        .upcast();
 
                } else {
 
                    result = h
 
                        .alloc_indexing_expression(|this| IndexingExpression {
 
                            this,
 
                            position,
 
                            subject,
 
                            index,
 
                            parent: ExpressionParent::None,
 
                            concrete_type: ConcreteType::default(),
 
                        })
 
                        .upcast();
 
                }
 
                self.consume_string(b"]")?;
 
                self.consume_whitespace(false)?;
 
            } else {
 
                assert!(self.has_string(b"."));
 
                self.consume_string(b".")?;
 
                self.consume_whitespace(false)?;
 
                let subject = result;
 
                let field;
 
                if self.has_keyword(b"length") {
 
                    self.consume_keyword(b"length")?;
 
                    field = Field::Length;
 
                } else {
 
                    let identifier = self.consume_identifier()?;
 
                    field = Field::Symbolic(FieldSymbolic{
 
                        identifier,
 
                        definition: None,
 
                        field_idx: 0,
 
                    });
 
                }
 
                result = h
 
                    .alloc_select_expression(|this| SelectExpression {
 
                        this,
 
                        position,
 
                        subject,
 
                        field,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            }
 
        }
 
        Ok(result)
 
    }
 
    fn consume_primary_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.has_string(b"(") {
 
            return self.consume_paren_expression(h);
 
        }
 
        if self.has_string(b"{") {
 
            return Ok(self.consume_array_expression(h)?.upcast());
 
        }
 
        if self.has_builtin_literal() {
 
            return Ok(self.consume_builtin_literal_expression(h)?.upcast());
 
        }
 
        if self.has_struct_literal() {
 
            return Ok(self.consume_struct_literal_expression(h)?.upcast());
 
        }
 
        if self.has_call_expression() {
 
            return Ok(self.consume_call_expression(h)?.upcast());
 
        }
 
        if self.has_enum_literal() {
 
            return Ok(self.consume_enum_literal(h)?.upcast());
 
        }
 
        Ok(self.consume_variable_expression(h)?.upcast())
 
    }
 
    fn consume_array_expression(&mut self, h: &mut Heap) -> Result<ArrayExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        let mut elements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b"}") {
 
            while self.source.next().is_some() {
 
                elements.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b"}") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 
        self.consume_string(b"}")?;
 
        Ok(h.alloc_array_expression(|this| ArrayExpression {
 
            this,
 
            position,
 
            elements,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn has_builtin_literal(&self) -> bool {
 
        is_constant(self.source.next())
 
            || self.has_keyword(b"null")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
    }
 
    fn consume_builtin_literal_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<LiteralExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        let value;
 
        if self.has_keyword(b"null") {
 
            self.consume_keyword(b"null")?;
 
            value = Literal::Null;
 
        } else if self.has_keyword(b"true") {
 
            self.consume_keyword(b"true")?;
 
            value = Literal::True;
 
        } else if self.has_keyword(b"false") {
 
            self.consume_keyword(b"false")?;
 
            value = Literal::False;
 
        } else if self.source.next() == Some(b'\'') {
 
            self.source.consume();
 
            let mut data = Vec::new();
 
            let mut next = self.source.next();
 
            while next != Some(b'\'') && (is_vchar(next) || next == Some(b' ')) {
 
                data.push(next.unwrap());
 
                self.source.consume();
 
                next = self.source.next();
 
            }
 
            if next != Some(b'\'') || data.is_empty() {
 
                return Err(self.error_at_pos("Expected character constant"));
 
            }
 
            self.source.consume();
 
            value = Literal::Character(data);
 
        } else {
 
            if !self.has_integer() {
 
                return Err(self.error_at_pos("Expected integer constant"));
 
            }
 

	
 
            value = Literal::Integer(self.consume_integer()?);
 
        }
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression {
 
            this,
 
            position,
 
            value,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    fn has_enum_literal(&mut self) -> bool {
 
        // An enum literal is always:
 
        //      maybe_a_namespace::EnumName<maybe_one_of_these>::Variant
 
        // So, for now, it may be distinguished from other literals/variables by
 
        // first checking for struct literals and call expressions, then for
 
        // enum literals, finally for variable expressions. It is different
 
        // from a variable expression in that it _always_ consists of multiple
        // "::"-separated parts.
 
        let backup_pos = self.source.pos();
 
        let result = match self.consume_namespaced_identifier_spilled() {
 
            Ok(num_namespaces) => num_namespaces > 1,
 
            Err(_) => false,
 
        };
 
        self.source.seek(backup_pos);
 
        result
 
    }
 
    fn consume_enum_literal(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError> {
 
        let identifier = self.consume_namespaced_identifier(h)?;
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression{
 
            this,
 
            position: identifier.position,
 
            value: Literal::Enum(LiteralEnum{
 
                identifier,
 
                poly_args2: Vec::new(),
 
                definition: None,
 
                variant_idx: 0,
 
            }),
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn has_struct_literal(&mut self) -> bool {
 
        // A struct literal is written as:
 
        //      namespace::StructName<maybe_one_of_these, auto>{ field: expr }
 
        // We will parse up until the opening brace to see if we're dealing with
 
        // a struct literal.
 
        let backup_pos = self.source.pos();
 
        let result = self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'{');
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 

	
 
    fn consume_struct_literal_expression(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError> {
 
        // Consume identifier and polymorphic arguments
 
        debug_log!("consume_struct_literal_expression: {}", debug_line!(self.source));
 
        let position = self.source.pos();
 
        let identifier = self.consume_namespaced_identifier(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume fields
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let identifier = lexer.consume_identifier()?;
 
                lexer.consume_whitespace(false)?;
 
                lexer.consume_string(b":")?;
 
                lexer.consume_whitespace(false)?;
 
                let value = lexer.consume_expression(heap)?;
 

	
 
                Ok(LiteralStructField{ identifier, value, field_idx: 0 })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError::new_error(
 
                self.source, self.source.pos(),
 
                "A struct literal must be followed by its field values"
 
            ))
 
        };
 

	
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression{
 
            this,
 
            position,
 
            value: Literal::Struct(LiteralStruct{
 
                identifier,
 
                fields,
 
                poly_args2: Vec::new(),
 
                definition: None,
 
            }),
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        }))
 
    }
 

	
 
    fn has_call_expression(&mut self) -> bool {
 
        // We need to prevent ambiguity with various operators (because we may
 
        // be specifying polymorphic variables) and variables.
 
        if self.has_builtin_keyword() {
 
            return true;
 
        }
 

	
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 

	
 
        if self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'(') {
 
            // Seems like we have a function call or an enum literal
 
            result = true;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_call_expression(&mut self, h: &mut Heap) -> Result<CallExpressionId, ParseError> {
 
        let position = self.source.pos();
 

	
 
        // Consume method identifier
 
        // TODO: @token Replace this conditional polymorphic arg parsing once we have a tokenizer.
 
        debug_log!("consume_call_expression: {}", debug_line!(self.source));
 
        let method;
 
        let mut consume_poly_args_explicitly = true;
 
        if self.has_keyword(b"get") {
 
            self.consume_keyword(b"get")?;
 
            method = Method::Get;
 
        } else if self.has_keyword(b"put") {
 
            self.consume_keyword(b"put")?;
 
            method = Method::Put;
 
        } else if self.has_keyword(b"fires") {
 
            self.consume_keyword(b"fires")?;
 
            method = Method::Fires;
 
        } else if self.has_keyword(b"create") {
 
            self.consume_keyword(b"create")?;
 
            method = Method::Create;
 
        } else {
 
            let identifier = self.consume_namespaced_identifier(h)?;
 
            method = Method::Symbolic(MethodSymbolic{
 
                identifier,
 
                definition: None
 
            });
 
            consume_poly_args_explicitly = false;
 
        };
 

	
 
        // Consume polymorphic arguments
 
        let poly_args = if consume_poly_args_explicitly {
 
            self.consume_whitespace(false)?;
 
            self.consume_polymorphic_args(h, true)?.unwrap_or_default()
 
        } else {
 
            Vec::new()
 
        };
 

	
 
        // Consume arguments to call
 
        self.consume_whitespace(false)?;
 
        let mut arguments = Vec::new();
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b")") {
 
            // TODO: allow trailing comma
 
            while self.source.next().is_some() {
 
                arguments.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b")") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?
 
            }
 
        }
 
        self.consume_string(b")")?;
 
        Ok(h.alloc_call_expression(|this| CallExpression {
 
            this,
 
            position,
 
            method,
 
            arguments,
 
            poly_args,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn consume_variable_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<VariableExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        debug_log!("consume_variable_expression: {}", debug_line!(self.source));
 

	
 
        // TODO: @token Reimplement when tokenizer is implemented, prevent ambiguities
 
        let identifier = identifier_as_namespaced(self.consume_identifier()?);
 

	
 
        Ok(h.alloc_variable_expression(|this| VariableExpression {
 
            this,
 
            position,
 
            identifier,
 
            declaration: None,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    // ====================
 
    // Statements
 
    // ====================
 

	
 
    /// Consumes any kind of statement from the source and will error if it
 
    /// did not encounter a statement. Will also return an error if the
 
    /// statement is nested too deeply.
 
    ///
 
    /// `wrap_in_block` may be set to true to ensure that the parsed statement
 
    /// will be wrapped in a block statement if it is not already a block
 
    /// statement. This is used to ensure that all `if`, `while` and `sync`
 
    /// statements have a block statement as body.
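    ///
    /// For example (illustrative), an `if` or `while` whose body is a single
    /// expression statement gets that body wrapped in a block statement when
    /// `wrap_in_block` is true, so later passes can assume such bodies are
    /// always blocks.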
 
    fn consume_statement(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested statement"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_statement_impl(h, wrap_in_block);
 
        self.level -= 1;
 
        result
 
    }
 
    fn has_label(&mut self) -> bool {
 
        // To prevent ambiguity with expression statements consisting only of an
 
        // identifier or a namespaced identifier, we look ahead and match on the
 
        // *single* colon that signals a labeled statement.
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 
        if self.consume_identifier_spilled().is_ok() {
 
            // the next character is ':' and the character after it is NOT ':'
 
            result = Some(b':') == self.source.next() && Some(b':') != self.source.lookahead(1)
 
        }
 
        self.source.seek(backup_pos);
 
@@ -2045,386 +2082,390 @@ impl Lexer<'_> {
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<SynchronousStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"synchronous")?;
 
        self.consume_whitespace(false)?;
 
        // TODO: What was the purpose of this? Seems superfluous and confusing?
 
        // let mut parameters = Vec::new();
 
        // if self.has_string(b"(") {
 
        //     self.consume_parameters(h, &mut parameters)?;
 
        //     self.consume_whitespace(false)?;
 
        // } else if !self.has_keyword(b"skip") && !self.has_string(b"{") {
 
        //     return Err(self.error_at_pos("Expected block statement"));
 
        // }
 
        let body = self.consume_statement(h, true)?;
 
        Ok(h.alloc_synchronous_statement(|this| SynchronousStatement {
 
            this,
 
            position,
 
            body,
 
            end_sync: None,
 
            parent_scope: None,
 
        }))
 
    }
 
    fn consume_return_statement(&mut self, h: &mut Heap) -> Result<ReturnStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"return")?;
 
        self.consume_whitespace(false)?;
 
        let expression = if self.has_string(b"(") {
 
            self.consume_paren_expression(h)
 
        } else {
 
            self.consume_expression(h)
 
        }?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_return_statement(|this| ReturnStatement { this, position, expression }))
 
    }
 
    fn consume_assert_statement(&mut self, h: &mut Heap) -> Result<AssertStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"assert")?;
 
        self.consume_whitespace(false)?;
 
        let expression = if self.has_string(b"(") {
 
            self.consume_paren_expression(h)
 
        } else {
 
            self.consume_expression(h)
 
        }?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_assert_statement(|this| AssertStatement {
 
            this,
 
            position,
 
            expression,
 
            next: None,
 
        }))
 
    }
 
    fn consume_goto_statement(&mut self, h: &mut Heap) -> Result<GotoStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"goto")?;
 
        self.consume_whitespace(false)?;
 
        let label = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_goto_statement(|this| GotoStatement { this, position, label, target: None }))
 
    }
 
    fn consume_new_statement(&mut self, h: &mut Heap) -> Result<NewStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"new")?;
 
        self.consume_whitespace(false)?;
 
        let expression = self.consume_call_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_new_statement(|this| NewStatement { this, position, expression, next: None }))
 
    }
 
    fn consume_expression_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ExpressionStatementId, ParseError> {
 
        let position = self.source.pos();
 
        let expression = self.consume_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_expression_statement(|this| ExpressionStatement {
 
            this,
 
            position,
 
            expression,
 
            next: None,
 
        }))
 
    }
 

	
 
    // ====================
 
    // Symbol definitions
 
    // ====================
 

	
 
    fn has_symbol_definition(&self) -> bool {
 
        self.has_keyword(b"composite")
 
            || self.has_keyword(b"primitive")
 
            || self.has_type_keyword()
 
            || self.has_identifier()
 
    }
 
    fn consume_symbol_definition(&mut self, h: &mut Heap) -> Result<DefinitionId, ParseError> {
 
        if self.has_keyword(b"struct") {
 
            Ok(self.consume_struct_definition(h)?.upcast())
 
        } else if self.has_keyword(b"enum") {
 
            Ok(self.consume_enum_definition(h)?.upcast())
 
        } else if self.has_keyword(b"composite") || self.has_keyword(b"primitive") {
 
            Ok(self.consume_component_definition(h)?.upcast())
 
        } else {
 
            Ok(self.consume_function_definition(h)?.upcast())
 
        }
 
    }
 
    fn consume_struct_definition(&mut self, h: &mut Heap) -> Result<StructId, ParseError> {
 
        // Parse "struct" keyword, optional polyvars and its identifier
 
        let struct_pos = self.source.pos();
 
        self.consume_keyword(b"struct")?;
 
        self.consume_whitespace(true)?;
 
        let struct_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Parse struct fields
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let position = lexer.source.pos();
 
                let parser_type = lexer.consume_type(heap, false)?;
 
                lexer.consume_whitespace(true)?;
 
                let field = lexer.consume_identifier()?;
 

	
 
                Ok(StructFieldDefinition{ position, field, parser_type })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError::new_error(
 
                self.source, struct_pos,
 
                "An struct definition must be followed by its fields"
 
            )),
 
        };
 

	
 
        // Valid struct definition
 
        Ok(h.alloc_struct_definition(|this| StructDefinition{
 
            this,
 
            position: struct_pos,
 
            identifier: struct_ident,
 
            poly_vars,
 
            fields,
 
        }))
 
    }
 
    fn consume_enum_definition(&mut self, h: &mut Heap) -> Result<EnumId, ParseError> {
 
        // Parse "enum" keyword, optional polyvars and its identifier
 
        let enum_pos = self.source.pos();
 
        self.consume_keyword(b"enum")?;
 
        self.consume_whitespace(true)?;
 
        let enum_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        let variants = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected end of enum variant list",
 
            |lexer, heap| {
 
                // Variant identifier
 
                let position = lexer.source.pos();
 
                let identifier = lexer.consume_identifier()?;
 
                lexer.consume_whitespace(false)?;
 

	
 
                // Optional variant value/type
 
                let next = lexer.source.next();
 
                let value = match next {
 
                    Some(b',') => {
 
                        // Do not consume, let `consume_comma_separated` handle
 
                        // the next item
 
                        EnumVariantValue::None
 
                    },
 
                    Some(b'=') => {
 
                        // Integer value
 
                        lexer.source.consume();
 
                        lexer.consume_whitespace(false)?;
 
                        if !lexer.has_integer() {
 
                            return Err(lexer.error_at_pos("expected integer"))
 
                        }
 
                        let value = lexer.consume_integer()?;
 
                        EnumVariantValue::Integer(value)
 
                    },
 
                    Some(b'(') => {
 
                        // Embedded type
 
                        lexer.source.consume();
 
                        lexer.consume_whitespace(false)?;
 
                        let embedded_type = lexer.consume_type(heap, false)?;
 
                        lexer.consume_whitespace(false)?;
 
                        lexer.consume_string(b")")?;
 
                        EnumVariantValue::Type(embedded_type)
 
                    },
 
                    Some(b'}') => {
 
                        // End of enum
 
                        EnumVariantValue::None
 
                    }
 
                    _ => {
 
                        return Err(lexer.error_at_pos("Expected ',', '=', or '('"));
 
                        return Err(lexer.error_at_pos("Expected ',', '=', '}' or '('"));
 
                    }
 
                };
 

	
 
                Ok(EnumVariantDefinition{ position, identifier, value })
 
            }
 
        )? {
 
            Some(variants) => variants,
 
            None => return Err(ParseError::new_error(
 
                self.source, enum_pos,
 
                "An enum definition must be followed by its variants"
 
            )),
 
        };
 

	
 
        Ok(h.alloc_enum_definition(|this| EnumDefinition{
 
            this,
 
            position: enum_pos,
 
            identifier: enum_ident,
 
            poly_vars,
 
            variants,
 
        }))
 
    }
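    // Illustrative example (hypothetical names) of an enum definition accepted
    // by the parser above:
    //     enum Result<T> { Ok(T), ErrCode = 1, Unknown }
    // i.e. a variant may carry an embedded type, an explicit integer value,
    // or no value at all.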
 
    fn consume_component_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // TODO: Cleanup
 
        if self.has_keyword(b"composite") {
 
            Ok(self.consume_composite_definition(h)?)
 
        } else {
 
            Ok(self.consume_primitive_definition(h)?)
 
        }
 
    }
 
    fn consume_composite_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // Parse keyword, optional polyvars and the identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"composite")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Parse body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_component(|this| Component { 
 
            this,
 
            variant: ComponentVariant::Composite,
 
            position,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body
 
        }))
 
    }
 
    fn consume_primitive_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // Consume keyword, optional polyvars and identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"primitive")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_component(|this| Component { 
 
            this,
 
            variant: ComponentVariant::Primitive,
 
            position,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body
 
        }))
 
    }
 
    fn consume_function_definition(&mut self, h: &mut Heap) -> Result<FunctionId, ParseError> {
 
        // Consume return type, optional polyvars and identifier
 
        let position = self.source.pos();
 
        let return_type = self.consume_type(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_function(|this| Function {
 
            this,
 
            position,
 
            return_type,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body,
 
        }))
 
    }
 
    fn has_pragma(&self) -> bool {
 
        if let Some(c) = self.source.next() {
 
            c == b'#'
 
        } else {
 
            false
 
        }
 
    }
 
    fn consume_pragma(&mut self, h: &mut Heap) -> Result<PragmaId, ParseError> {
 
        let position = self.source.pos();
 
        let next = self.source.next();
 
        if next != Some(b'#') {
 
            return Err(self.error_at_pos("Expected pragma"));
 
        }
 
        self.source.consume();
 
        if !is_vchar(self.source.next()) {
 
            return Err(self.error_at_pos("Expected pragma"));
 
        }
 
        if self.has_string(b"version") {
 
            self.consume_string(b"version")?;
 
            self.consume_whitespace(true)?;
 
            if !self.has_integer() {
 
                return Err(self.error_at_pos("Expected integer constant"));
 
            }
 
            let version = self.consume_integer()?;
 
            debug_assert!(version >= 0);
 
            return Ok(h.alloc_pragma(|this| Pragma::Version(PragmaVersion{
 
                this, position, version: version as u64
 
            })))
 
        } else if self.has_string(b"module") {
 
            self.consume_string(b"module")?;
 
            self.consume_whitespace(true)?;
 
            if !self.has_identifier() {
 
                return Err(self.error_at_pos("Expected identifier"));
 
            }
 
            let mut value = Vec::new();
 
            let mut ident = self.consume_ident()?;
 
            value.append(&mut ident);
 
            while self.has_string(b".") {
 
                self.consume_string(b".")?;
 
                value.push(b'.');
 
                ident = self.consume_ident()?;
 
                value.append(&mut ident);
 
            }
 
            return Ok(h.alloc_pragma(|this| Pragma::Module(PragmaModule{
 
                this, position, value
 
            })));
 
        } else {
 
            return Err(self.error_at_pos("Unknown pragma"));
 
        }
 
    }
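    // Illustrative sketch (hypothetical source text, exact naming rules
    // assumed) of the two pragma forms accepted by `consume_pragma` above:
    //     #version 1
    //     #module std.collections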
 

	
 
    fn has_import(&self) -> bool {
 
        self.has_keyword(b"import")
 
    }
 
    fn consume_import(&mut self, h: &mut Heap) -> Result<ImportId, ParseError> {
 
        // Parse the word "import" and the name of the module
 
        let position = self.source.pos();
 
        self.consume_keyword(b"import")?;
 
        self.consume_whitespace(true)?;
 
        let mut value = Vec::new();
 
        let mut last_ident_pos = self.source.pos();
 
        let mut ident = self.consume_ident()?;
 
        value.append(&mut ident);
 
        let mut last_ident_start = 0;
 

	
 
        while self.has_string(b".") {
 
            self.consume_string(b".")?;
 
            value.push(b'.');
 
            last_ident_pos = self.source.pos();
 
            ident = self.consume_ident()?;
 
            last_ident_start = value.len();
 
            value.append(&mut ident);
 
        }
 

	
 

	
 
        self.consume_whitespace(false)?;
 

	
 
        // Check for the potential aliasing or specific module imports
 
        let import = if self.has_string(b"as") {
 
            self.consume_string(b"as")?;
 
            self.consume_whitespace(true)?;
 
            let alias = self.consume_identifier()?;
 

	
 
            h.alloc_import(|this| Import::Module(ImportModule{
 
                this,
src/protocol/parser/mod.rs
Show inline comments
 
@@ -45,220 +45,220 @@ impl Parser {
 
            symbol_table: SymbolTable::new(),
 
            type_table: TypeTable::new(),
 
        }
 
    }
 

	
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<RootId, ParseError> {
 
        // Lex the input source
 
        let mut lex = Lexer::new(&mut source);
 
        let pd = lex.consume_protocol_description(&mut self.heap)?;
 

	
 
        // Seek the module name and version
 
        let root = &self.heap[pd];
 
        let mut module_name_pos = InputPosition::default();
 
        let mut module_name = Vec::new();
 
        let mut module_version_pos = InputPosition::default();
 
        let mut module_version = None;
 

	
 
        for pragma in &root.pragmas {
 
            match &self.heap[*pragma] {
 
                Pragma::Module(module) => {
 
                    if !module_name.is_empty() {
 
                        return Err(
 
                            ParseError::new_error(&source, module.position, "Double definition of module name in the same file")
 
                                .with_postfixed_info(&source, module_name_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_name_pos = module.position.clone();
 
                    module_name = module.value.clone();
 
                },
 
                Pragma::Version(version) => {
 
                    if module_version.is_some() {
 
                        return Err(
 
                            ParseError::new_error(&source, version.position, "Double definition of module version")
 
                                .with_postfixed_info(&source, module_version_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_version_pos = version.position.clone();
 
                    module_version = Some(version.version);
 
                },
 
            }
 
        }
 

	
 
        // Add module to list of modules and prevent naming conflicts
 
        let cur_module_idx = self.modules.len();
 
        if let Some(prev_module_idx) = self.module_lookup.get(&module_name) {
 
            // Find `#module` statement in other module again
 
            let prev_module = &self.modules[*prev_module_idx];
 
            let prev_module_pos = self.heap[prev_module.root_id].pragmas
 
                .iter()
 
                .find_map(|p| {
 
                    match &self.heap[*p] {
 
                        Pragma::Module(module) => Some(module.position.clone()),
 
                        _ => None
 
                    }
 
                })
 
                .unwrap_or(InputPosition::default());
 

	
 
            let module_name_msg = if module_name.is_empty() {
 
                format!("a nameless module")
 
            } else {
 
                format!("module '{}'", String::from_utf8_lossy(&module_name))
 
            };
 

	
 
            return Err(
 
                ParseError::new_error(&source, module_name_pos, &format!("Double definition of {} across files", module_name_msg))
 
                    .with_postfixed_info(&prev_module.source, prev_module_pos, "Other definition was here")
 
            );
 
        }
 

	
 
        self.modules.push(LexedModule{
 
            source,
 
            module_name: module_name.clone(),
 
            version: module_version,
 
            root_id: pd
 
        });
 
        self.module_lookup.insert(module_name, cur_module_idx);
 
        Ok(pd)
 
    }
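    // Minimal usage sketch for `feed` (assumes a `Parser::new` constructor and
    // a hypothetical `InputSource::from_string` helper; both are illustrative):
    //
    //     let mut parser = Parser::new();
    //     let root_id = parser.feed(InputSource::from_string("#module example"))?;
    //     parser.parse()?;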
 

	
 
    fn resolve_symbols_and_types(&mut self) -> Result<(), ParseError> {
 
        // Construct the symbol table to resolve any imports and/or definitions,
 
        // then use the symbol table to actually annotate all of the imports.
 
        // If the type table is constructed correctly then all imports MUST be
 
        // resolvable.
 
        self.symbol_table.build(&self.heap, &self.modules)?;
 

	
 
        // Not pretty, but we need to work around Rust's borrowing rules: it is
 
        // totally safe to mutate the contents of an AST element that we are
 
        // not borrowing anywhere else.
 
        let mut module_index = 0;
 
        let mut import_index = 0;
 
        loop {
 
            if module_index >= self.modules.len() {
 
                break;
 
            }
 

	
 
            let module_root_id = self.modules[module_index].root_id;
 
            let import_id = {
 
                let root = &self.heap[module_root_id];
 
                if import_index >= root.imports.len() {
 
                    module_index += 1;
 
                    import_index = 0;
 
                    continue
 
                }
 
                root.imports[import_index]
 
            };
 

	
 
            let import = &mut self.heap[import_id];
 
            match import {
 
                Import::Module(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module import already resolved");
 
                    let target_module_id = self.symbol_table.resolve_module(&import.module_name)
 
                        .expect("module import is resolved by symbol table");
 
                    import.module_id = Some(target_module_id)
 
                },
 
                Import::Symbols(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module of symbol import already resolved");
 
                    let target_module_id = self.symbol_table.resolve_module(&import.module_name)
 
                        .expect("symbol import's module is resolved by symbol table");
 
                    import.module_id = Some(target_module_id);
 

	
 
                    for symbol in &mut import.symbols {
 
                        debug_assert!(symbol.definition_id.is_none(), "symbol import already resolved");
 
                        let (_, target_definition_id) = self.symbol_table.resolve_identifier(module_root_id, &symbol.alias)
 
                            .expect("symbol import is resolved by symbol table")
 
                            .as_definition()
 
                            .expect("symbol import does not resolve to namespace symbol");
 
                        symbol.definition_id = Some(target_definition_id);
 
                    }
 
                }
 
            }
 

	
 
            import_index += 1;
 
        }
 

	
 
        // All imports in the AST are now annotated. We now use the symbol table
 
        // to construct the type table.
 
        let mut type_ctx = TypeCtx::new(&self.symbol_table, &mut self.heap, &self.modules);
 
        self.type_table.build_base_types(&mut type_ctx)?;
 

	
 
        Ok(())
 
    }
 

	
 
    pub fn parse(&mut self) -> Result<(), ParseError> {
 
        self.resolve_symbols_and_types()?;
 

	
 
        // Validate and link all modules
 
        let mut visit = ValidityAndLinkerVisitor::new();
 
        for module in &self.modules {
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.visit_module(&mut ctx)?;
 
        }
 

	
 
        // Perform typechecking on all modules
 
        let mut visit = TypeResolvingVisitor::new();
 
        let mut queue = ResolveQueue::new();
 
        for module in &self.modules {
 
            let ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            TypeResolvingVisitor::queue_module_definitions(&ctx, &mut queue);   
 
        }
 
        while !queue.is_empty() {
 
            let top = queue.pop().unwrap();
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module: &self.modules[top.root_id.index as usize],
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.handle_module_definition(&mut ctx, &mut queue, top)?;
 
        }
 

	
 
        // Perform remaining steps
 
        // TODO: Phase out at some point
 
        for module in &self.modules {
 
            let root_id = module.root_id;
 
            if let Err((position, message)) = Self::parse_inner(&mut self.heap, root_id) {
 
                return Err(ParseError::new_error(&module.source, position, &message))
 
            }
 
        }
 

	
 
 
        let mut writer = ASTWriter::new();
 
        let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap();
 
        writer.write_ast(&mut file, &self.heap);
 

	
 
        Ok(())
 
    }
 

	
 
    pub fn parse_inner(h: &mut Heap, pd: RootId) -> VisitorResult {
 
        // TODO: @cleanup, slowly phasing out old compiler
 
        // NestedSynchronousStatements::new().visit_protocol_description(h, pd)?;
 
        // ChannelStatementOccurrences::new().visit_protocol_description(h, pd)?;
 
        // FunctionStatementReturns::new().visit_protocol_description(h, pd)?;
 
        // ComponentStatementReturnNew::new().visit_protocol_description(h, pd)?;
 
        // CheckBuiltinOccurrences::new().visit_protocol_description(h, pd)?;
 
        // BuildSymbolDeclarations::new().visit_protocol_description(h, pd)?;
 
        // LinkCallExpressions::new().visit_protocol_description(h, pd)?;
 
        // BuildScope::new().visit_protocol_description(h, pd)?;
 
        // ResolveVariables::new().visit_protocol_description(h, pd)?;
 
        LinkStatements::new().visit_protocol_description(h, pd)?;
 
        // BuildLabels::new().visit_protocol_description(h, pd)?;
 
        // ResolveLabels::new().visit_protocol_description(h, pd)?;
 
        AssignableExpressions::new().visit_protocol_description(h, pd)?;
 
        IndexableExpressions::new().visit_protocol_description(h, pd)?;
 
        SelectableExpressions::new().visit_protocol_description(h, pd)?;
 

	
 
        Ok(())
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/type_resolver.rs
Show inline comments
 
@@ -56,386 +56,386 @@ macro_rules! debug_log {
 

	
 
use std::collections::{HashMap, HashSet, VecDeque};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::parser::type_table::*;
 
use super::visitor::{
 
    STMT_BUFFER_INIT_CAPACITY,
 
    EXPR_BUFFER_INIT_CAPACITY,
 
    Ctx,
 
    Visitor2,
 
    VisitorResult
 
};
 
use std::collections::hash_map::Entry;
 

	
 
const MESSAGE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Message, InferenceTypePart::Byte ];
 
const BOOL_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Bool ];
 
const NUMBERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::NumberLike ];
 
const INTEGERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::IntegerLike ];
 
const ARRAY_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Array, InferenceTypePart::Unknown ];
 
const ARRAYLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::ArrayLike, InferenceTypePart::Unknown ];
 

	
 
/// TODO: @performance Turn into PartialOrd+Ord to simplify checks
 
/// TODO: @types Remove the Message -> Byte hack at some point...
 
#[derive(Debug, Clone, Eq, PartialEq)]
 
pub(crate) enum InferenceTypePart {
 
    // A marker with an identifier which we can use to retrieve the type subtree
 
    // that follows the marker. This is used to perform type inference on
 
    // polymorphs: an expression may determine the polymorph's type, after which we
 
    // need to apply that information to all other places where the polymorph is
 
    // used.
 
    MarkerDefinition(usize), // marker for polymorph types on a procedure's definition
 
    MarkerBody(usize), // marker for polymorph types within a procedure body
 
    // Completely unknown type, needs to be inferred
 
    Unknown,
 
    // Partially known type, may be inferred to be the appropriate related
 
    // type.
 
    // IndexLike,      // index into array/slice
 
    NumberLike,     // any kind of integer/float
 
    IntegerLike,    // any kind of integer
 
    ArrayLike,      // array or slice. Note that this must have a subtype
 
    PortLike,       // input or output port
 
    // Special types that cannot be instantiated by the user
 
    Void, // For builtin functions that do not return anything
 
    // Concrete types without subtypes
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // One subtype
 
    Message,
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // A user-defined type with any number of subtypes
 
    Instance(DefinitionId, usize)
 
}
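// Illustrative example of the depth-first serialization used throughout this
// file: the type `in<Array<int>>` is stored as the part sequence
// `[Input, Array, Int]`, while a still-unknown polymorphic variable inside a
// body is stored as `[MarkerBody(poly_idx), Unknown]`.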
 

	
 
impl InferenceTypePart {
 
    fn is_marker(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        match self {
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if the type is concrete, markers are interpreted as concrete
 
    /// types.
 
    fn is_concrete(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | 
 
            ITP::ArrayLike | ITP::PortLike => false,
 
            _ => true
 
        }
 
    }
 

	
 
    fn is_concrete_number(&self) -> bool {
 
        // TODO: @float
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_integer(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_msg_array_or_slice(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Array | ITP::Slice | ITP::Message => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_port(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Input | ITP::Output => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if a part is less specific than the argument. Only checks for 
 
    /// single-part inference (i.e. not the replacement of an `Unknown` variant 
 
    /// with the argument)
 
    fn may_be_inferred_from(&self, arg: &InferenceTypePart) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        (*self == ITP::IntegerLike && arg.is_concrete_integer()) ||
 
        (*self == ITP::NumberLike && (arg.is_concrete_number() || *arg == ITP::IntegerLike)) ||
 
        (*self == ITP::ArrayLike && arg.is_concrete_msg_array_or_slice()) ||
 
        (*self == ITP::PortLike && arg.is_concrete_port())
 
    }
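    // For example (illustrative): `IntegerLike` may be inferred from `Byte`,
    // and `NumberLike` may be inferred from `IntegerLike`, but a concrete part
    // such as `Int` is never weakened back into `NumberLike`.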
 

	
 
    /// Returns the change in "iteration depth" when traversing this particular
 
    /// part. The iteration depth is used to traverse the tree in a linear 
 
    /// fashion. It is basically `number_of_subtypes - 1`
 
    fn depth_change(&self) -> i32 {
 
        use InferenceTypePart as ITP;
 
        match &self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike |
 
            ITP::Void | ITP::Bool |
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long | 
 
            ITP::String => {
 
                -1
 
            },
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) |
 
            ITP::ArrayLike | ITP::Message | ITP::Array | ITP::Slice |
 
            ITP::PortLike | ITP::Input | ITP::Output => {
 
                // One subtype, so do not modify depth
 
                0
 
            },
 
            ITP::Instance(_, num_args) => {
 
                (*num_args as i32) - 1
 
            }
 
        }
 
    }
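    // Illustrative walk-through: for the serialized type
    // `[Array, Instance(id, 2), Int, Int]` a traversal that starts with depth 1
    // and adds `depth_change()` per part goes 1 -> 1 -> 2 -> 1 -> 0, reaching
    // zero exactly after the last part of the tree.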
 
}
 

	
 
impl From<ConcreteTypePart> for InferenceTypePart {
 
    fn from(v: ConcreteTypePart) -> InferenceTypePart {
 
        use ConcreteTypePart as CTP;
 
        use InferenceTypePart as ITP;
 

	
 
        match v {
 
            CTP::Marker(_) => {
 
                unreachable!("encountered marker while converting concrete type to inferred type");
 
            }
 
            CTP::Void => ITP::Void,
 
            CTP::Message => ITP::Message,
 
            CTP::Bool => ITP::Bool,
 
            CTP::Byte => ITP::Byte,
 
            CTP::Short => ITP::Short,
 
            CTP::Int => ITP::Int,
 
            CTP::Long => ITP::Long,
 
            CTP::String => ITP::String,
 
            CTP::Array => ITP::Array,
 
            CTP::Slice => ITP::Slice,
 
            CTP::Input => ITP::Input,
 
            CTP::Output => ITP::Output,
 
            CTP::Instance(id, num) => ITP::Instance(id, num),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug)]
 
struct InferenceType {
 
    has_body_marker: bool,
 
    is_done: bool,
 
    parts: Vec<InferenceTypePart>,
 
}
 

	
 
impl InferenceType {
 
    /// Generates a new InferenceType. The two boolean flags will be checked in
 
    /// debug mode.
 
    fn new(has_body_marker: bool, is_done: bool, parts: Vec<InferenceTypePart>) -> Self {
 
        if cfg!(debug_assertions) {
 
            debug_assert!(!parts.is_empty());
 
            if !has_body_marker {
 
                debug_assert!(!parts.iter().any(|v| {
 
                    if let InferenceTypePart::MarkerBody(_) = v { true } else { false }
 
                }));
 
            }
 
            if is_done {
 
                debug_assert!(parts.iter().all(|v| v.is_concrete()));
 
            }
 
        }
 
        Self{ has_body_marker: has_body_marker, is_done, parts }
 
    }
 

	
 
    /// Replaces a type subtree with the provided subtree. The caller must make
 
    /// sure the the replacement is a well formed type subtree.
 
    fn replace_subtree(&mut self, start_idx: usize, with: &[InferenceTypePart]) {
 
        let end_idx = Self::find_subtree_end_idx(&self.parts, start_idx);
 
        debug_assert_eq!(with.len(), Self::find_subtree_end_idx(with, 0));
 
        self.parts.splice(start_idx..end_idx, with.iter().cloned());
 
        self.recompute_is_done();
 
    }
 

	
 
    // TODO: @performance, might all be done inline in the type inference methods
 
    fn recompute_is_done(&mut self) {
 
        self.is_done = self.parts.iter().all(|v| v.is_concrete());
 
    }
 

	
 
    /// Seeks a body marker starting at the specified position. If a marker is
 
    /// found then its value and the index of the type subtree that follows it
 
    /// are returned.
 
    fn find_body_marker(&self, mut start_idx: usize) -> Option<(usize, usize)> {
 
        while start_idx < self.parts.len() {
 
            if let InferenceTypePart::MarkerBody(marker) = &self.parts[start_idx] {
 
                return Some((*marker, start_idx + 1))
 
            }
 

	
 
            start_idx += 1;
 
        }
 

	
 
        None
 
    }
 

	
 
    /// Returns an iterator over all body markers and the partial type tree that
 
    /// follows those markers. If it is a problem that `InferenceType` is 
 
    /// borrowed by the iterator, then use `find_body_marker`.
 
    fn body_marker_iter(&self) -> InferenceTypeMarkerIter {
 
        InferenceTypeMarkerIter::new(&self.parts)
 
    }
 

	
 
    /// Given that the `parts` are a depth-first serialized tree of types, this
 
    /// function finds the subtree anchored at a specific node. The returned 
 
    /// index is exclusive.
 
    fn find_subtree_end_idx(parts: &[InferenceTypePart], start_idx: usize) -> usize {
 
        let mut depth = 1;
 
        let mut idx = start_idx;
 

	
 
        while idx < parts.len() {
 
            depth += parts[idx].depth_change();
 
            if depth == 0 {
 
                return idx + 1;
 
            }
 
            idx += 1;
 
        }
 

	
 
        // If here, then the inference type is malformed
 
        unreachable!();
 
    }
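    // Illustrative example: for parts `[Array, Instance(id, 2), Int, Unknown]`
    // (roughly `Array<Something<int, ?>>`), `find_subtree_end_idx(parts, 1)`
    // walks the `Instance` subtree and returns 4, the exclusive end index of
    // the subtree anchored at index 1.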
 

	
 
    /// Call that attempts to infer the part at `to_infer.parts[to_infer_idx]` 
 
    /// using the subtree at `template.parts[template_idx]`. Will return 
 
    /// `Some(depth_change_due_to_traversal)` if type inference has been 
 
    /// applied. In this case the indices will also be modified to point to the 
 
    /// next part in both types. If type inference was not (or could not be)
 
    /// applied then `None` will be returned. Note that this might mean that
 
    /// the types are incompatible.
 
    ///
 
    /// As this is a helper function, some assumptions apply: the parts are not
 
    /// exactly equal, and neither of them contains a marker. Also: only the
 
    /// `to_infer` parts are checked for inference. It might be that this 
 
    /// function returns `None`, but that `template` is still compatible
 
    /// with `to_infer`, e.g. when `template` has an `Unknown` part.
 
    fn infer_part_for_single_type(
 
        to_infer: &mut InferenceType, to_infer_idx: &mut usize,
 
        template_parts: &[InferenceTypePart], template_idx: &mut usize,
 
    ) -> Option<i32> {
 
        use InferenceTypePart as ITP;
 

	
 
        let to_infer_part = &to_infer.parts[*to_infer_idx];
 
        let template_part = &template_parts[*template_idx];
 

	
 
        // TODO: Maybe do this differently?
 
        let mut template_definition_marker = None;
 
        if *template_idx > 0 {
 
            if let ITP::MarkerDefinition(marker) = &template_parts[*template_idx - 1] {
 
                template_definition_marker = Some(*marker)
 
            }
 
        }
 

	
 
        // Check for programmer mistakes
 
        debug_assert_ne!(to_infer_part, template_part);
 
        debug_assert!(!to_infer_part.is_marker(), "marker encountered in 'infer part'");
 
        debug_assert!(!template_part.is_marker(), "marker encountered in 'template part'");
 

	
 
        // Inference of a somewhat-specified type
 
        if to_infer_part.may_be_inferred_from(template_part) {
 
            let depth_change = to_infer_part.depth_change();
 
            debug_assert_eq!(depth_change, template_part.depth_change());
 

	
 
            if let Some(marker) = template_definition_marker {
 
                to_infer.parts.insert(*to_infer_idx, ITP::MarkerDefinition(marker));
 
                *to_infer_idx += 1;
 
            }
 

	
 
            to_infer.parts[*to_infer_idx] = template_part.clone();
 

	
 
            *to_infer_idx += 1;
 
            *template_idx += 1;
 
            return Some(depth_change);
 
        }
 

	
 
        // Inference of a completely unknown type
 
        if *to_infer_part == ITP::Unknown {
 
            // template part is different, so cannot be unknown, hence copy the
 
            // entire subtree. Make sure to copy definition markers, but not to
 
            // transfer polymorph markers.
 
            let template_end_idx = Self::find_subtree_end_idx(template_parts, *template_idx);
 
            let template_start_idx = if let Some(marker) = template_definition_marker {
 
                to_infer.parts[*to_infer_idx] = ITP::MarkerDefinition(marker);
 
                *template_idx
 
            } else {
 
                to_infer.parts[*to_infer_idx] = template_parts[*template_idx].clone();
 
                *template_idx + 1
 
            };
 
            *to_infer_idx += 1;
 

	
 
            for template_idx in template_start_idx..template_end_idx {
 
                let template_part = &template_parts[template_idx];
 
                if let ITP::MarkerBody(_) = template_part {
 
                    // Do not copy this one
 
                } else {
 
                    to_infer.parts.insert(*to_infer_idx, template_part.clone());
 
                    *to_infer_idx += 1;
 
                }
 
            }
 
            *template_idx = template_end_idx;
 

	
 
            // Note: by definition the LHS was Unknown and the RHS traversed a 
 
            // full subtree.
 
            return Some(-1);
 
        }
 

	
 
        None
 
    }
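    // Illustrative example: when `to_infer` is `[Array, Unknown]` and the
    // template subtree at the current index is `[Int]`, the `Unknown` branch
    // above copies `Int` over, leaving `[Array, Int]` and returning `Some(-1)`.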
 

	
 
    /// Call that checks if the `to_check` part is compatible with the `template`
 
    /// part. This is essentially a copy of `infer_part_for_single_type`, but
 
    /// without actually copying the type parts.
 
    fn check_part_for_single_type(
 
        to_check_parts: &[InferenceTypePart], to_check_idx: &mut usize,
 
        template_parts: &[InferenceTypePart], template_idx: &mut usize
 
    ) -> Option<i32> {
 
        use InferenceTypePart as ITP;
 

	
 
        let to_check_part = &to_check_parts[*to_check_idx];
 
        let template_part = &template_parts[*template_idx];
 

	
 
        // Checking programmer errors
 
        debug_assert_ne!(to_check_part, template_part);
 
        debug_assert!(!to_check_part.is_marker(), "marker encountered in 'to_check part'");
 
        debug_assert!(!template_part.is_marker(), "marker encountered in 'template part'");
 

	
 
        if to_check_part.may_be_inferred_from(template_part) {
 
            let depth_change = to_check_part.depth_change();
 
            debug_assert_eq!(depth_change, template_part.depth_change());
 
            *to_check_idx += 1;
 
            *template_idx += 1;
 
            return Some(depth_change);
 
        }
 

	
 
        if *to_check_part == ITP::Unknown {
 
            *to_check_idx += 1;
 
            *template_idx = Self::find_subtree_end_idx(template_parts, *template_idx);
 

	
 
            // By definition LHS and RHS had depth change of -1
 
            return Some(-1);
 
        }
 

	
 
        None
 
    }
 

	
 
    /// Attempts to infer types between two `InferenceType` instances. This 
 
    /// function is unsafe as it accepts pointers to work around Rust's 
 
    /// borrowing rules. The caller must ensure that the pointers are distinct.
 
    unsafe fn infer_subtrees_for_both_types(
 
        type_a: *mut InferenceType, start_idx_a: usize,
 
        type_b: *mut InferenceType, start_idx_b: usize
 
@@ -1035,570 +1035,580 @@ impl Visitor2 for TypeResolvingVisitor {
 
        let test_expr_id = if_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_stmt(ctx, true_body_id)?;
 
        self.visit_stmt(ctx, false_body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_while_stmt(&mut self, ctx: &mut Ctx, id: WhileStatementId) -> VisitorResult {
 
        let while_stmt = &ctx.heap[id];
 

	
 
        let body_id = while_stmt.body;
 
        let test_expr_id = while_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        let sync_stmt = &ctx.heap[id];
 
        let body_id = sync_stmt.body;
 

	
 
        self.visit_stmt(ctx, body_id)
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        let return_stmt = &ctx.heap[id];
 
        let expr_id = return_stmt.expression;
 

	
 
        self.visit_expr(ctx, expr_id)
 
    }
 

	
 
    fn visit_assert_stmt(&mut self, ctx: &mut Ctx, id: AssertStatementId) -> VisitorResult {
 
        let assert_stmt = &ctx.heap[id];
 
        let test_expr_id = assert_stmt.expression;
 

	
 
        self.visit_expr(ctx, test_expr_id)
 
    }
 

	
 
    fn visit_new_stmt(&mut self, ctx: &mut Ctx, id: NewStatementId) -> VisitorResult {
 
        let new_stmt = &ctx.heap[id];
 
        let call_expr_id = new_stmt.expression;
 

	
 
        self.visit_call_expr(ctx, call_expr_id)
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        let expr_stmt = &ctx.heap[id];
 
        let subexpr_id = expr_stmt.expression;
 

	
 
        self.visit_expr(ctx, subexpr_id)
 
    }
 

	
 
    // Expressions
 

	
 
    fn visit_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let assign_expr = &ctx.heap[id];
 
        let left_expr_id = assign_expr.left;
 
        let right_expr_id = assign_expr.right;
 

	
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.visit_expr(ctx, right_expr_id)?;
 

	
 
        self.progress_assignment_expr(ctx, id)
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let conditional_expr = &ctx.heap[id];
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        self.expr_types.insert(test_expr_id, InferenceType::new(false, true, vec![InferenceTypePart::Bool]));
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_expr(ctx, true_expr_id)?;
 
        self.visit_expr(ctx, false_expr_id)?;
 

	
 
        self.progress_conditional_expr(ctx, id)
 
    }
 

	
 
    fn visit_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let binary_expr = &ctx.heap[id];
 
        let lhs_expr_id = binary_expr.left;
 
        let rhs_expr_id = binary_expr.right;
 

	
 
        self.visit_expr(ctx, lhs_expr_id)?;
 
        self.visit_expr(ctx, rhs_expr_id)?;
 

	
 
        self.progress_binary_expr(ctx, id)
 
    }
 

	
 
    fn visit_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let unary_expr = &ctx.heap[id];
 
        let arg_expr_id = unary_expr.expression;
 

	
 
        self.visit_expr(ctx, arg_expr_id)?;
 

	
 
        self.progress_unary_expr(ctx, id)
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let indexing_expr = &ctx.heap[id];
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, index_expr_id)?;
 

	
 
        self.progress_indexing_expr(ctx, id)
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let slicing_expr = &ctx.heap[id];
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, from_expr_id)?;
 
        self.visit_expr(ctx, to_expr_id)?;
 

	
 
        self.progress_slicing_expr(ctx, id)
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let select_expr = &ctx.heap[id];
 
        let subject_expr_id = select_expr.subject;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        self.progress_select_expr(ctx, id)
 
    }
 

	
 
    fn visit_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let array_expr = &ctx.heap[id];
 
        // TODO: @performance
 
        for element_id in array_expr.elements.clone().into_iter() {
 
            self.visit_expr(ctx, element_id)?;
 
        }
 

	
 
        self.progress_array_expr(ctx, id)
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let literal_expr = &ctx.heap[id];
 
        match &literal_expr.value {
 
            Literal::Null | Literal::False | Literal::True |
 
            Literal::Integer(_) | Literal::Character(_) => {
 
                // No subexpressions
 
            },
 
            Literal::Struct(literal) => {
 
                // TODO: @performance
 
                let expr_ids: Vec<_> = literal.fields
 
                    .iter()
 
                    .map(|f| f.value)
 
                    .collect();
 

	
 
                self.insert_initial_struct_polymorph_data(ctx, id);
 

	
 
                for expr_id in expr_ids {
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 
            },
 
            Literal::Enum(_) => {
 
                // Enumerations do not carry any subexpressions, but may still
 
                // have a user-defined polymorphic marker variable. For this 
 
                // reason we may still have to apply inference to this 
 
                // polymorphic variable
 
                self.insert_initial_enum_polymorph_data(ctx, id);
 
            }
 
        }
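        // Illustrative example (hypothetical PDL source): a literal such as
        // `Option<byte>::None` has no subexpressions, yet its polymorphic
        // argument `byte` still has to flow into the inference data set up
        // above.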
 

	
 
        self.progress_literal_expr(ctx, id)
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        self.insert_initial_call_polymorph_data(ctx, id);
 

	
 
        // TODO: @performance
 
        let call_expr = &ctx.heap[id];
 
        for arg_expr_id in call_expr.arguments.clone() {
 
            self.visit_expr(ctx, arg_expr_id)?;
 
        }
 

	
 
        self.progress_call_expr(ctx, id)
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let var_expr = &ctx.heap[id];
 
        debug_assert!(var_expr.declaration.is_some());
 
        let var_data = self.var_types.get_mut(var_expr.declaration.as_ref().unwrap()).unwrap();
 
        var_data.used_at.push(upcast_id);
 

	
 
        self.progress_variable_expr(ctx, id)
 
    }
 
}
 

	
 
macro_rules! debug_assert_expr_ids_unique_and_known {
 
    // Base case for a single expression ID
 
    ($resolver:ident, $id:ident) => {
 
        if cfg!(debug_assertions) {
 
            debug_assert!($resolver.expr_types.contains_key(&$id));
 
        }
 
    };
 
    // Base case for two expression IDs
 
    ($resolver:ident, $id1:ident, $id2:ident) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2);
 
    };
 
    // Generic case
 
    ($resolver:ident, $id1:ident, $id2:ident, $($tail:ident),+) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2, $($tail),+);
 
    };
 
}
 

	
 
macro_rules! debug_assert_ptrs_distinct {
 
    // Base case
 
    ($ptr1:ident, $ptr2:ident) => {
 
        debug_assert!(!std::ptr::eq($ptr1, $ptr2));
 
    };
 
    // Generic case
 
    ($ptr1:ident, $ptr2:ident, $($tail:ident),+) => {
 
        debug_assert_ptrs_distinct!($ptr1, $ptr2);
 
        debug_assert_ptrs_distinct!($ptr2, $($tail),+);
 
    };
 
}
 

	
 
impl TypeResolvingVisitor {
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError> {
 
        // Keep inferring until we can no longer make any progress
 
        while let Some(next_expr_id) = self.expr_queued.iter().next() {
 
            let next_expr_id = *next_expr_id;
 
            self.expr_queued.remove(&next_expr_id);
 
            self.progress_expr(ctx, next_expr_id)?;
 
        }
 

	
 
        // We check if we have all the types we need. If we're typechecking a 
 
        // polymorphic procedure more than once, then we have already annotated
 
        // the AST and have now performed typechecking for a different 
 
        // monomorph. In that case we just need to perform typechecking, no need
 
        // to annotate the AST again.
 
        let definition_id = match &self.definition_type {
 
            DefinitionType::Component(id) => id.upcast(),
 
            DefinitionType::Function(id) => id.upcast(),
 
            _ => unreachable!(),
 
        };
 

	
 
        let already_checked = ctx.types.get_base_definition(&definition_id).unwrap().has_any_monomorph();
 
        for (expr_id, expr_type) in self.expr_types.iter_mut() {
 
            if !expr_type.is_done {
 
                // Auto-infer numberlike/integerlike types to a regular int
 
                if expr_type.parts.len() == 1 && expr_type.parts[0] == InferenceTypePart::IntegerLike {
 
                    expr_type.parts[0] = InferenceTypePart::Int;
 
                } else {
 
                    let expr = &ctx.heap[*expr_id];
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Could not fully infer the type of this expression (got '{}')",
 
                            expr_type.display_name(&ctx.heap)
 
                        )
 
                    ))
 
                }
 
            }
 

	
 
            if !already_checked {
 
                let concrete_type = ctx.heap[*expr_id].get_type_mut();
 
                expr_type.write_concrete_type(concrete_type);
 
            } else {
 
                if cfg!(debug_assertions) {
 
                    let mut concrete_type = ConcreteType::default();
 
                    expr_type.write_concrete_type(&mut concrete_type);
 
                    debug_assert_eq!(*ctx.heap[*expr_id].get_type(), concrete_type);
 
                }
 
            }
 
        }
 

	
 
        // All types are fine
 
        ctx.types.add_monomorph(&definition_id, self.poly_vars.clone());
 

	
 
        // Check all things we need to monomorphize
 
        // TODO: Struct/enum/union monomorphization
 
        for (expr_id, extra_data) in self.extra_data.iter() {
 
            if extra_data.poly_vars.is_empty() { continue; }
 

	
 
            // Retrieve polymorph variable specification. Those of struct 
 
            // literals and those of procedure calls need to be fully inferred.
 
            // The remaining ones (e.g. select expressions) allow partial 
 
            // inference of types, as long as the accessed field's type is
 
            // fully inferred.
 
            let needs_full_inference = match &ctx.heap[*expr_id] {
 
                Expression::Call(_) => true,
 
                Expression::Literal(_) => true,
 
                _ => false
 
            };
 

	
 
            if needs_full_inference {
 
                let mut monomorph_types = Vec::with_capacity(extra_data.poly_vars.len());
 
                for (poly_idx, poly_type) in extra_data.poly_vars.iter().enumerate() {
 
                    if !poly_type.is_done {
 
                        // TODO: Single clean function for function signatures and polyvars.
 
                        // TODO: Better error message
 
                        let expr = &ctx.heap[*expr_id];
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, expr.position(),
 
                            &format!(
 
                                "Could not fully infer the type of polymorphic variable {} of this expression (got '{}')",
 
                                poly_idx, poly_type.display_name(&ctx.heap)
 
                            )
 
                        ))
 
                    }
 

	
 
                    let mut concrete_type = ConcreteType::default();
 
                    poly_type.write_concrete_type(&mut concrete_type);
 
                    monomorph_types.insert(poly_idx, concrete_type);
 
                }
 

	
 
                // Resolve to the appropriate expression and instantiate 
 
                // monomorphs.
 
                match &ctx.heap[*expr_id] {
 
                    Expression::Call(call_expr) => {
 
                        // Add to type table if not yet typechecked
 
                        if let Method::Symbolic(symbolic) = &call_expr.method {
 
                            let definition_id = symbolic.definition.unwrap();
 
                            if !ctx.types.has_monomorph(&definition_id, &monomorph_types) {
 
                                let root_id = ctx.types
 
                                    .get_base_definition(&definition_id)
 
                                    .unwrap()
 
                                    .ast_root;
 

	
 
                                // Pre-emptively add the monomorph to the type table, but
 
                                // we still need to perform typechecking on it
 
                                // TODO: Unsure about this, performance wise
 
                                let queue_element = ResolveQueueElement{
 
                                    root_id,
 
                                    definition_id,
 
                                    monomorph_types,
 
                                };
 
                                if !queue.contains(&queue_element) {
 
                                    queue.push(queue_element);
 
                                }
 
                            }
 
                        }
 
                    },
 
                    Expression::Literal(lit_expr) => {
 
 
                        let definition_id = match &lit_expr.value {
 
                            Literal::Struct(literal) => literal.definition.as_ref().unwrap(),
 
                            Literal::Enum(literal) => literal.definition.as_ref().unwrap(),
 
                            _ => unreachable!("post-inference monomorph for non-struct, non-enum literal")
 
                        };
 
                        if !ctx.types.has_monomorph(definition_id, &monomorph_types) {
 
                            ctx.types.add_monomorph(definition_id, monomorph_types);
 
                        }
 
                    },
 
                    _ => unreachable!("needs fully inference, but not a struct literal or call expression")
 
                }
 
            } // else: was just a helper structure...
 
        }
 

	
 
        Ok(())
 
    }
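    // Illustrative sketch of the flow above (the call and types are
    // hypothetical): a fully inferred polymorphic call such as `max<int>(a, b)`
    // pushes a `ResolveQueueElement` for `max` with monomorph types `[int]`
    // onto the queue, so the callee is typechecked once per distinct set of
    // polymorphic arguments; struct and enum literals only register their
    // monomorph in the type table.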
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError> {
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let id = expr.this;
 
                self.progress_assignment_expr(ctx, id)
 
            },
 
            Expression::Conditional(expr) => {
 
                let id = expr.this;
 
                self.progress_conditional_expr(ctx, id)
 
            },
 
            Expression::Binary(expr) => {
 
                let id = expr.this;
 
                self.progress_binary_expr(ctx, id)
 
            },
 
            Expression::Unary(expr) => {
 
                let id = expr.this;
 
                self.progress_unary_expr(ctx, id)
 
            },
 
            Expression::Indexing(expr) => {
 
                let id = expr.this;
 
                self.progress_indexing_expr(ctx, id)
 
            },
 
            Expression::Slicing(expr) => {
 
                let id = expr.this;
 
                self.progress_slicing_expr(ctx, id)
 
            },
 
            Expression::Select(expr) => {
 
                let id = expr.this;
 
                self.progress_select_expr(ctx, id)
 
            },
 
            Expression::Array(expr) => {
 
                let id = expr.this;
 
                self.progress_array_expr(ctx, id)
 
            },
 
            Expression::Literal(expr) => {
 
                let id = expr.this;
 
                self.progress_literal_expr(ctx, id)
 
            },
 
            Expression::Call(expr) => {
 
                let id = expr.this;
 
                self.progress_call_expr(ctx, id)
 
            },
 
            Expression::Variable(expr) => {
 
                let id = expr.this;
 
                self.progress_variable_expr(ctx, id)
 
            }
 
        }
 
    }
 

	
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> Result<(), ParseError> {
 
        use AssignmentOperator as AO;
 

	
 
        // TODO: Assignable check
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.left;
 
        let arg2_expr_id = expr.right;
 

	
 
        debug_log!("Assignment expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let progress_base = match expr.operation {
 
            AO::Set =>
 
                false,
 
            AO::Multiplied | AO::Divided | AO::Added | AO::Subtracted =>
 
                self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?,
 
            AO::Remained | AO::ShiftedLeft | AO::ShiftedRight |
 
            AO::BitwiseAnded | AO::BitwiseXored | AO::BitwiseOred =>
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?,
 
        };
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_base || progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 

	
 
        if progress_base || progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> Result<(), ParseError> {
 
        // Note: test expression type is already enforced
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.true_expression;
 
        let arg2_expr_id = expr.false_expression;
 

	
 
        debug_log!("Conditional expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> Result<(), ParseError> {
 
        // Note: our expression type might be fixed by our parent, but we still
 
        // need to make sure it matches the type associated with our operation.
 
        use BinaryOperator as BO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_id = expr.left;
 
        let arg2_id = expr.right;
 

	
 
        debug_log!("Binary expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = match expr.operation {
 
            BO::Concatenate => {
 
                // Arguments may be arrays/slices, output is always an array
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &ARRAYLIKE_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &ARRAYLIKE_TEMPLATE)?;
 

	
 
                // If they're all arraylike, then we want the subtype to match
 
                let (subtype_expr, subtype_arg1, subtype_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 1)?;
 

	
 
                (progress_expr || subtype_expr, progress_arg1 || subtype_arg1, progress_arg2 || subtype_arg2)
 
            },
 
            BO::LogicalOr | BO::LogicalAnd => {
 
                // Forced boolean on all
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &BOOL_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &BOOL_TEMPLATE)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::BitwiseOr | BO::BitwiseXor | BO::BitwiseAnd | BO::Remainder | BO::ShiftLeft | BO::ShiftRight => {
 
                // All equal of integer type
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
            BO::Equality | BO::Inequality => {
 
                // Equal2 on args, forced boolean output
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let (progress_arg1, progress_arg2) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, arg1_id, 0, arg2_id, 0)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::LessThan | BO::GreaterThan | BO::LessThanEqual | BO::GreaterThanEqual => {
 
                // Equal2 on args with numberlike type, forced boolean output
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg_base = self.apply_forced_constraint(ctx, arg1_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_arg1, progress_arg2) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, arg1_id, 0, arg2_id, 0)?;
 

	
 
@@ -1873,386 +1883,422 @@ impl TypeResolvingVisitor {
 
                    self.expr_queued.insert(subject_id);
 
                }
 
                
 
                // Apply to field's type
 
                let signature_type: *mut _ = &mut poly_data.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, None, poly_data, &mut poly_progress, 
 
                    signature_type, 0, expr_type, 0
 
                )?;
 

	
 
                if progress_expr {
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 

	
 
                // Reapply progress in polymorphic variables to struct's type
 
                let signature_type: *mut _ = &mut poly_data.embedded[0];
 
                let subject_type: *mut _ = self.expr_types.get_mut(&subject_id).unwrap();
 
                
 
                let progress_subject = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    poly_data, &poly_progress, signature_type, subject_type
 
                );
 

	
 
                let signature_type: *mut _ = &mut poly_data.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    poly_data, &poly_progress, signature_type, expr_type
 
                );
 

	
 
                (progress_subject, progress_expr)
 
            }
 
        };
 

	
 
        if progress_subject { self.queue_expr(subject_id); }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let expr_elements = expr.elements.clone(); // TODO: @performance
 

	
 
        debug_log!("Array expr ({} elements): {}", expr_elements.len(), upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // All elements should have an equal type
 
        let progress = self.apply_equal_n_constraint(ctx, upcast_id, &expr_elements)?;
 
        for (progress_arg, arg_id) in progress.iter().zip(expr_elements.iter()) {
 
            if *progress_arg {
 
                self.queue_expr(*arg_id);
 
            }
 
        }
 

	
 
        // And the output should be an array of the element types
 
        let mut expr_progress = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
        if !expr_elements.is_empty() {
 
            let first_arg_id = expr_elements[0];
 
            let (inner_expr_progress, arg_progress) = self.apply_equal2_constraint(
 
                ctx, upcast_id, upcast_id, 1, first_arg_id, 0
 
            )?;
 

	
 
            expr_progress = expr_progress || inner_expr_progress;
 

	
 
            // Note that if the array type progressed the type of the arguments,
 
            // then we should enqueue this progression function again
 
            // TODO: @fix Make apply_equal_n accept a start idx as well
 
            if arg_progress { self.queue_expr(upcast_id); }
 
        }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type [{}]: {}", expr_progress, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if expr_progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 

	
 
        debug_log!("Literal expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let progress_expr = match &expr.value {
 
            Literal::Null => {
 
                self.apply_forced_constraint(ctx, upcast_id, &MESSAGE_TEMPLATE)?
 
            },
 
            Literal::Integer(_) => {
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?
 
            },
 
            Literal::True | Literal::False => {
 
                self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?
 
            },
 
            Literal::Character(_) => todo!("character literals"),
 
            Literal::Struct(data) => {
 
                let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 
                for poly in &extra.poly_vars {
 
                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
 
                }
 
                let mut poly_progress = HashSet::new();
 
                debug_assert_eq!(extra.embedded.len(), data.fields.len());
 

	
 
                debug_log!(" * During (inferring types from fields and struct type):");
 

	
 
                // Mutually infer field signature/expression types
 
                for (field_idx, field) in data.fields.iter().enumerate() {
 
                    let field_expr_id = field.value;
 
                    let signature_type: *mut _ = &mut extra.embedded[field_idx];
 
                    let field_type: *mut _ = self.expr_types.get_mut(&field_expr_id).unwrap();
 
                    let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
                        ctx, upcast_id, Some(field_expr_id), extra, &mut poly_progress,
 
                        signature_type, 0, field_type, 0
 
                    )?;
 

	
 
                    debug_log!(
 
                        "   - Field {} type | sig: {}, field: {}", field_idx,
 
                        unsafe{&*signature_type}.display_name(&ctx.heap),
 
                        unsafe{&*field_type}.display_name(&ctx.heap)
 
                    );
 

	
 
                    if progress_arg {
 
                        self.expr_queued.insert(field_expr_id);
 
                    }
 
                }
 

	
 
                debug_log!("   - Field poly progress | {:?}", poly_progress);
 

	
 
                // Same for the type of the struct itself
 
                let signature_type: *mut _ = &mut extra.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
                let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, None, extra, &mut poly_progress,
 
                    signature_type, 0, expr_type, 0
 
                )?;
 

	
 
                debug_log!(
 
                    "   - Ret type | sig: {}, expr: {}",
 
                    unsafe{&*signature_type}.display_name(&ctx.heap),
 
                    unsafe{&*expr_type}.display_name(&ctx.heap)
 
                );
 
                debug_log!("   - Ret poly progress | {:?}", poly_progress);
 

	
 
                if progress_expr {
 
                    // TODO: @cleanup, cannot call utility self.queue_parent thingo
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 

	
 
                // Check which expressions use the polymorphic arguments. If the
 
                // polymorphic variables have been progressed then we try to 
 
                // progress them inside the expression as well.
 
                debug_log!(" * During (reinferring from progressed polyvars):");
 

	
 
                // For all field expressions
 
                for field_idx in 0..extra.embedded.len() {
 
                    debug_assert_eq!(field_idx, data.fields[field_idx].field_idx, "confusing, innit?");
 
                    let signature_type: *mut _ = &mut extra.embedded[field_idx];
 
                    let field_expr_id = data.fields[field_idx].value;
 
                    let field_type: *mut _ = self.expr_types.get_mut(&field_expr_id).unwrap();
 

	
 
                    let progress_arg = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                        extra, &poly_progress, signature_type, field_type
 
                    );
 

	
 
                    debug_log!(
 
                        "   - Field {} type | sig: {}, field: {}", field_idx,
 
                        unsafe{&*signature_type}.display_name(&ctx.heap),
 
                        unsafe{&*field_type}.display_name(&ctx.heap)
 
                    );
 
                    if progress_arg {
 
                        self.expr_queued.insert(field_expr_id);
 
                    }
 
                }
 
                
 
                // For the return type
 
                let signature_type: *mut _ = &mut extra.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(
 
                    &ctx.heap, extra, &poly_progress, signature_type, expr_type
 
                );
 

	
 
                progress_expr
 
            },
 
            Literal::Enum(_) => {
 
                let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 
                for poly in &extra.poly_vars {
 
                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
 
                }
 
                let mut poly_progress = HashSet::new();
 
                
 
                debug_log!(" * During (inferring types from return type)");
 

	
 
                let signature_type: *mut _ = &mut extra.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
                let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, None, extra, &mut poly_progress,
 
                    signature_type, 0, expr_type, 0
 
                )?;
 

	
 
                debug_log!(
 
                    "   - Ret type | sig: {}, expr: {}",
 
                    unsafe{&*signature_type}.display_name(&ctx.heap),
 
                    unsafe{&*expr_type}.display_name(&ctx.heap)
 
                );
 

	
 
                if progress_expr {
 
                    // TODO: @cleanup
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 

	
 
                debug_log!(" * During (reinferring from progressed polyvars):");
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(
 
                    &ctx.heap, extra, &poly_progress, signature_type, expr_type
 
                );
 

	
 
                progress_expr
 
            }
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // TODO: FIX!!!!
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    // TODO: @cleanup, see how this can be cleaned up once I implement
 
    //  polymorphic struct/enum/union literals. These likely follow the same
 
    //  pattern as here.
 
    fn progress_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 

	
 
        debug_log!("Call expr '{}': {}", match &expr.method {
 
            Method::Create => String::from("create"),
 
            Method::Fires => String::from("fires"),
 
            Method::Get => String::from("get"),
 
            Method::Put => String::from("put"),
 
            Method::Symbolic(method) => String::from_utf8_lossy(&method.identifier.value).to_string()
 
        }, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 
        debug_log!(" * During (inferring types from arguments and return type):");
 

	
 
        // Check if we can make progress using the arguments and/or return types
 
        // while keeping track of the polyvars we've extended
 
        let mut poly_progress = HashSet::new();
 
        debug_assert_eq!(extra.embedded.len(), expr.arguments.len());
 

	
 
        for (arg_idx, arg_id) in expr.arguments.clone().into_iter().enumerate() {
 
            let signature_type: *mut _ = &mut extra.embedded[arg_idx];
 
            let argument_type: *mut _ = self.expr_types.get_mut(&arg_id).unwrap();
 
            let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
                ctx, upcast_id, Some(arg_id), extra, &mut poly_progress,
 
                signature_type, 0, argument_type, 0
 
            )?;
 

	
 
            debug_log!(
 
                "   - Arg {} type | sig: {}, arg: {}", arg_idx,
 
                unsafe{&*signature_type}.display_name(&ctx.heap), 
 
                unsafe{&*argument_type}.display_name(&ctx.heap));
 

	
 
            if progress_arg {
 
                // Progressed argument expression
 
                self.expr_queued.insert(arg_id);
 
            }
 
        }
 

	
 
        // Do the same for the return type
 
        let signature_type: *mut _ = &mut extra.returned;
 
        let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
        let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
            ctx, upcast_id, None, extra, &mut poly_progress,
 
            signature_type, 0, expr_type, 0
 
        )?;
 

	
 
        debug_log!(
 
            "   - Ret type | sig: {}, expr: {}", 
 
            unsafe{&*signature_type}.display_name(&ctx.heap), 
 
            unsafe{&*expr_type}.display_name(&ctx.heap)
 
        );
 

	
 
        if progress_expr {
 
            // TODO: @cleanup, cannot call utility self.queue_parent thingo
 
            if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                self.expr_queued.insert(parent_id);
 
            }
 
        }
 

	
 
        // If we did not have an error in the polymorph inference above, then
 
        // reapplying the polymorph type to each argument type and the return
 
        // type should always succeed.
 
        debug_log!(" * During (reinferring from progressed polyvars):");
 
        for (poly_idx, poly_var) in extra.poly_vars.iter().enumerate() {
 
            debug_log!("   - Poly {} | sig: {}", poly_idx, poly_var.display_name(&ctx.heap));
 
        }
 
        // TODO: @performance If the algorithm is changed to be more "on demand
 
        //  argument re-evaluation", instead of "all-argument re-evaluation",
 
        //  then this is no longer true
 
        for arg_idx in 0..extra.embedded.len() {
 
            let signature_type: *mut _ = &mut extra.embedded[arg_idx];
 
            let arg_expr_id = expr.arguments[arg_idx];
 
            let arg_type: *mut _ = self.expr_types.get_mut(&arg_expr_id).unwrap();
 
            
 
            let progress_arg = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                extra, &poly_progress,
 
                signature_type, arg_type
 
            );
 
            
 
            debug_log!(
 
                "   - Arg {} type | sig: {}, arg: {}", arg_idx, 
 
                unsafe{&*signature_type}.display_name(&ctx.heap), 
 
                unsafe{&*arg_type}.display_name(&ctx.heap)
 
            );
 
            if progress_arg {
 
                self.expr_queued.insert(arg_expr_id);
 
            }
 
        }
 

	
 
        // Once more for the return type
 
        let signature_type: *mut _ = &mut extra.returned;
 
        let ret_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
        let progress_ret = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
            extra, &poly_progress, signature_type, ret_type
 
        );
 
        debug_log!(
 
            "   - Ret type | sig: {}, arg: {}", 
 
            unsafe{&*signature_type}.display_name(&ctx.heap), 
 
            unsafe{&*ret_type}.display_name(&ctx.heap)
 
        );
 
        if progress_ret {
 
            self.queue_expr_parent(ctx, upcast_id);
 
        }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let var_expr = &ctx.heap[id];
 
        let var_id = var_expr.declaration.unwrap();
 

	
 
        debug_log!("Variable expr '{}': {}", &String::from_utf8_lossy(&ctx.heap[var_id].identifier().value), upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Var  type: {}", self.var_types.get(&var_id).unwrap().var_type.display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // Retrieve shared variable type and expression type and apply inference
 
        let var_data = self.var_types.get_mut(&var_id).unwrap();
 
        let expr_type = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            &mut var_data.var_type as *mut _, 0, expr_type, 0
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            let var_decl = &ctx.heap[var_id];
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, var_decl.position(),
 
                &format!(
 
                    "Conflicting types for this variable, previously assigned the type '{}'",
 
                    var_data.var_type.display_name(&ctx.heap)
 
                )
 
            ).with_postfixed_info(
 
                &ctx.module.source, var_expr.position,
 
                &format!(
 
                    "But inferred to have incompatible type '{}' here",
 
                    expr_type.display_name(&ctx.heap)
 
                )
 
            ))
 
        }
 

	
 
        let progress_var = infer_res.modified_lhs();
 
        let progress_expr = infer_res.modified_rhs();
 

	
 
        if progress_var {
 
            // Let other variable expressions using this type progress as well
 
            for other_expr in var_data.used_at.iter() {
 
                if *other_expr != upcast_id {
 
                    self.expr_queued.insert(*other_expr);
 
                }
 
            }
 

	
 
            // Let a linked port know that our type has updated
 
            if let Some(linked_id) = var_data.linked_var {
 
                // Only perform one-way inference to prevent updating our type, this
 
                // would lead to an inconsistency
 
                let var_type: *mut _ = &mut var_data.var_type;
 
                let link_data = self.var_types.get_mut(&linked_id).unwrap();
 

	
 
                debug_assert!(
 
                    unsafe{&*var_type}.parts[0] == InferenceTypePart::Input ||
 
                    unsafe{&*var_type}.parts[0] == InferenceTypePart::Output
 
                );
 
                debug_assert!(
 
                    link_data.var_type.parts[0] == InferenceTypePart::Input ||
 
                    link_data.var_type.parts[0] == InferenceTypePart::Output
 
                );
 
                match InferenceType::infer_subtree_for_single_type(&mut link_data.var_type, 1, &unsafe{&*var_type}.parts, 1) {
 
@@ -2599,421 +2645,461 @@ impl TypeResolvingVisitor {
 
            }
 
            progress[arg_idx] = true;
 
        }
 

	
 
        Ok(progress)
 
    }
 

	
 
    /// Determines the `InferenceType` for the expression based on the
 
    /// expression parent. Note that if the parent is another expression, we do
 
    /// not take special action, instead we let parent expressions fix the type
 
    /// of subexpressions before they have a chance to call this function.
 
    fn insert_initial_expr_inference_type(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId
 
    ) -> Result<(), ParseError> {
 
        use ExpressionParent as EP;
 
        use InferenceTypePart as ITP;
 

	
 
        let expr = &ctx.heap[expr_id];
 
        let inference_type = match expr.parent() {
 
            EP::None =>
 
                // Should have been set by linker
 
                unreachable!(),
 
            EP::ExpressionStmt(_) | EP::Expression(_, _) =>
 
                // Determined during type inference
 
                InferenceType::new(false, false, vec![ITP::Unknown]),
 
            EP::If(_) | EP::While(_) | EP::Assert(_) =>
 
                // Must be a boolean
 
                InferenceType::new(false, true, vec![ITP::Bool]),
 
            EP::Return(_) =>
 
                // Must match the return type of the function
 
                if let DefinitionType::Function(func_id) = self.definition_type {
 
                    let return_parser_type_id = ctx.heap[func_id].return_type;
 
                    self.determine_inference_type_from_parser_type(ctx, return_parser_type_id, true)
 
                } else {
 
                    // Cannot happen: definition always set upon body traversal
 
                    // and "return" statements in components are illegal.
 
                    unreachable!();
 
                },
 
            EP::New(_) =>
 
                // Must be a component call, which we assign a "Void" return
 
                // type
 
                InferenceType::new(false, true, vec![ITP::Void]),
 
        };
 

	
 
        match self.expr_types.entry(expr_id) {
 
            Entry::Vacant(vacant) => {
 
                vacant.insert(inference_type);
 
            },
 
            Entry::Occupied(mut preexisting) => {
 
                // We already have an entry, this happens if our parent fixed
 
                // our type (e.g. we're used in a conditional expression's test)
 
                // but we have a different type.
 
                // TODO: Is this ever reached? It seems like it can't be.
 
                debug_assert!(false, "I am actually called, my ID is {}", expr_id.index);
 
                let old_type = preexisting.get_mut();
 
                if let SingleInferenceResult::Incompatible = InferenceType::infer_subtree_for_single_type(
 
                    old_type, 0, &inference_type.parts, 0
 
                ) {
 
                    return Err(self.construct_expr_type_error(ctx, expr_id, expr_id))
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn insert_initial_call_polymorph_data(
 
        &mut self, ctx: &mut Ctx, call_id: CallExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Note: the polymorph variables may be partially specified and may
 
        // contain references to the wrapping definition's (i.e. the proctype
 
        // we are currently visiting) polymorphic arguments.
 
        //
 
        // The arguments of the call may refer to polymorphic variables in the
 
        // definition of the function we're calling, not of the wrapping
 
        // definition. We insert markers in these inferred types to be able to
 
        // map them back and forth to the polymorphic arguments of the function
 
        // we are calling.
 
        let call = &ctx.heap[call_id];
 

	
 
        // Handle the polymorphic variables themselves
 
        let mut poly_vars = Vec::with_capacity(call.poly_args.len());
 
        for poly_arg_type_id in call.poly_args.clone() { // TODO: @performance
 
            poly_vars.push(self.determine_inference_type_from_parser_type(ctx, poly_arg_type_id, true));
 
        }
 

	
 
        // Handle the arguments
 
        // TODO: @cleanup: Maybe factor this out for reuse in the validator/linker, should also
 
        //  make the code slightly more robust.
 
        let (embedded_types, return_type) = match &call.method {
 
            Method::Create => {
 
                // Not polymorphic
 
                (
 
                    vec![InferenceType::new(false, true, vec![ITP::Int])],
 
                    InferenceType::new(false, true, vec![ITP::Message, ITP::Byte])
 
                )
 
            },
 
            Method::Fires => {
 
                // bool fires<T>(PortLike<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::PortLike, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(false, true, vec![ITP::Bool])
 
                )
 
            },
 
            Method::Get => {
 
                // T get<T>(input<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::Input, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                )
 
            },
 
            Method::Put => {
 
                // void put<T>(output<T> port, T msg)
 
                (
 
                    vec![
 
                        InferenceType::new(true, false, vec![ITP::Output, ITP::MarkerBody(0), ITP::Unknown]),
 
                        InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                    ],
 
                    InferenceType::new(false, true, vec![ITP::Void])
 
                )
 
            }
 
            Method::Symbolic(symbolic) => {
 
                let definition = &ctx.heap[symbolic.definition.unwrap()];
 

	
 
                match definition {
 
                    Definition::Component(definition) => {
 
                        debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            let param_parser_type_id = param.parser_type;
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, param_parser_type_id, false));
 
                        }
 

	
 
                        (parameter_types, InferenceType::new(false, true, vec![InferenceTypePart::Void]))
 
                    },
 
                    Definition::Function(definition) => {
 
                        debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            let param_parser_type_id = param.parser_type;
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, param_parser_type_id, false));
 
                        }
 

	
 
                        let return_type = self.determine_inference_type_from_parser_type(ctx, definition.return_type, false);
 
                        (parameter_types, return_type)
 
                    },
 
                    Definition::Struct(_) | Definition::Enum(_) => {
 
                        unreachable!("insert initial polymorph data for struct/enum");
 
                    }
 
                }
 
            }
 
        };
 

	
 
        self.extra_data.insert(call_id.upcast(), ExtraData {
 
            poly_vars,
 
            embedded: embedded_types,
 
            returned: return_type
 
        });
 
    }
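
    // Illustration (comment only, not executed anywhere): for the builtin call
    // `get(rx)` the data stored above contains a single embedded argument type
    //   [Input, MarkerBody(0), Unknown]
    // and the return type
    //   [MarkerBody(0), Unknown],
    // so progress made on the port's payload type T is propagated to the call
    // expression's type through the shared body marker, and vice versa.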
 

	
 
    fn insert_initial_struct_polymorph_data(
 
        &mut self, ctx: &mut Ctx, lit_id: LiteralExpressionId,
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_struct();
 

	
 
        // Handle polymorphic arguments
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args2.len());
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args2.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            ); 
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        }
 

	
 
        // Handle parser types on struct definition
 
        let definition = &ctx.heap[literal.definition.unwrap()];
 
        let definition = match definition {
 
            Definition::Struct(definition) => {
 
                debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                definition
 
            },
 
            _ => unreachable!("definition for struct literal does not point to struct definition")
 
        };
 

	
 
        // Note: programmer is capable of specifying fields in a struct literal
 
        // in a different order than on the definition. We take the literal-
 
        // specified order to be leading.
 
        let mut embedded_types = Vec::with_capacity(definition.fields.len());
 
        for lit_field in literal.fields.iter() {
 
            let def_field = &definition.fields[lit_field.field_idx];
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, def_field.parser_type, false
 
            );
 
            embedded_types.push(inference_type);
 
        }
 

	
 
        // Return type is the struct type itself, with the appropriate 
 
        // polymorphic variables. So:
 
        // - 1 part for definition
 
        // - N_poly_arg marker parts for each polymorphic argument
 
        // - all the parts for the currently known polymorphic arguments 
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(definition.this.upcast(), poly_vars.len()));
 
        let mut return_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
            if !poly_var.is_done { return_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let return_type = InferenceType::new(true, return_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars, 
 
            embedded: embedded_types,
 
            returned: return_type,
 
        });
 
    }
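
    // Worked example (illustrative only; `Pair` is a hypothetical struct
    // definition): for a literal of `struct Pair<T1, T2>` where T1 is already
    // known to be `byte` and T2 still has to be inferred, the returned type
    // constructed above starts out as
    //   [Instance(Pair, 2), MarkerBody(0), Byte, MarkerBody(1), Unknown]
    // and is only considered "done" once both polymorphic arguments are known.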
 

	
 
    /// Inserts the extra polymorphic data struct for enum expressions. These
    /// carry no embedded types; only the enum type itself has to be inferred.
 
    fn insert_initial_enum_polymorph_data(
 
        &mut self, ctx: &Ctx, lit_id: LiteralExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_enum();
 

	
 
        // Handle polymorphic arguments to the enum
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args2.len());
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args2.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            );
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        }
 

	
 
        // Handle enum type itself
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(literal.definition.unwrap(), poly_vars.len()));
 
        let mut enum_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
            if !poly_var.is_done { enum_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let enum_type = InferenceType::new(true, enum_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars,
 
            embedded: Vec::new(),
 
            returned: enum_type,
 
        });
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct. Assumes that the select
 
    /// expression's referenced (definition_id, field_idx) has been resolved.
 
    fn insert_initial_select_polymorph_data(
 
        &mut self, ctx: &Ctx, select_id: SelectExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Retrieve relevant data
 
        let expr = &ctx.heap[select_id];
 
        let field = expr.field.as_symbolic();
 

	
 
        let definition_id = field.definition.unwrap();
 
        let definition = ctx.heap[definition_id].as_struct();
 
        let field_idx = field.field_idx;
 

	
 
        // Generate initial polyvar types and struct type
 
        let num_poly_vars = definition.poly_vars.len();
 
        let mut poly_vars = Vec::with_capacity(num_poly_vars);
 
        let struct_parts_reserved = 1 + 2 * num_poly_vars;
 
        let mut struct_parts = Vec::with_capacity(struct_parts_reserved);
 
        struct_parts.push(ITP::Instance(definition_id, num_poly_vars));        
 

	
 
        for poly_idx in 0..num_poly_vars {
 
            poly_vars.push(InferenceType::new(true, false, vec![
 
                ITP::MarkerBody(poly_idx), ITP::Unknown,
 
            ]));
 
            struct_parts.push(ITP::MarkerBody(poly_idx));
 
            struct_parts.push(ITP::Unknown);
 
        }
 
        debug_assert_eq!(struct_parts.len(), struct_parts_reserved);
 

	
 
        // Generate initial field type
 
        let field_type = self.determine_inference_type_from_parser_type(
 
            ctx, definition.fields[field_idx].parser_type, false
 
        );
 

	
 
        self.extra_data.insert(select_id.upcast(), ExtraData{
 
            poly_vars,
 
            embedded: vec![InferenceType::new(true, false, struct_parts)],
 
            returned: field_type
 
        });
 
    }
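
    // Illustration (hypothetical expression): for a select expression `x.f`
    // where `x` is a struct with two polymorphic variables, the embedded
    // struct type above starts out as
    //   [Instance(definition_id, 2), MarkerBody(0), Unknown, MarkerBody(1), Unknown]
    // while `returned` is the declared type of field `f`; progress on either
    // side is exchanged through the shared body markers.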
 

	
 
    /// Determines the initial InferenceType from the provided ParserType. This
 
    /// may be called with two kinds of intentions:
 
    /// 1. To resolve a ParserType within the body of a function, or on
 
    ///     polymorphic arguments to calls/instantiations within that body. This
 
    ///     means that the polymorphic variables are known and can be replaced
 
    ///     with the monomorph we're instantiating.
 
    /// 2. To resolve a ParserType on a called function's definition or on
 
    ///     an instantiated datatype's members. This means that the polymorphic
 
    ///     arguments inside those ParserTypes refer to the polymorphic
 
    ///     variables in the called/instantiated type's definition.
 
    /// In the second case we place InferenceTypePart::Marker instances such
 
    /// that we can perform type inference on the polymorphic variables.
 
    fn determine_inference_type_from_parser_type(
 
        &mut self, ctx: &Ctx, parser_type_id: ParserTypeId,
 
        parser_type_in_body: bool
 
    ) -> InferenceType {
 
        use ParserTypeVariant as PTV;
 
        use InferenceTypePart as ITP;
 

	
 
        let mut to_consider = VecDeque::with_capacity(16);
 
        to_consider.push_back(parser_type_id);
 

	
 
        let mut infer_type = Vec::new();
 
        let mut has_inferred = false;
 
        let mut has_markers = false;
 

	
 
        while !to_consider.is_empty() {
 
            let parser_type_id = to_consider.pop_front().unwrap();
 
            let parser_type = &ctx.heap[parser_type_id];
 
            match &parser_type.variant {
 
                PTV::Message => {
 
                    // TODO: @types Remove the Message -> Byte hack at some point...
 
                    infer_type.push(ITP::Message);
 
                    infer_type.push(ITP::Byte);
 
                },
 
                PTV::Bool => { infer_type.push(ITP::Bool); },
 
                PTV::Byte => { infer_type.push(ITP::Byte); },
 
                PTV::Short => { infer_type.push(ITP::Short); },
 
                PTV::Int => { infer_type.push(ITP::Int); },
 
                PTV::Long => { infer_type.push(ITP::Long); },
 
                PTV::String => { infer_type.push(ITP::String); },
 
                PTV::IntegerLiteral => { unreachable!("integer literal type on variable type"); },
 
                PTV::Inferred => {
 
                    infer_type.push(ITP::Unknown);
 
                    has_inferred = true;
 
                },
 
                PTV::Array(subtype_id) => {
 
                    infer_type.push(ITP::Array);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Input(subtype_id) => {
 
                    infer_type.push(ITP::Input);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Output(subtype_id) => {
 
                    infer_type.push(ITP::Output);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Symbolic(symbolic) => {
 
                    debug_assert!(symbolic.variant.is_some(), "symbolic variant not yet determined");
 
                    match symbolic.variant.as_ref().unwrap() {
 
                        SymbolicParserTypeVariant::PolyArg(_, arg_idx) => {
 
                            let arg_idx = *arg_idx;
 
                            debug_assert!(symbolic.poly_args2.is_empty()); // TODO: @hkt
 

	
 
                            if parser_type_in_body {
 
                                // Polymorphic argument refers to definition's
 
                                // polymorphic variables
 
                                debug_assert!(arg_idx < self.poly_vars.len());
 
                                debug_assert!(!self.poly_vars[arg_idx].has_marker());
 
                                infer_type.push(ITP::MarkerDefinition(arg_idx));
 
                                for concrete_part in &self.poly_vars[arg_idx].parts {
 
                                    infer_type.push(ITP::from(*concrete_part));
 
                                }
 
                            } else {
 
                                // Polymorphic argument has to be inferred
 
                                has_markers = true;
 
                                has_inferred = true;
 
                                infer_type.push(ITP::MarkerBody(arg_idx));
 
                                infer_type.push(ITP::Unknown);
 
                            }
 
                        },
 
                        SymbolicParserTypeVariant::Definition(definition_id) => {
 
                            // TODO: @cleanup
 
                            if cfg!(debug_assertions) {
 
                                let definition = &ctx.heap[*definition_id];
 
                                debug_assert!(definition.is_struct() || definition.is_enum()); // TODO: @function_ptrs
 
                                let num_poly = match definition {
 
                                    Definition::Struct(v) => v.poly_vars.len(),
 
                                    Definition::Enum(v) => v.poly_vars.len(),
 
                                    _ => unreachable!(),
 
                                };
 
                                debug_assert_eq!(symbolic.poly_args2.len(), num_poly);
 
                            }
 

	
 
                            infer_type.push(ITP::Instance(*definition_id, symbolic.poly_args2.len()));
 
                            let mut poly_arg_idx = symbolic.poly_args2.len();
 
                            while poly_arg_idx > 0 {
 
                                poly_arg_idx -= 1;
 
                                to_consider.push_front(symbolic.poly_args2[poly_arg_idx]);
 
                            }
 
                        }
 
                    }
 
                }
 
            }
 
        }
 

	
 
        InferenceType::new(has_markers, !has_inferred, infer_type)
 
    }
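
    // A minimal sketch (illustration only, not used by the resolver) of what
    // intent (2) above produces for a parameter `input<T> arg` in a called
    // function's signature: the polymorphic variable T becomes a body marker
    // followed by an unknown part, matching the hardcoded signature of the
    // builtin `get` in `insert_initial_call_polymorph_data`.
    #[allow(dead_code)]
    fn example_input_poly_arg_inference_type() -> InferenceType {
        use InferenceTypePart as ITP;
        // has_markers = true (T is tracked by a marker), is_done = false (T unknown)
        InferenceType::new(true, false, vec![ITP::Input, ITP::MarkerBody(0), ITP::Unknown])
    }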
 

	
 
    /// Construct an error when an expression's type does not match. This
 
    /// happens if we infer the expression type from its arguments (e.g. the
 
    /// expression type of an addition operator is the type of the arguments)
 
    /// But the expression type was already set due to our parent (e.g. an
 
    /// "if statement" or a "logical not" always expecting a boolean)
 
    fn construct_expr_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, arg_id: ExpressionId
 
    ) -> ParseError {
 
        // TODO: Expand and provide more meaningful information for humans
 
        let expr = &ctx.heap[expr_id];
 
        let arg_expr = &ctx.heap[arg_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 
        let arg_type = self.expr_types.get(&arg_id).unwrap();
 

	
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: this expression expected a '{}'", 
 
                expr_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg_expr.position(),
 
            &format!(
 
                "But this expression yields a '{}'",
 
                arg_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_arg_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let arg1 = &ctx.heap[arg1_id];
 
        let arg2 = &ctx.heap[arg2_id];
 

	
 
        let arg1_type = self.expr_types.get(&arg1_id).unwrap();
 
        let arg2_type = self.expr_types.get(&arg2_id).unwrap();
src/protocol/parser/type_table.rs
 
/**
 
TypeTable
 

	
 
Contains the type table: a datastructure that, when compilation succeeds,
 
contains a concrete type definition for each AST type definition. In general
 
terms the type table will go through the following phases during the compilation
 
process:
 

	
 
1. The base type definitions are resolved after the parser phase has
 
    finished. This implies that the AST is fully constructed, but not yet
 
    annotated.
 
2. With the base type definitions resolved, the validation/linker phase will
 
    use the type table (together with the symbol table) to disambiguate
 
    terms (e.g. does an expression refer to a variable, an enum, a constant,
 
    etc.)
 
3. During the type checking/inference phase the type table is used to ensure
 
    that the AST contains valid use of types in expressions and statements.
 
    At the same time type inference will find concrete instantiations of
 
    polymorphic types; these will be stored in the type table as monomorphed
 
    instantiations of a generic type.
 
4. After type checking and inference (and possibly when constructing byte
 
    code) the type table will construct a type graph and solidify each
 
    non-polymorphic type and monomorphed instantiations of polymorphic types
 
    into concrete types.
 

	
 
So a base type is defined by its (optionally polymorphic) representation in the
 
AST. A concrete type has concrete types for each of the polymorphic arguments. A
 
struct, enum or union may have polymorphic arguments but not actually be a
 
polymorphic type. This happens when the polymorphic arguments are not used in
 
the type definition itself. Similarly for functions/components, except there we only
 
check the arguments/return type of the signature.
 

	
 
Apart from base types and concrete types, we also use the term "embedded type"
 
for types that are embedded within another type, such as the type of a
 
struct field or of a union variant. Embedded types may themselves have
 
polymorphic arguments and therefore form an embedded type tree.
 

	
 
NOTE: for now a polymorphic definition of a function/component is illegal if the
 
    polymorphic arguments are not used in the arguments/return type. It should
 
    be legal, but we disallow it for now.
 

	
 
TODO: Allow potentially cyclic datatypes and reject truly cyclic datatypes.
 
TODO: Allow for the full potential of polymorphism
 
TODO: Detect "true" polymorphism: for datatypes like structs/enum/unions this
 
    is simple. For functions we need to check the entire body. Do it here? Or
 
    do it somewhere else?
 
TODO: Do we want to check fn argument collision here, or in validation phase?
 
TODO: Make type table an on-demand thing instead of constructing all base types.
 
TODO: Cleanup everything, feels like a lot can be written cleaner and with less
 
    assumptions on each function call.
 
// TODO: Review all comments
 
*/
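
// Illustrative usage sketch (not part of this changeset; `table`,
// `definition_id` and `concrete_types` are assumed to exist at the call site):
// during type inference each distinct set of concrete polymorphic arguments is
// registered as a monomorph exactly once, roughly as follows.
//
//     if !table.has_monomorph(&definition_id, &concrete_types) {
//         table.add_monomorph(&definition_id, concrete_types);
//     }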
 

	
 
use std::fmt::{Formatter, Result as FmtResult};
 
use std::collections::{HashMap, VecDeque};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::parser::symbol_table::{SymbolTable, Symbol};
 
use crate::protocol::inputsource::*;
 
use crate::protocol::parser::*;
 

	
 
//------------------------------------------------------------------------------
 
// Defined Types
 
//------------------------------------------------------------------------------
 

	
 
#[derive(Copy, Clone, PartialEq, Eq)]
 
pub enum TypeClass {
 
    Enum,
 
    Union,
 
    Struct,
 
    Function,
 
    Component
 
}
 

	
 
impl TypeClass {
 
    pub(crate) fn display_name(&self) -> &'static str {
 
        match self {
 
            TypeClass::Enum => "enum",
 
            TypeClass::Union => "union",
 
            TypeClass::Struct => "struct",
 
            TypeClass::Function => "function",
 
            TypeClass::Component => "component",
 
        }
 
    }
 

	
 
    pub(crate) fn is_data_type(&self) -> bool {
 
        *self == TypeClass::Enum || *self == TypeClass::Union || *self == TypeClass::Struct
 
    }
 

	
 
    pub(crate) fn is_proc_type(&self) -> bool {
 
        *self == TypeClass::Function || *self == TypeClass::Component
 
    }
 
}
 

	
 
impl std::fmt::Display for TypeClass {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
 
        write!(f, "{}", self.display_name())
 
    }
 
}
 

	
 
/// Struct wrapping around a potentially polymorphic type. If the type does not
 
/// have any polymorphic arguments then it will not have any monomorphs and
 
/// `is_polymorph` will be set to `false`. A type with polymorphic arguments
 
/// only has `is_polymorph` set to `true` if the polymorphic arguments actually
 
/// appear in the type's associated types (function return argument, struct
 
/// field, enum variant, etc.). Otherwise the polymorphic argument is just a
 
/// marker and does not influence the bytesize of the type.
 
pub struct DefinedType {
 
    pub(crate) ast_root: RootId,
 
    pub(crate) ast_definition: DefinitionId,
 
    pub(crate) definition: DefinedTypeVariant,
 
    pub(crate) poly_vars: Vec<PolyVar>,
 
    pub(crate) is_polymorph: bool,
 
    pub(crate) is_pointerlike: bool,
 
    // TODO: @optimize
 
    pub(crate) monomorphs: Vec<Vec<ConcreteType>>,
 
}
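
// Example of the distinction described above (hypothetical definitions written
// in a rough sketch of the DSL's syntax, which may not be exact):
//   struct Tagged<T> { int tag }   // declares T but never uses it: not a true polymorph
//   struct Boxed<T>  { T inner }   // uses T in a field: is_polymorph = true, and
//                                  // the byte size depends on the chosen T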
 

	
 
impl DefinedType {
 
    fn add_monomorph(&mut self, types: Vec<ConcreteType>) {
 
        debug_assert!(!self.has_monomorph(&types), "monomorph already exists");
 
        self.monomorphs.push(types);
 
    }
 

	
 
    pub(crate) fn has_any_monomorph(&self) -> bool {
 
        !self.monomorphs.is_empty()
 
    }
 

	
 
    pub(crate) fn has_monomorph(&self, types: &Vec<ConcreteType>) -> bool {
 
        debug_assert_eq!(self.poly_vars.len(), types.len(), "mismatch in number of polymorphic types");
 
        for monomorph in &self.monomorphs {
 
            if monomorph == types { return true; }
 
        }
 

	
 
        return false;
 
    }
 
}
 

	
 
pub enum DefinedTypeVariant {
 
    Enum(EnumType),
 
    Union(UnionType),
 
    Struct(StructType),
 
    Function(FunctionType),
 
    Component(ComponentType)
 
}
 

	
 
pub struct PolyVar {
 
    identifier: Identifier,
 
    /// Whether the polymorphic variable is used directly in the definition of
 
    /// the type (not including bodies of function/component types)
 
    is_in_use: bool,
 
}
 

	
 
impl DefinedTypeVariant {
 
    pub(crate) fn type_class(&self) -> TypeClass {
 
        match self {
 
            DefinedTypeVariant::Enum(_) => TypeClass::Enum,
 
            DefinedTypeVariant::Union(_) => TypeClass::Union,
 
            DefinedTypeVariant::Struct(_) => TypeClass::Struct,
 
            DefinedTypeVariant::Function(_) => TypeClass::Function,
 
            DefinedTypeVariant::Component(_) => TypeClass::Component
 
        }
 
    }
 

	
 
    pub(crate) fn as_struct(&self) -> &StructType {
 
        match self {
 
            DefinedTypeVariant::Struct(v) => v,
 
            _ => unreachable!("Cannot convert {} to struct variant", self.type_class())
 
        }
 
    }
 

	
 
    pub(crate) fn as_enum(&self) -> &EnumType {
 
        match self {
 
            DefinedTypeVariant::Enum(v) => v,
 
            _ => unreachable!("Cannot convert {} to enum variant", self.type_class())
 
        }
 
    }
 
}
 

	
 
/// `EnumType` is the classical C/C++ enum type. It has various variants with
 
/// an assigned integer value. The integer values may be user-defined,
 
/// compiler-defined, or a mix of the two. If a user assigns the same enum
 
/// value multiple times, we assume the user is an expert and we consider both
 
/// variants to be equal to one another.
 
pub struct EnumType {
 
    pub(crate) variants: Vec<EnumVariant>,
 
    pub(crate) representation: PrimitiveType,
 
}
 

	
 
// TODO: Also support maximum u64 value
 
pub struct EnumVariant {
 
    pub(crate) identifier: Identifier,
 
    pub(crate) value: i64,
 
}
 

	
 
/// `UnionType` is the algebraic datatype (or sum type, or discriminated union).
 
/// A value is an element of the union, identified by its tag, and may contain
 
/// a single subtype.
 
pub struct UnionType {
 
    variants: Vec<UnionVariant>,
 
    tag_representation: PrimitiveType
 
}
 

	
 
pub struct UnionVariant {
 
    identifier: Identifier,
 
    parser_type: Option<ParserTypeId>,
 
    tag_value: i64,
 
}
 

	
 
pub struct StructType {
 
    pub(crate) fields: Vec<StructField>,
 
}
 

	
 
pub struct StructField {
 
    pub(crate) identifier: Identifier,
 
    pub(crate) parser_type: ParserTypeId,
 
}
 

	
 
pub struct FunctionType {
 
    pub return_type: ParserTypeId,
 
    pub arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct ComponentType {
 
    pub variant: ComponentVariant,
 
    pub arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct FunctionArgument {
 
    identifier: Identifier,
 
    parser_type: ParserTypeId,
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Type table
 
//------------------------------------------------------------------------------
 

	
 
// TODO: @cleanup Do I really need this, doesn't make the code that much cleaner
 
struct TypeIterator {
 
    breadcrumbs: Vec<(RootId, DefinitionId)>
 
}
 

	
 
impl TypeIterator {
 
    fn new() -> Self {
 
        Self{ breadcrumbs: Vec::with_capacity(32) }
 
    }
 

	
 
    fn reset(&mut self, root_id: RootId, definition_id: DefinitionId) {
 
        self.breadcrumbs.clear();
 
        self.breadcrumbs.push((root_id, definition_id))
 
    }
 

	
 
    fn push(&mut self, root_id: RootId, definition_id: DefinitionId) {
 
        self.breadcrumbs.push((root_id, definition_id));
 
    }
 

	
 
    fn contains(&self, root_id: RootId, definition_id: DefinitionId) -> bool {
 
        for (stored_root_id, stored_definition_id) in self.breadcrumbs.iter() {
 
            if *stored_root_id == root_id && *stored_definition_id == definition_id { return true; }
 
        }
 

	
 
        return false
 
    }
 

	
 
    fn top(&self) -> Option<(RootId, DefinitionId)> {
 
        self.breadcrumbs.last().map(|(r, d)| (*r, *d))
 
    }
 

	
 
    fn pop(&mut self) {
 
        debug_assert!(!self.breadcrumbs.is_empty());
 
        self.breadcrumbs.pop();
 
    }
 
}
 

	
 
/// Result from attempting to resolve a `ParserType` using the symbol table and
 
/// the type table.
 
enum ResolveResult {
 
    /// ParserType is a builtin type
 
    BuiltIn,
 
    /// ParserType points to a polymorphic argument, contains the index of the
 
    /// polymorphic argument in the outermost definition (e.g. we may have 
 
    /// structs nested three levels deep, but in the innermost struct we can 
 
    /// only use the polyargs that are specified in the type definition of the
 
    /// outermost struct).
 
    PolyArg(usize),
 
    /// ParserType points to a user-defined type that is already resolved in the
 
    /// type table.
 
    Resolved((RootId, DefinitionId)),
 
    /// ParserType points to a user-defined type that is not yet resolved into
 
    /// the type table.
 
    Unresolved((RootId, DefinitionId))
 
}
 

	
 
pub(crate) struct TypeTable {
 
    /// Lookup from AST DefinitionId to a defined type. Considering possible
 
    /// polymorphs is done inside the `DefinedType` struct.
 
    lookup: HashMap<DefinitionId, DefinedType>,
 
    /// Iterator over `(module, definition)` tuples used as workspace to make sure
 
    /// that the base definitions of all of a type's subtypes are resolved.
 
    iter: TypeIterator,
 
    /// Iterator over `parser type`s during the process where `parser types` are
 
    /// resolved into a `(module, definition)` tuple.
 
    parser_type_iter: VecDeque<ParserTypeId>,
 
}
 

	
 
pub(crate) struct TypeCtx<'a> {
 
    symbols: &'a SymbolTable,
 
    heap: &'a mut Heap,
 
    modules: &'a [LexedModule]
 
}
 

	
 
impl<'a> TypeCtx<'a> {
 
    pub(crate) fn new(symbols: &'a SymbolTable, heap: &'a mut Heap, modules: &'a [LexedModule]) -> Self {
 
        Self{ symbols, heap, modules }
 
    }
 
}
 

	
 
impl TypeTable {
 
    /// Construct a new type table without any resolved types.
 
    pub(crate) fn new() -> Self {
 
        Self{ 
 
            lookup: HashMap::new(), 
 
            iter: TypeIterator::new(), 
 
            parser_type_iter: VecDeque::with_capacity(64), 
 
        }
 
    }
 

	
 
    pub(crate) fn build_base_types(&mut self, ctx: &mut TypeCtx) -> Result<(), ParseError> {
 
        // The table must still be empty, and each module's root_id must be castable to an index into ctx.modules (checked below)
 
        debug_assert!(self.lookup.is_empty());
 
        debug_assert!(self.iter.top().is_none());
 
        debug_assert!(self.parser_type_iter.is_empty());
 

	
 
        if cfg!(debug_assertions) {
 
            for (index, module) in ctx.modules.iter().enumerate() {
 
                debug_assert_eq!(index, module.root_id.index as usize);
 
            }
 
        }
 

	
 
        // Use context to guess hashmap size
 
        let reserve_size = ctx.heap.definitions.len();
 
        self.lookup.reserve(reserve_size);
 

	
 
        // TODO: @cleanup Rework this hack
 
        for root_idx in 0..ctx.modules.len() {
 
            let last_definition_idx = ctx.heap[ctx.modules[root_idx].root_id].definitions.len();
 
            for definition_idx in 0..last_definition_idx {
 
                let definition_id = ctx.heap[ctx.modules[root_idx].root_id].definitions[definition_idx];
 
                self.resolve_base_definition(ctx, definition_id)?;
 
            }
 
        }
 

	
 
        debug_assert_eq!(self.lookup.len(), reserve_size, "mismatch in reserved size of type table");
 

	
 
        Ok(())
 
    }
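
    // Usage sketch (not part of this changeset; variable names are
    // illustrative): after the modules have been lexed, the caller wraps its
    // symbol table, heap and lexed modules in a `TypeCtx` and builds all base
    // types in one pass.
    //
    //     let mut type_table = TypeTable::new();
    //     let mut ctx = TypeCtx::new(&symbol_table, &mut heap, &modules);
    //     type_table.build_base_types(&mut ctx)?;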
 

	
 
    /// Retrieves base definition from type table. We must be able to retrieve
 
    /// it as we resolve all base types upon type table construction (for now).
 
    /// However, in the future we might do on-demand type resolving, so we
 
    /// return an `Option` anyway.
 
    pub(crate) fn get_base_definition(&self, definition_id: &DefinitionId) -> Option<&DefinedType> {
 
        self.lookup.get(definition_id)
 
    }
 

	
 
    /// Instantiates a monomorph for a given base definition.
 
    pub(crate) fn add_monomorph(&mut self, definition_id: &DefinitionId, types: Vec<ConcreteType>) {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to instantiate monomorph of definition unknown to type table"
 
        );
 

	
 
        let definition = self.lookup.get_mut(definition_id).unwrap();
 
        definition.add_monomorph(types);
 
    }
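
    // Usage sketch (not part of this changeset; `type_table` and
    // `concrete_types` are illustrative): a caller performing monomorphization
    // would typically check for an existing monomorph before adding one.
    //
    //     if !type_table.has_monomorph(&definition_id, &concrete_types) {
    //         type_table.add_monomorph(&definition_id, concrete_types);
    //     }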
 

	
 
    /// Checks if a given definition already has a specific monomorph.
 
    pub(crate) fn has_monomorph(&mut self, definition_id: &DefinitionId, types: &Vec<ConcreteType>) -> bool {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to check monomorph existence of definition unknown to type table"
 
        );
 

	
 
        let definition = self.lookup.get(definition_id).unwrap();
 
        definition.has_monomorph(types)
src/protocol/parser/utils.rs
Show inline comments
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use super::symbol_table::*;
 
use super::type_table::*;
 

	
 
/// Utility result type for resolving a namespaced identifier to a type definition.
 
pub(crate) enum FindTypeResult<'t, 'i> {
 
    // Found the type exactly
 
    Found((&'t DefinedType, NamespacedIdentifierIter<'i>)),
 
    // Could not match symbol
 
    SymbolNotFound{ident_pos: InputPosition},
 
    // Matched part of the namespaced identifier, but not completely
 
    SymbolPartial{ident_pos: InputPosition, ident_iter: NamespacedIdentifierIter<'i>},
 
    // Symbol matched, but points to a namespace/module instead of a type
 
    SymbolNamespace{ident_pos: InputPosition, symbol_pos: InputPosition},
 
}
 

	
 
// TODO: @cleanup Find other uses of this pattern
 
// TODO: Hindsight is 20/20: this belongs in the visitor_linker, not in a 
 
//  separate file.
 
impl<'t, 'i> FindTypeResult<'t, 'i> {
 
    /// Utility function to transform the `FindTypeResult` into a `Result` where
 
    /// `Ok` contains the resolved type, and `Err` contains a `ParseError` which
 
    /// can be readily returned. This is the most common use.
 
    pub(crate) fn as_parse_error(self, module_source: &InputSource) -> Result<(&'t DefinedType, NamespacedIdentifierIter<'i>), ParseError> {
 
        match self {
 
            FindTypeResult::Found(defined_type) => Ok(defined_type),
 
            FindTypeResult::SymbolNotFound{ident_pos} => {
 
                Err(ParseError::new_error(
 
                    module_source, ident_pos,
 
                    "Could not resolve this identifier to a symbol"
 
                ))
 
            },
 
            FindTypeResult::SymbolPartial{ident_pos, ident_iter} => {
 
                Err(ParseError::new_error(
 
                    module_source, ident_pos, 
 
                    &format!(
 
                        "Could not fully resolve this identifier to a symbol, was only able to match '{}'",
 
                        &String::from_utf8_lossy(ident_iter.returned_section())
 
                    )
 
                ))
 
            },
 
            FindTypeResult::SymbolNamespace{ident_pos, symbol_pos} => {
 
                Err(ParseError::new_error(
 
                    module_source, ident_pos,
 
                    "This identifier was resolved to a namespace instead of a type"
 
                ).with_postfixed_info(
 
                    module_source, symbol_pos,
 
                    "This is the referenced namespace"
 
                ))
 
            }
 
        }
 
    }
 
}
 

	
 
/// Attempt to find the type pointed to by a (root, identifier) combination. The
 
/// type must match exactly (no parts in the namespace iterator remaining) and
 
/// must be a type, not a namespace. 
 
pub(crate) fn find_type_definition<'t, 'i>(
 
    symbols: &SymbolTable, types: &'t TypeTable, 
 
    root_id: RootId, identifier: &'i NamespacedIdentifier
 
) -> FindTypeResult<'t, 'i> {
 
    // Lookup symbol
 
    let (symbol, ident_iter) = symbols.resolve_namespaced_identifier(root_id, identifier);
 
    if symbol.is_none() { 
 
        return FindTypeResult::SymbolNotFound{ident_pos: identifier.position};
 
    }
 
    
 
    // Make sure we resolved it exactly
 
    let symbol = symbol.unwrap();
 
    if ident_iter.num_remaining() != 0 { 
 
        return FindTypeResult::SymbolPartial{
 
            ident_pos: identifier.position,
 
            ident_iter
 
        };
 
    }
 

	
 
    match symbol.symbol {
 
        Symbol::Namespace(_) => {
 
            FindTypeResult::SymbolNamespace{
 
                ident_pos: identifier.position, 
 
                symbol_pos: symbol.position
 
            }
 
        },
 
        Symbol::Definition((_, definition_id)) => {
 
            // If this function is called correctly, then we should always be
 
            // able to match the definition's ID to an entry in the type table.
 
            let definition = types.get_base_definition(&definition_id);
 
            debug_assert!(definition.is_some());
 
            FindTypeResult::Found((definition.unwrap(), ident_iter))
 
        }
 
    }
 
}
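
// Usage sketch (not part of this changeset; `symbol_table`, `type_table` and
// `module_source` are illustrative): the common pattern is to turn the lookup
// result into a `ParseError` right away.
//
//     let (defined_type, ident_iter) =
//         find_type_definition(&symbol_table, &type_table, root_id, &identifier)
//             .as_parse_error(module_source)?;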
 

	
 
pub(crate) enum MatchPolymorphResult<'t> {
 
    Matching,
 
    InferAll(usize),
 
    Mismatch{defined_type: &'t DefinedType, ident_position: InputPosition, num_specified: usize},
 
    NoneExpected{defined_type: &'t DefinedType, ident_position: InputPosition},
 
}
 

	
 
impl<'t> MatchPolymorphResult<'t> {
 
    pub(crate) fn as_parse_error(self, heap: &Heap, module_source: &InputSource) -> Result<usize, ParseError> {
 
        match self {
 
            MatchPolymorphResult::Matching => Ok(0),
 
            MatchPolymorphResult::InferAll(count) => {
 
                debug_assert!(count > 0);
 
                Ok(count)
 
            },
 
            MatchPolymorphResult::Mismatch{defined_type, ident_position, num_specified} => {
 
                let type_identifier = heap[defined_type.ast_definition].identifier();
 
                let args_name = if defined_type.poly_vars.len() == 1 {
 
                    "argument"
 
                } else {
 
                    "arguments"
 
                };
 

	
 
                return Err(ParseError::new_error(
 
                    module_source, ident_position,
 
                    &format!(
 
                        "expected {} polymorphic {} (or none, to infer them) for the type {}, but {} were specified",
 
                        defined_type.poly_vars.len(), args_name, 
 
                        &String::from_utf8_lossy(&type_identifier.value),
 
                        num_specified
 
                    )
 
                ))
 
            },
 
            MatchPolymorphResult::NoneExpected{defined_type, ident_position, ..} => {
 
                let type_identifier = heap[defined_type.ast_definition].identifier();
 
                return Err(ParseError::new_error(
 
                    module_source, ident_position,
 
                    &format!(
 
                        "the type {} is not polymorphic",
 
                        &String::from_utf8_lossy(&type_identifier.value)
 
                    )
 
                ))
 
            }
 
        }
 
    }
 
}
 

	
 
/// Attempt to match the polymorphic arguments to the number of polymorphic
 
/// variables in the definition.
 
pub(crate) fn match_polymorphic_args_to_vars<'t>(
 
    defined_type: &'t DefinedType, poly_args: Option<&[ParserTypeId]>, ident_position: InputPosition
 
) -> MatchPolymorphResult<'t> {
 
    if defined_type.poly_vars.is_empty() {
 
        // No polymorphic variables on type
 
        if poly_args.is_some() {
 
            return MatchPolymorphResult::NoneExpected{
 
                defined_type,
 
                ident_position, 
 
            };
 
        }
 
    } else {
 
        // Polymorphic variables on type
 
        let has_specified = poly_args.map_or(false, |a| !a.is_empty());
 
        if !has_specified {
 
            // Implicitly infer all of the polymorphic arguments
 
            return MatchPolymorphResult::InferAll(defined_type.poly_vars.len());
 
        }
 

	
 
        let num_specified = poly_args.unwrap().len();
 
        if num_specified != defined_type.poly_vars.len() {
 
            return MatchPolymorphResult::Mismatch{
 
                defined_type,
 
                ident_position,
 
                num_specified,
 
            };
 
        }
 
    }
 

	
 
    MatchPolymorphResult::Matching
 
}
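
// Usage sketch (not part of this changeset; names are illustrative): combined
// with `find_type_definition` the polymorphic arguments of a parsed type can
// be validated in one chain. The `Ok` value is the number of polymorphic
// arguments that must still be inferred: 0 when they were specified explicitly
// (or when the type has none at all).
//
//     let num_to_infer = match_polymorphic_args_to_vars(
//             defined_type, poly_args, identifier.position
//         ).as_parse_error(&heap, module_source)?;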
 
\ No newline at end of file

Changeset was too big and was cut off...
