Changeset - 0d171740d3f1
MH - 2021-12-20 18:07:16
contact@maxhenger.nl
Handling select guard variables in validator and typer
3 files changed:
src/protocol/ast.rs
 
@@ -597,1264 +597,1266 @@ impl ConcreteType {
 
            CTP::Message => { target.push_str(KW_TYPE_MESSAGE_STR); },
 
            CTP::Bool => { target.push_str(KW_TYPE_BOOL_STR); },
 
            CTP::UInt8 => { target.push_str(KW_TYPE_UINT8_STR); },
 
            CTP::UInt16 => { target.push_str(KW_TYPE_UINT16_STR); },
 
            CTP::UInt32 => { target.push_str(KW_TYPE_UINT32_STR); },
 
            CTP::UInt64 => { target.push_str(KW_TYPE_UINT64_STR); },
 
            CTP::SInt8 => { target.push_str(KW_TYPE_SINT8_STR); },
 
            CTP::SInt16 => { target.push_str(KW_TYPE_SINT16_STR); },
 
            CTP::SInt32 => { target.push_str(KW_TYPE_SINT32_STR); },
 
            CTP::SInt64 => { target.push_str(KW_TYPE_SINT64_STR); },
 
            CTP::Character => { target.push_str(KW_TYPE_CHAR_STR); },
 
            CTP::String => { target.push_str(KW_TYPE_STRING_STR); },
 
            CTP::Array | CTP::Slice => {
 
                idx = Self::render_type_part_at(parts, heap, idx, target);
 
                target.push_str("[]");
 
            },
 
            CTP::Input => {
 
                target.push_str(KW_TYPE_IN_PORT_STR);
 
                target.push('<');
 
                idx = Self::render_type_part_at(parts, heap, idx, target);
 
                target.push('>');
 
            },
 
            CTP::Output => {
 
                target.push_str(KW_TYPE_OUT_PORT_STR);
 
                target.push('<');
 
                idx = Self::render_type_part_at(parts, heap, idx, target);
 
                target.push('>');
 
            },
 
            CTP::Tuple(num_parts) => {
 
                target.push('(');
 
                if num_parts != 0 {
 
                    idx = Self::render_type_part_at(parts, heap, idx, target);
 
                    for _ in 1..num_parts {
 
                        target.push(',');
 
                        idx = Self::render_type_part_at(parts, heap, idx, target);
 
                    }
 
                }
 
                target.push(')');
 
            },
 
            CTP::Instance(definition_id, num_poly_args) |
 
            CTP::Function(definition_id, num_poly_args) |
 
            CTP::Component(definition_id, num_poly_args) => {
 
                let definition = &heap[definition_id];
 
                target.push_str(definition.identifier().value.as_str());
 

	
 
                if num_poly_args != 0 {
 
                    target.push('<');
 
                    for poly_arg_idx in 0..num_poly_args {
 
                        if poly_arg_idx != 0 {
 
                            target.push(',');
 
                        }
 
                        idx = Self::render_type_part_at(parts, heap, idx, target);
 
                    }
 
                    target.push('>');
 
                }
 
            }
 
        }
 

	
 
        idx
 
    }
 
}
 

	
 
#[derive(Debug)]
 
pub struct ConcreteTypeIter<'a> {
 
    parts: &'a [ConcreteTypePart],
 
    idx_embedded: u32,
 
    num_embedded: u32,
 
    part_idx: usize,
 
}
 

	
 
impl<'a> ConcreteTypeIter<'a> {
 
    pub(crate) fn new(parts: &'a[ConcreteTypePart], parent_idx: usize) -> Self {
 
        let num_embedded = parts[parent_idx].num_embedded();
 
        return ConcreteTypeIter{
 
            parts,
 
            idx_embedded: 0,
 
            num_embedded,
 
            part_idx: parent_idx + 1,
 
        }
 
    }
 
}
 

	
 
impl<'a> Iterator for ConcreteTypeIter<'a> {
 
    type Item = &'a [ConcreteTypePart];
 

	
 
    fn next(&mut self) -> Option<Self::Item> {
 
        if self.idx_embedded == self.num_embedded {
 
            return None;
 
        }
 

	
 
        // Retrieve the subtree of interest
 
        let start_idx = self.part_idx;
 
        let end_idx = ConcreteType::type_parts_subtree_end_idx(&self.parts, start_idx);
 

	
 
        self.idx_embedded += 1;
 
        self.part_idx = end_idx;
 

	
 
        return Some(&self.parts[start_idx..end_idx]);
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub enum Scope {
 
    Definition(DefinitionId),
 
    Regular(BlockStatementId),
 
    Synchronous((SynchronousStatementId, BlockStatementId)),
 
}
 

	
 
impl Scope {
 
    pub fn is_block(&self) -> bool {
 
        match &self {
 
            Scope::Definition(_) => false,
 
            Scope::Regular(_) => true,
 
            Scope::Synchronous(_) => true,
 
        }
 
    }
 
    pub fn to_block(&self) -> BlockStatementId {
 
        match &self {
 
            Scope::Regular(id) => *id,
 
            Scope::Synchronous((_, id)) => *id,
 
            _ => panic!("unable to get BlockStatement from Scope")
 
        }
 
    }
 
}
 

	
 
/// `ScopeNode` is a helper that links scopes in two directions. It doesn't
 
/// actually contain any information associated with the scope; that may be
 
/// found on the AST elements that `Scope` points to.
 
#[derive(Debug, Clone)]
 
pub struct ScopeNode {
 
    pub parent: Scope,
 
    pub nested: Vec<Scope>,
 
}
 

	
 
impl ScopeNode {
 
    pub(crate) fn new_invalid() -> Self {
 
        ScopeNode{
 
            parent: Scope::Definition(DefinitionId::new_invalid()),
 
            nested: Vec::new(),
 
        }
 
    }
 
}
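
// A minimal sketch (illustrative only, not part of this changeset) of the
// two-directional linking described above; `parent_block_id` and
// `child_block_id` are hypothetical ids produced elsewhere by the linker:
//
//     let mut node = ScopeNode::new_invalid();
//     node.parent = Scope::Regular(parent_block_id);     // link towards the parent scope
//     node.nested.push(Scope::Regular(child_block_id));  // link towards a nested scope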
 

	
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum VariableKind {
 
    Parameter,      // in parameter list of function/component
 
    Local,          // declared in function/component body
 
    Binding,        // may be bound to in a binding expression (determined in validator/linker)
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct Variable {
 
    pub this: VariableId,
 
    // Parsing
 
    pub kind: VariableKind,
 
    pub parser_type: ParserType,
 
    pub identifier: Identifier,
 
    // Validator/linker
 
    pub relative_pos_in_block: i32,
 
    pub unique_id_in_scope: i32, // Temporary fix until proper bytecode/asm is generated
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Definition {
 
    Struct(StructDefinition),
 
    Enum(EnumDefinition),
 
    Union(UnionDefinition),
 
    Component(ComponentDefinition),
 
    Function(FunctionDefinition),
 
}
 

	
 
impl Definition {
 
    pub fn is_struct(&self) -> bool {
 
        match self {
 
            Definition::Struct(_) => true,
 
            _ => false
 
        }
 
    }
 
    pub(crate) fn as_struct(&self) -> &StructDefinition {
 
        match self {
 
            Definition::Struct(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'StructDefinition'"),
 
        }
 
    }
 
    pub(crate) fn as_struct_mut(&mut self) -> &mut StructDefinition {
 
        match self {
 
            Definition::Struct(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'StructDefinition'"),
 
        }
 
    }
 
    pub fn is_enum(&self) -> bool {
 
        match self {
 
            Definition::Enum(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub(crate) fn as_enum(&self) -> &EnumDefinition {
 
        match self {
 
            Definition::Enum(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'EnumDefinition'"),
 
        }
 
    }
 
    pub(crate) fn as_enum_mut(&mut self) -> &mut EnumDefinition {
 
        match self {
 
            Definition::Enum(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'EnumDefinition'"),
 
        }
 
    }
 
    pub fn is_union(&self) -> bool {
 
        match self {
 
            Definition::Union(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub(crate) fn as_union(&self) -> &UnionDefinition {
 
        match self {
 
            Definition::Union(result) => result, 
 
            _ => panic!("Unable to cast 'Definition' to 'UnionDefinition'"),
 
        }
 
    }
 
    pub(crate) fn as_union_mut(&mut self) -> &mut UnionDefinition {
 
        match self {
 
            Definition::Union(result) => result,
 
            _ => panic!("Unable to cast 'Definition' to 'UnionDefinition'"),
 
        }
 
    }
 
    pub fn is_component(&self) -> bool {
 
        match self {
 
            Definition::Component(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub(crate) fn as_component(&self) -> &ComponentDefinition {
 
        match self {
 
            Definition::Component(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `Component`"),
 
        }
 
    }
 
    pub(crate) fn as_component_mut(&mut self) -> &mut ComponentDefinition {
 
        match self {
 
            Definition::Component(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `Component`"),
 
        }
 
    }
 
    pub fn is_function(&self) -> bool {
 
        match self {
 
            Definition::Function(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub(crate) fn as_function(&self) -> &FunctionDefinition {
 
        match self {
 
            Definition::Function(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `Function`"),
 
        }
 
    }
 
    pub(crate) fn as_function_mut(&mut self) -> &mut FunctionDefinition {
 
        match self {
 
            Definition::Function(result) => result,
 
            _ => panic!("Unable to cast `Definition` to `Function`"),
 
        }
 
    }
 
    pub fn parameters(&self) -> &Vec<VariableId> {
 
        match self {
 
            Definition::Component(def) => &def.parameters,
 
            Definition::Function(def) => &def.parameters,
 
            _ => panic!("Called parameters() on {:?}", self)
 
        }
 
    }
 
    pub fn defined_in(&self) -> RootId {
 
        match self {
 
            Definition::Struct(def) => def.defined_in,
 
            Definition::Enum(def) => def.defined_in,
 
            Definition::Union(def) => def.defined_in,
 
            Definition::Component(def) => def.defined_in,
 
            Definition::Function(def) => def.defined_in,
 
        }
 
    }
 
    pub fn identifier(&self) -> &Identifier {
 
        match self {
 
            Definition::Struct(def) => &def.identifier,
 
            Definition::Enum(def) => &def.identifier,
 
            Definition::Union(def) => &def.identifier,
 
            Definition::Component(def) => &def.identifier,
 
            Definition::Function(def) => &def.identifier,
 
        }
 
    }
 
    pub fn poly_vars(&self) -> &Vec<Identifier> {
 
        match self {
 
            Definition::Struct(def) => &def.poly_vars,
 
            Definition::Enum(def) => &def.poly_vars,
 
            Definition::Union(def) => &def.poly_vars,
 
            Definition::Component(def) => &def.poly_vars,
 
            Definition::Function(def) => &def.poly_vars,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct StructFieldDefinition {
 
    pub span: InputSpan,
 
    pub field: Identifier,
 
    pub parser_type: ParserType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct StructDefinition {
 
    pub this: StructDefinitionId,
 
    pub defined_in: RootId,
 
    // Symbol scanning
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Parsing
 
    pub fields: Vec<StructFieldDefinition>
 
}
 

	
 
impl StructDefinition {
 
    pub(crate) fn new_empty(
 
        this: StructDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ this, defined_in, span, identifier, poly_vars, fields: Vec::new() }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub enum EnumVariantValue {
 
    None,
 
    Integer(i64),
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EnumVariantDefinition {
 
    pub identifier: Identifier,
 
    pub value: EnumVariantValue,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EnumDefinition {
 
    pub this: EnumDefinitionId,
 
    pub defined_in: RootId,
 
    // Symbol scanning
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Parsing
 
    pub variants: Vec<EnumVariantDefinition>,
 
}
 

	
 
impl EnumDefinition {
 
    pub(crate) fn new_empty(
 
        this: EnumDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ this, defined_in, span, identifier, poly_vars, variants: Vec::new() }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct UnionVariantDefinition {
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub value: Vec<ParserType>, // if empty, then union variant does not contain any embedded types
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct UnionDefinition {
 
    pub this: UnionDefinitionId,
 
    pub defined_in: RootId,
 
    // Phase 1: symbol scanning
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Phase 2: parsing
 
    pub variants: Vec<UnionVariantDefinition>,
 
}
 

	
 
impl UnionDefinition {
 
    pub(crate) fn new_empty(
 
        this: UnionDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ this, defined_in, span, identifier, poly_vars, variants: Vec::new() }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub enum ComponentVariant {
 
    Primitive,
 
    Composite,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ComponentDefinition {
 
    pub this: ComponentDefinitionId,
 
    pub defined_in: RootId,
 
    // Symbol scanning
 
    pub span: InputSpan,
 
    pub variant: ComponentVariant,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Parsing
 
    pub parameters: Vec<VariableId>,
 
    pub body: BlockStatementId,
 
    // Validation/linking
 
    pub num_expressions_in_body: i32,
 
}
 

	
 
impl ComponentDefinition {
 
    // Used for preallocation during symbol scanning
 
    pub(crate) fn new_empty(
 
        this: ComponentDefinitionId, defined_in: RootId, span: InputSpan,
 
        variant: ComponentVariant, identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ 
 
            this, defined_in, span, variant, identifier, poly_vars,
 
            parameters: Vec::new(), 
 
            body: BlockStatementId::new_invalid(),
 
            num_expressions_in_body: -1,
 
        }
 
    }
 
}
 

	
 
// Note that we will have function definitions for builtin functions as well. In
 
// that case the span, the identifier span and the body are all invalid.
 
#[derive(Debug, Clone)]
 
pub struct FunctionDefinition {
 
    pub this: FunctionDefinitionId,
 
    pub defined_in: RootId,
 
    // Symbol scanning
 
    pub builtin: bool,
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Parser
 
    pub return_types: Vec<ParserType>,
 
    pub parameters: Vec<VariableId>,
 
    pub body: BlockStatementId,
 
    // Validation/linking
 
    pub num_expressions_in_body: i32,
 
}
 

	
 
impl FunctionDefinition {
 
    pub(crate) fn new_empty(
 
        this: FunctionDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self {
 
            this, defined_in,
 
            builtin: false,
 
            span, identifier, poly_vars,
 
            return_types: Vec::new(),
 
            parameters: Vec::new(),
 
            body: BlockStatementId::new_invalid(),
 
            num_expressions_in_body: -1,
 
        }
 
    }
 
}
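
// Sketch (assumption, not part of this changeset): a builtin function is stored
// as a regular FunctionDefinition with `builtin` set after construction, leaving
// the body at its invalid placeholder as described in the note above.
//
//     let mut def = FunctionDefinition::new_empty(this, defined_in, span, identifier, poly_vars);
//     def.builtin = true; // `body` stays BlockStatementId::new_invalid()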
 

	
 
#[derive(Debug, Clone)]
 
pub enum Statement {
 
    Block(BlockStatement),
 
    EndBlock(EndBlockStatement),
 
    Local(LocalStatement),
 
    Labeled(LabeledStatement),
 
    If(IfStatement),
 
    EndIf(EndIfStatement),
 
    While(WhileStatement),
 
    EndWhile(EndWhileStatement),
 
    Break(BreakStatement),
 
    Continue(ContinueStatement),
 
    Synchronous(SynchronousStatement),
 
    EndSynchronous(EndSynchronousStatement),
 
    Fork(ForkStatement),
 
    EndFork(EndForkStatement),
 
    Select(SelectStatement),
 
    EndSelect(EndSelectStatement),
 
    Return(ReturnStatement),
 
    Goto(GotoStatement),
 
    New(NewStatement),
 
    Expression(ExpressionStatement),
 
}
 

	
 
impl Statement {
 
    pub fn as_block(&self) -> &BlockStatement {
 
        match self {
 
            Statement::Block(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `BlockStatement`"),
 
        }
 
    }
 
    pub fn as_local(&self) -> &LocalStatement {
 
        match self {
 
            Statement::Local(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `LocalStatement`"),
 
        }
 
    }
 
    pub fn as_memory(&self) -> &MemoryStatement {
 
        self.as_local().as_memory()
 
    }
 
    pub fn as_channel(&self) -> &ChannelStatement {
 
        self.as_local().as_channel()
 
    }
 

	
 
    pub fn as_new(&self) -> &NewStatement {
 
        match self {
 
            Statement::New(result) => result,
 
            _ => panic!("Unable to cast `Statement` to `NewStatement`"),
 
        }
 
    }
 

	
 
    pub fn span(&self) -> InputSpan {
 
        match self {
 
            Statement::Block(v) => v.span,
 
            Statement::Local(v) => v.span(),
 
            Statement::Labeled(v) => v.label.span,
 
            Statement::If(v) => v.span,
 
            Statement::While(v) => v.span,
 
            Statement::Break(v) => v.span,
 
            Statement::Continue(v) => v.span,
 
            Statement::Synchronous(v) => v.span,
 
            Statement::Fork(v) => v.span,
 
            Statement::Select(v) => v.span,
 
            Statement::Return(v) => v.span,
 
            Statement::Goto(v) => v.span,
 
            Statement::New(v) => v.span,
 
            Statement::Expression(v) => v.span,
 
            Statement::EndBlock(_)
 
            | Statement::EndIf(_)
 
            | Statement::EndWhile(_)
 
            | Statement::EndSynchronous(_)
 
            | Statement::EndFork(_)
 
            | Statement::EndSelect(_) => unreachable!(),
 
        }
 
    }
 
    pub fn link_next(&mut self, next: StatementId) {
 
        match self {
 
            Statement::Block(stmt) => stmt.next = next,
 
            Statement::EndBlock(stmt) => stmt.next = next,
 
            Statement::Local(stmt) => match stmt {
 
                LocalStatement::Channel(stmt) => stmt.next = next,
 
                LocalStatement::Memory(stmt) => stmt.next = next,
 
            },
 
            Statement::EndIf(stmt) => stmt.next = next,
 
            Statement::EndWhile(stmt) => stmt.next = next,
 
            Statement::EndSynchronous(stmt) => stmt.next = next,
 
            Statement::EndFork(stmt) => stmt.next = next,
 
            Statement::EndSelect(stmt) => stmt.next = next,
 
            Statement::New(stmt) => stmt.next = next,
 
            Statement::Expression(stmt) => stmt.next = next,
 
            Statement::Return(_)
 
            | Statement::Break(_)
 
            | Statement::Continue(_)
 
            | Statement::Synchronous(_)
 
            | Statement::Fork(_)
 
            | Statement::Select(_)
 
            | Statement::Goto(_)
 
            | Statement::While(_)
 
            | Statement::Labeled(_)
 
            | Statement::If(_) => unreachable!(),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BlockStatement {
 
    pub this: BlockStatementId,
 
    // Phase 1: parser
 
    pub is_implicit: bool,
 
    pub span: InputSpan, // of the complete block
 
    pub statements: Vec<StatementId>,
 
    pub end_block: EndBlockStatementId,
 
    // Phase 2: linker
 
    pub scope_node: ScopeNode,
 
    pub first_unique_id_in_scope: i32, // Temporary fix until proper bytecode/asm is generated
 
    pub next_unique_id_in_scope: i32, // Temporary fix until proper bytecode/asm is generated
 
    pub relative_pos_in_parent: i32,
 
    pub locals: Vec<VariableId>,
 
    pub labels: Vec<LabeledStatementId>,
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndBlockStatement {
 
    pub this: EndBlockStatementId,
 
    // Parser
 
    pub start_block: BlockStatementId,
 
    // Validation/Linking
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum LocalStatement {
 
    Memory(MemoryStatement),
 
    Channel(ChannelStatement),
 
}
 

	
 
impl LocalStatement {
 
    pub fn this(&self) -> LocalStatementId {
 
        match self {
 
            LocalStatement::Memory(stmt) => stmt.this.upcast(),
 
            LocalStatement::Channel(stmt) => stmt.this.upcast(),
 
        }
 
    }
 
    pub fn as_memory(&self) -> &MemoryStatement {
 
        match self {
 
            LocalStatement::Memory(result) => result,
 
            _ => panic!("Unable to cast `LocalStatement` to `MemoryStatement`"),
 
        }
 
    }
 
    pub fn as_channel(&self) -> &ChannelStatement {
 
        match self {
 
            LocalStatement::Channel(result) => result,
 
            _ => panic!("Unable to cast `LocalStatement` to `ChannelStatement`"),
 
        }
 
    }
 
    pub fn span(&self) -> InputSpan {
 
        match self {
 
            LocalStatement::Channel(v) => v.span,
 
            LocalStatement::Memory(v) => v.span,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct MemoryStatement {
 
    pub this: MemoryStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub variable: VariableId,
 
    pub initial_expr: AssignmentExpressionId,
 
    // Phase 2: linker
 
    pub next: StatementId,
 
}
 

	
 
/// ChannelStatement is the declaration of an input and output port associated
 
/// with the same channel. Note that the polarity of the ports is from the
 
/// point of view of the component. So an output port is something that a
 
/// component uses to send data over (i.e. it is the "input end" of the
 
/// channel), and vice versa.
 
#[derive(Debug, Clone)]
 
pub struct ChannelStatement {
 
    pub this: ChannelStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "channel" keyword
 
    pub from: VariableId, // output
 
    pub to: VariableId,   // input
 
    // Phase 2: linker
 
    pub relative_pos_in_block: i32,
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LabeledStatement {
 
    pub this: LabeledStatementId,
 
    // Phase 1: parser
 
    pub label: Identifier,
 
    pub body: StatementId,
 
    // Phase 2: linker
 
    pub relative_pos_in_block: i32,
 
    pub in_sync: SynchronousStatementId, // may be invalid
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct IfStatement {
 
    pub this: IfStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "if" keyword
 
    pub test: ExpressionId,
 
    pub true_body: BlockStatementId,
 
    pub false_body: Option<BlockStatementId>,
 
    pub end_if: EndIfStatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndIfStatement {
 
    pub this: EndIfStatementId,
 
    pub start_if: IfStatementId,
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct WhileStatement {
 
    pub this: WhileStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "while" keyword
 
    pub test: ExpressionId,
 
    pub body: BlockStatementId,
 
    pub end_while: EndWhileStatementId,
 
    pub in_sync: SynchronousStatementId, // may be invalid
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndWhileStatement {
 
    pub this: EndWhileStatementId,
 
    pub start_while: WhileStatementId,
 
    // Phase 2: linker
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BreakStatement {
 
    pub this: BreakStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "break" keyword
 
    pub label: Option<Identifier>,
 
    // Phase 2: linker
 
    pub target: Option<EndWhileStatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ContinueStatement {
 
    pub this: ContinueStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "continue" keyword
 
    pub label: Option<Identifier>,
 
    // Phase 2: linker
 
    pub target: Option<WhileStatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SynchronousStatement {
 
    pub this: SynchronousStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "sync" keyword
 
    pub body: BlockStatementId,
 
    pub end_sync: EndSynchronousStatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndSynchronousStatement {
 
    pub this: EndSynchronousStatementId,
 
    pub start_sync: SynchronousStatementId,
 
    // Phase 2: linker
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ForkStatement {
 
    pub this: ForkStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "fork" keyword
 
    pub left_body: BlockStatementId,
 
    pub right_body: Option<BlockStatementId>,
 
    pub end_fork: EndForkStatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndForkStatement {
 
    pub this: EndForkStatementId,
 
    pub start_fork: ForkStatementId,
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SelectStatement {
 
    pub this: SelectStatementId,
 
    pub span: InputSpan, // of the "select" keyword
 
    pub cases: Vec<SelectCase>,
 
    pub end_select: EndSelectStatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SelectCase {
 
    // The guard statement of a `select` is either a MemoryStatement or an
 
    // ExpressionStatement. Nothing else is allowed by the initial parsing
 
    pub guard: StatementId,
 
    pub block: BlockStatementId,
 
    // Phase 2: Validation and Linking
 
    pub involved_ports: Vec<(CallExpressionId, ExpressionId)>, // call to `get` and its port argument
 
}
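
// Sketch (illustrative only, not part of this changeset) of how a later pass
// (validator/typer) can rely on the invariant above when handling guard
// variables; `heap` and `case` are hypothetical bindings, and indexing the heap
// by StatementId is assumed here:
//
//     match &heap[case.guard] {
//         Statement::Local(LocalStatement::Memory(guard)) => {
//             // the guard declares and initialises a variable
//         },
//         Statement::Expression(guard) => {
//             // the guard is a bare expression
//         },
//         _ => unreachable!("select guard is always a memory or expression statement"),
//     }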
 

	
 
#[derive(Debug, Clone)]
 
pub struct EndSelectStatement {
 
    pub this: EndSelectStatementId,
 
    pub start_select: SelectStatementId,
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ReturnStatement {
 
    pub this: ReturnStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "return" keyword
 
    pub expressions: Vec<ExpressionId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct GotoStatement {
 
    pub this: GotoStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "goto" keyword
 
    pub label: Identifier,
 
    // Phase 2: linker
 
    pub target: Option<LabeledStatementId>,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct NewStatement {
 
    pub this: NewStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan, // of the "new" keyword
 
    pub expression: CallExpressionId,
 
    // Phase 2: linker
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ExpressionStatement {
 
    pub this: ExpressionStatementId,
 
    // Phase 1: parser
 
    pub span: InputSpan,
 
    pub expression: ExpressionId,
 
    // Phase 2: linker
 
    pub next: StatementId,
 
}
 

	
 
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
 
pub enum ExpressionParent {
 
    None, // only set during initial parsing
 
    Memory(MemoryStatementId),
 
    If(IfStatementId),
 
    While(WhileStatementId),
 
    Return(ReturnStatementId),
 
    New(NewStatementId),
 
    ExpressionStmt(ExpressionStatementId),
 
    Expression(ExpressionId, u32) // index within expression (e.g. LHS or RHS of expression)
 
}
 

	
 
impl ExpressionParent {
 
    pub fn is_new(&self) -> bool {
 
        match self {
 
            ExpressionParent::New(_) => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    pub fn as_expression(&self) -> ExpressionId {
 
        match self {
 
            ExpressionParent::Expression(id, _) => *id,
 
            _ => panic!("called as_expression() on {:?}", self),
 
        }
 
    }
 
}
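
// Illustrative reading of the `Expression(ExpressionId, u32)` variant (sketch,
// not part of this changeset): for a binary expression `a + b` the operands'
// parents would record their position within the parent expression, e.g.
//
//     left  parent: ExpressionParent::Expression(binary_expr_id.upcast(), 0)
//     right parent: ExpressionParent::Expression(binary_expr_id.upcast(), 1)
//
// where `binary_expr_id` is a hypothetical BinaryExpressionId and `upcast()` is
// assumed to convert it to a general ExpressionId.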
 

	
 
#[derive(Debug, Clone)]
 
pub enum Expression {
 
    Assignment(AssignmentExpression),
 
    Binding(BindingExpression),
 
    Conditional(ConditionalExpression),
 
    Binary(BinaryExpression),
 
    Unary(UnaryExpression),
 
    Indexing(IndexingExpression),
 
    Slicing(SlicingExpression),
 
    Select(SelectExpression),
 
    Literal(LiteralExpression),
 
    Cast(CastExpression),
 
    Call(CallExpression),
 
    Variable(VariableExpression),
 
}
 

	
 
impl Expression {
 
    pub fn as_variable(&self) -> &VariableExpression {
 
        match self {
 
            Expression::Variable(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `VariableExpression`"),
 
        }
 
    }
 

	
 
    /// Returns the operator span, function name, a binding's "let" span, etc.:

    /// a span indicating the kind of expression that is being applied.
 
    pub fn operation_span(&self) -> InputSpan {
 
        match self {
 
            Expression::Assignment(expr) => expr.operator_span,
 
            Expression::Binding(expr) => expr.operator_span,
 
            Expression::Conditional(expr) => expr.operator_span,
 
            Expression::Binary(expr) => expr.operator_span,
 
            Expression::Unary(expr) => expr.operator_span,
 
            Expression::Indexing(expr) => expr.operator_span,
 
            Expression::Slicing(expr) => expr.slicing_span,
 
            Expression::Select(expr) => expr.operator_span,
 
            Expression::Literal(expr) => expr.span,
 
            Expression::Cast(expr) => expr.cast_span,
 
            Expression::Call(expr) => expr.func_span,
 
            Expression::Variable(expr) => expr.identifier.span,
 
        }
 
    }
 

	
 
    /// Returns the span covering the entire expression (i.e. including the
 
    /// spans of the arguments as well).
 
    pub fn full_span(&self) -> InputSpan {
 
        match self {
 
            Expression::Assignment(expr) => expr.full_span,
 
            Expression::Binding(expr) => expr.full_span,
 
            Expression::Conditional(expr) => expr.full_span,
 
            Expression::Binary(expr) => expr.full_span,
 
            Expression::Unary(expr) => expr.full_span,
 
            Expression::Indexing(expr) => expr.full_span,
 
            Expression::Slicing(expr) => expr.full_span,
 
            Expression::Select(expr) => expr.full_span,
 
            Expression::Literal(expr) => expr.span,
 
            Expression::Cast(expr) => expr.full_span,
 
            Expression::Call(expr) => expr.full_span,
 
            Expression::Variable(expr) => expr.identifier.span,
 
        }
 
    }
 

	
 
    pub fn parent(&self) -> &ExpressionParent {
 
        match self {
 
            Expression::Assignment(expr) => &expr.parent,
 
            Expression::Binding(expr) => &expr.parent,
 
            Expression::Conditional(expr) => &expr.parent,
 
            Expression::Binary(expr) => &expr.parent,
 
            Expression::Unary(expr) => &expr.parent,
 
            Expression::Indexing(expr) => &expr.parent,
 
            Expression::Slicing(expr) => &expr.parent,
 
            Expression::Select(expr) => &expr.parent,
 
            Expression::Literal(expr) => &expr.parent,
 
            Expression::Cast(expr) => &expr.parent,
 
            Expression::Call(expr) => &expr.parent,
 
            Expression::Variable(expr) => &expr.parent,
 
        }
 
    }
 

	
 
    pub fn parent_expr_id(&self) -> Option<ExpressionId> {
 
        if let ExpressionParent::Expression(id, _) = self.parent() {
 
            Some(*id)
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    pub fn get_unique_id_in_definition(&self) -> i32 {
 
        match self {
 
            Expression::Assignment(expr) => expr.unique_id_in_definition,
 
            Expression::Binding(expr) => expr.unique_id_in_definition,
 
            Expression::Conditional(expr) => expr.unique_id_in_definition,
 
            Expression::Binary(expr) => expr.unique_id_in_definition,
 
            Expression::Unary(expr) => expr.unique_id_in_definition,
 
            Expression::Indexing(expr) => expr.unique_id_in_definition,
 
            Expression::Slicing(expr) => expr.unique_id_in_definition,
 
            Expression::Select(expr) => expr.unique_id_in_definition,
 
            Expression::Literal(expr) => expr.unique_id_in_definition,
 
            Expression::Cast(expr) => expr.unique_id_in_definition,
 
            Expression::Call(expr) => expr.unique_id_in_definition,
 
            Expression::Variable(expr) => expr.unique_id_in_definition,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub enum AssignmentOperator {
 
    Set,
 
    Concatenated,
 
    Multiplied,
 
    Divided,
 
    Remained,
 
    Added,
 
    Subtracted,
 
    ShiftedLeft,
 
    ShiftedRight,
 
    BitwiseAnded,
 
    BitwiseXored,
 
    BitwiseOred,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct AssignmentExpression {
 
    pub this: AssignmentExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub left: ExpressionId,
 
    pub operation: AssignmentOperator,
 
    pub right: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BindingExpression {
 
    pub this: BindingExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub bound_to: ExpressionId,
 
    pub bound_from: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ConditionalExpression {
 
    pub this: ConditionalExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub test: ExpressionId,
 
    pub true_expression: ExpressionId,
 
    pub false_expression: ExpressionId,
 
    // Validator/Linking
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 

	
 
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 
pub enum BinaryOperator {
 
    Concatenate,
 
    LogicalOr,
 
    LogicalAnd,
 
    BitwiseOr,
 
    BitwiseXor,
 
    BitwiseAnd,
 
    Equality,
 
    Inequality,
 
    LessThan,
 
    GreaterThan,
 
    LessThanEqual,
 
    GreaterThanEqual,
 
    ShiftLeft,
 
    ShiftRight,
 
    Add,
 
    Subtract,
 
    Multiply,
 
    Divide,
 
    Remainder,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BinaryExpression {
 
    pub this: BinaryExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub left: ExpressionId,
 
    pub operation: BinaryOperator,
 
    pub right: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 

	
 
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 
pub enum UnaryOperator {
 
    Positive,
 
    Negative,
 
    BitwiseNot,
 
    LogicalNot,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct UnaryExpression {
 
    pub this: UnaryExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub operation: UnaryOperator,
 
    pub expression: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct IndexingExpression {
 
    pub this: IndexingExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan,
 
    pub full_span: InputSpan,
 
    pub subject: ExpressionId,
 
    pub index: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SlicingExpression {
 
    pub this: SlicingExpressionId,
 
    // Parsing
 
    pub slicing_span: InputSpan, // from '[' to ']'
 
    pub full_span: InputSpan, // includes subject
 
    pub subject: ExpressionId,
 
    pub from_index: ExpressionId,
 
    pub to_index: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum SelectKind {
 
    StructField(Identifier),
 
    TupleMember(u64), // u64 is overkill, but space is taken up by `StructField` variant anyway
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SelectExpression {
 
    pub this: SelectExpressionId,
 
    // Parsing
 
    pub operator_span: InputSpan, // of the '.'
 
    pub full_span: InputSpan, // includes subject and field
 
    pub subject: ExpressionId,
 
    pub kind: SelectKind,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct CastExpression {
 
    pub this: CastExpressionId,
 
    // Parsing
 
    pub cast_span: InputSpan, // of the "cast" keyword,
 
    pub full_span: InputSpan, // includes the cast subject
 
    pub to_type: ParserType,
 
    pub subject: ExpressionId,
 
    // Validator/linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct CallExpression {
 
    pub this: CallExpressionId,
 
    // Parsing
 
    pub func_span: InputSpan, // of the function name
 
    pub full_span: InputSpan, // includes the arguments and parentheses
 
    pub parser_type: ParserType, // of the function call, not the return type
 
    pub method: Method,
 
    pub arguments: Vec<ExpressionId>,
 
    pub definition: DefinitionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum Method {
 
    // Builtin
 
    Get,
 
    Put,
 
    Fires,
 
    Create,
 
    Length,
 
    Assert,
 
    Print,
 
    UserFunction,
 
    UserComponent,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct MethodSymbolic {
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) definition: DefinitionId
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralExpression {
 
    pub this: LiteralExpressionId,
 
    // Parsing
 
    pub span: InputSpan,
 
    pub value: Literal,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Literal {
 
    Null, // message
 
    True,
 
    False,
 
    Character(char),
 
    String(StringRef<'static>),
 
    Integer(LiteralInteger),
 
    Struct(LiteralStruct),
 
    Enum(LiteralEnum),
 
    Union(LiteralUnion),
 
    Array(Vec<ExpressionId>),
 
    Tuple(Vec<ExpressionId>),
 
}
 

	
 
impl Literal {
 
    pub(crate) fn as_struct(&self) -> &LiteralStruct {
 
        if let Literal::Struct(literal) = self{
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Struct", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_enum(&self) -> &LiteralEnum {
 
        if let Literal::Enum(literal) = self {
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Enum", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_union(&self) -> &LiteralUnion {
 
        if let Literal::Union(literal) = self {
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Union", self)
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralInteger {
 
    pub(crate) unsigned_value: u64,
 
    pub(crate) negated: bool, // for constant expression evaluation, TODO: @Int
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralStructField {
 
    // Phase 1: parser
 
    pub(crate) identifier: Identifier,
 
    pub(crate) value: ExpressionId,
 
    // Phase 2: linker
 
    pub(crate) field_idx: usize, // in struct definition
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralStruct {
 
    // Phase 1: parser
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) fields: Vec<LiteralStructField>,
 
    pub(crate) definition: DefinitionId,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralEnum {
 
    // Phase 1: parser
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) variant: Identifier,
 
    pub(crate) definition: DefinitionId,
 
    // Phase 2: linker
 
    pub(crate) variant_idx: usize, // as present in the type table
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct LiteralUnion {
 
    // Phase 1: parser
 
    pub(crate) parser_type: ParserType,
 
    pub(crate) variant: Identifier,
 
    pub(crate) values: Vec<ExpressionId>,
 
    pub(crate) definition: DefinitionId,
 
    // Phase 2: linker
 
    pub(crate) variant_idx: usize, // as present in type table
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct VariableExpression {
 
    pub this: VariableExpressionId,
 
    // Parsing
 
    pub identifier: Identifier,
 
    // Validator/Linker
 
    pub declaration: Option<VariableId>,
 
    pub used_as_binding_target: bool,
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
}
 
\ No newline at end of file
src/protocol/parser/pass_definitions.rs
 
use crate::protocol::ast::*;
 
use super::symbol_table::*;
 
use super::{Module, ModuleCompilationPhase, PassCtx};
 
use super::tokens::*;
 
use super::token_parsing::*;
 
use super::pass_definitions_types::*;
 

	
 
use crate::protocol::input_source::{InputSource, InputPosition, InputSpan, ParseError};
 
use crate::collections::*;
 

	
 
/// Parses all the tokenized definitions into actual AST nodes.
 
pub(crate) struct PassDefinitions {
 
    // State associated with the definition currently being processed
 
    cur_definition: DefinitionId,
 
    // Itty bitty parsing machines
 
    type_parser: ParserTypeParser,
 
    // Temporary buffers of various kinds
 
    buffer: String,
 
    struct_fields: ScopedBuffer<StructFieldDefinition>,
 
    enum_variants: ScopedBuffer<EnumVariantDefinition>,
 
    union_variants: ScopedBuffer<UnionVariantDefinition>,
 
    variables: ScopedBuffer<VariableId>,
 
    expressions: ScopedBuffer<ExpressionId>,
 
    statements: ScopedBuffer<StatementId>,
 
    parser_types: ScopedBuffer<ParserType>,
 
}
 

	
 
impl PassDefinitions {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            cur_definition: DefinitionId::new_invalid(),
 
            type_parser: ParserTypeParser::new(),
 
            buffer: String::with_capacity(128),
 
            struct_fields: ScopedBuffer::with_capacity(128),
 
            enum_variants: ScopedBuffer::with_capacity(128),
 
            union_variants: ScopedBuffer::with_capacity(128),
 
            variables: ScopedBuffer::with_capacity(128),
 
            expressions: ScopedBuffer::with_capacity(128),
 
            statements: ScopedBuffer::with_capacity(128),
 
            parser_types: ScopedBuffer::with_capacity(128),
 
        }
 
    }
 

	
 
    pub(crate) fn parse(&mut self, modules: &mut [Module], module_idx: usize, ctx: &mut PassCtx) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let module_range = &module.tokens.ranges[0];
 
        debug_assert_eq!(module.phase, ModuleCompilationPhase::ImportsResolved);
 
        debug_assert_eq!(module_range.range_kind, TokenRangeKind::Module);
 

	
 
        // Although we only need to parse the definitions, we want to go through
 
        // code ranges as well such that we can throw errors if we get
 
        // unexpected tokens at the module level of the source.
 
        let mut range_idx = module_range.first_child_idx;
 
        loop {
 
            let range_idx_usize = range_idx as usize;
 
            let cur_range = &module.tokens.ranges[range_idx_usize];
 

	
 
            match cur_range.range_kind {
 
                TokenRangeKind::Module => unreachable!(), // should not be reachable
 
                TokenRangeKind::Pragma | TokenRangeKind::Import => {
 
                    // Already fully parsed, fall through and go to next range
 
                },
 
                TokenRangeKind::Definition | TokenRangeKind::Code => {
 
                    // Visit range even if it is a "code" range to provide
 
                    // proper error messages.
 
                    self.visit_range(modules, module_idx, ctx, range_idx_usize)?;
 
                },
 
            }
 

	
 
            if cur_range.next_sibling_idx == NO_SIBLING {
 
                break;
 
            } else {
 
                range_idx = cur_range.next_sibling_idx;
 
            }
 
        }
 

	
 
        modules[module_idx].phase = ModuleCompilationPhase::DefinitionsParsed;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_range(
 
        &mut self, modules: &[Module], module_idx: usize, ctx: &mut PassCtx, range_idx: usize
 
    ) -> Result<(), ParseError> {
 
        let module = &modules[module_idx];
 
        let cur_range = &module.tokens.ranges[range_idx];
 
        debug_assert!(cur_range.range_kind == TokenRangeKind::Definition || cur_range.range_kind == TokenRangeKind::Code);
 

	
 
        // Detect which definition we're parsing
 
        let mut iter = module.tokens.iter_range(cur_range);
 
        loop {
 
            let next = iter.next();
 
            if next.is_none() {
 
                return Ok(())
 
            }
 

	
 
            // The token is not None, so peek_ident returns None only if it is not an ident
 
            let ident = peek_ident(&module.source, &mut iter);
 
            match ident {
 
                Some(KW_STRUCT) => self.visit_struct_definition(module, &mut iter, ctx)?,
 
                Some(KW_ENUM) => self.visit_enum_definition(module, &mut iter, ctx)?,
 
                Some(KW_UNION) => self.visit_union_definition(module, &mut iter, ctx)?,
 
                Some(KW_FUNCTION) => self.visit_function_definition(module, &mut iter, ctx)?,
 
                Some(KW_PRIMITIVE) | Some(KW_COMPOSITE) => self.visit_component_definition(module, &mut iter, ctx)?,
 
                _ => return Err(ParseError::new_error_str_at_pos(
 
                    &module.source, iter.last_valid_pos(),
 
                    "unexpected symbol, expected a keyword marking the start of a definition"
 
                )),
 
            }
 
        }
 
    }
 

	
 
    fn visit_struct_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_STRUCT)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        // Parse struct definition
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut fields_section = self.struct_fields.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
                let start_pos = iter.last_valid_pos();
 
                let parser_type = self.type_parser.consume_parser_type(
 
                    iter, &ctx.heap, source, &ctx.symbols, poly_vars, definition_id,
 
                    module_scope, false, None
 
                )?;
 
                let field = consume_ident_interned(source, iter, ctx)?;
 
                Ok(StructFieldDefinition{
 
                    span: InputSpan::from_positions(start_pos, field.span.end),
 
                    field, parser_type
 
                })
 
            },
 
            &mut fields_section, "a struct field", "a list of struct fields", None
 
        )?;
 

	
 
        // Transfer to preallocated definition
 
        let struct_def = ctx.heap[definition_id].as_struct_mut();
 
        struct_def.fields = fields_section.into_vec();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_enum_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_ENUM)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        // Parse enum definition
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut enum_section = self.enum_variants.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let identifier = consume_ident_interned(source, iter, ctx)?;
 
                let value = if iter.next() == Some(TokenKind::Equal) {
 
                    iter.consume();
 
                    let (variant_number, _) = consume_integer_literal(source, iter, &mut self.buffer)?;
 
                    EnumVariantValue::Integer(variant_number as i64) // TODO: @int
 
                } else {
 
                    EnumVariantValue::None
 
                };
 
                Ok(EnumVariantDefinition{ identifier, value })
 
            },
 
            &mut enum_section, "an enum variant", "a list of enum variants", None
 
        )?;
 

	
 
        // Transfer to definition
 
        let enum_def = ctx.heap[definition_id].as_enum_mut();
 
        enum_def.variants = enum_section.into_vec();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_union_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_UNION)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        // Parse union definition
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut variants_section = self.union_variants.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let identifier = consume_ident_interned(source, iter, ctx)?;
 
                let mut close_pos = identifier.span.end;
 

	
 
                let mut types_section = self.parser_types.start_section();
 

	
 
                let has_embedded = maybe_consume_comma_separated(
 
                    TokenKind::OpenParen, TokenKind::CloseParen, source, iter, ctx,
 
                    |source, iter, ctx| {
 
                        let poly_vars = ctx.heap[definition_id].poly_vars();
 
                        self.type_parser.consume_parser_type(
 
                            iter, &ctx.heap, source, &ctx.symbols, poly_vars, definition_id,
 
                            module_scope, false, None
 
                        )
 
                    },
 
                    &mut types_section, "an embedded type", Some(&mut close_pos)
 
                )?;
 
                let value = if has_embedded {
 
                    types_section.into_vec()
 
                } else {
 
                    types_section.forget();
 
                    Vec::new()
 
                };
 

	
 
                Ok(UnionVariantDefinition{
 
                    span: InputSpan::from_positions(identifier.span.begin, close_pos),
 
                    identifier,
 
                    value
 
                })
 
            },
 
            &mut variants_section, "a union variant", "a list of union variants", None
 
        )?;
 

	
 
        // Transfer to AST
 
        let union_def = ctx.heap[definition_id].as_union_mut();
 
        union_def.variants = variants_section.into_vec();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_function_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        // Retrieve function name
 
        consume_exact_ident(&module.source, iter, KW_FUNCTION)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        // Parse function's argument list
 
        let mut parameter_section = self.variables.start_section();
 
        consume_parameter_list(
 
            &mut self.type_parser, &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
 
        )?;
 
        let parameters = parameter_section.into_vec();
 

	
 
        // Consume return types
 
        consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
        let mut return_types = self.parser_types.start_section();
 
        let mut open_curly_pos = iter.last_valid_pos(); // bogus value
 
        consume_comma_separated_until(
 
            TokenKind::OpenCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let poly_vars = ctx.heap[definition_id].poly_vars();
 
                self.type_parser.consume_parser_type(
 
                    iter, &ctx.heap, source, &ctx.symbols, poly_vars, definition_id,
 
                    module_scope, false, None
 
                )
 
            },
 
            &mut return_types, "a return type", Some(&mut open_curly_pos)
 
        )?;
 
        let return_types = return_types.into_vec();
 

	
 
        match return_types.len() {
 
            0 => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "expected a return type")),
 
            1 => {},
 
            _ => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "multiple return types are not (yet) allowed")),
 
        }
 

	
 
        // Consume block
 
        let body = self.consume_block_statement_without_leading_curly(module, iter, ctx, open_curly_pos)?;
 

	
 
        // Assign everything in the preallocated AST node
 
        let function = ctx.heap[definition_id].as_function_mut();
 
        function.return_types = return_types;
 
        function.parameters = parameters;
 
        function.body = body;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_component_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        // Consume component variant and name
 
        let (_variant_text, _) = consume_any_ident(&module.source, iter)?;
 
        debug_assert!(_variant_text == KW_PRIMITIVE || _variant_text == KW_COMPOSITE);
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated definition
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        // Parse component's argument list
 
        let mut parameter_section = self.variables.start_section();
 
        consume_parameter_list(
 
            &mut self.type_parser, &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
 
        )?;
 
        let parameters = parameter_section.into_vec();
 

	
 
        // Consume block
 
        let body = self.consume_block_statement(module, iter, ctx)?;
 

	
 
        // Assign everything in the AST node
 
        let component = ctx.heap[definition_id].as_component_mut();
 
        component.parameters = parameters;
 
        component.body = body;
 

	
 
        Ok(())
 
    }
 

	
 
    /// Consumes a block statement. If the resulting statement is not a block
 
    /// (e.g. for a shorthand "if (expr) single_statement") then it will be
 
    /// wrapped in one
 
    fn consume_block_or_wrapped_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<BlockStatementId, ParseError> {
 
        if Some(TokenKind::OpenCurly) == iter.next() {
 
            // This is a block statement
 
            self.consume_block_statement(module, iter, ctx)
 
        } else {
 
            // Not a block statement, so wrap it in one
 
            let mut statements = self.statements.start_section();
 
            let wrap_begin_pos = iter.last_valid_pos();
 
            self.consume_statement(module, iter, ctx, &mut statements)?;
 
            let wrap_end_pos = iter.last_valid_pos();
 

	
 
            let statements = statements.into_vec();
 

	
 
            let id = ctx.heap.alloc_block_statement(|this| BlockStatement{
 
                this,
 
                is_implicit: true,
 
                span: InputSpan::from_positions(wrap_begin_pos, wrap_end_pos),
 
                statements,
 
                end_block: EndBlockStatementId::new_invalid(),
 
                scope_node: ScopeNode::new_invalid(),
 
                first_unique_id_in_scope: -1,
 
                next_unique_id_in_scope: -1,
 
                relative_pos_in_parent: 0,
 
                locals: Vec::new(),
 
                labels: Vec::new(),
 
                next: StatementId::new_invalid(),
 
            });
 

	
 
            let end_block = ctx.heap.alloc_end_block_statement(|this| EndBlockStatement{
 
                this, start_block: id, next: StatementId::new_invalid()
 
            });
 

	
 
            let block_stmt = &mut ctx.heap[id];
 
            block_stmt.end_block = end_block;
 

	
 
            Ok(id)
 
        }
 
    }
 

	
 
    /// Consumes a statement and pushes it into the provided section. A single
 
    /// source statement may push multiple entries (e.g. an if/while/sync/fork/
 
    /// select statement is followed directly by its matching "end" statement).
 
    fn consume_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, section: &mut ScopedSection<StatementId>
 
    ) -> Result<(), ParseError> {
 
        let next = iter.next().expect("consume_statement has a next token");
 

	
 
        if next == TokenKind::OpenCurly {
 
            let id = self.consume_block_statement(module, iter, ctx)?;
 
            section.push(id.upcast());
 
        } else if next == TokenKind::Ident {
 
            let ident = peek_ident(&module.source, iter).unwrap();
 
            if ident == KW_STMT_IF {
 
                // Consume if statement and place end-if statement directly
 
                // after it.
 
                let id = self.consume_if_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 

	
 
                let end_if = ctx.heap.alloc_end_if_statement(|this| EndIfStatement {
 
                    this,
 
                    start_if: id,
 
                    next: StatementId::new_invalid()
 
                });
 
                section.push(end_if.upcast());
 

	
 
                let if_stmt = &mut ctx.heap[id];
 
                if_stmt.end_if = end_if;
 
            } else if ident == KW_STMT_WHILE {
 
                let id = self.consume_while_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 

	
 
                let end_while = ctx.heap.alloc_end_while_statement(|this| EndWhileStatement {
 
                    this,
 
                    start_while: id,
 
                    next: StatementId::new_invalid()
 
                });
 
                section.push(end_while.upcast());
 

	
 
                let while_stmt = &mut ctx.heap[id];
 
                while_stmt.end_while = end_while;
 
            } else if ident == KW_STMT_BREAK {
 
                let id = self.consume_break_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 
            } else if ident == KW_STMT_CONTINUE {
 
                let id = self.consume_continue_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 
            } else if ident == KW_STMT_SYNC {
 
                let id = self.consume_synchronous_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 

	
 
                let end_sync = ctx.heap.alloc_end_synchronous_statement(|this| EndSynchronousStatement {
 
                    this,
 
                    start_sync: id,
 
                    next: StatementId::new_invalid()
 
                });
 
                section.push(end_sync.upcast());
 

	
 
                let sync_stmt = &mut ctx.heap[id];
 
                sync_stmt.end_sync = end_sync;
 
            } else if ident == KW_STMT_FORK {
 
                let id = self.consume_fork_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 

	
 
                let end_fork = ctx.heap.alloc_end_fork_statement(|this| EndForkStatement {
 
                    this,
 
                    start_fork: id,
 
                    next: StatementId::new_invalid(),
 
                });
 
                section.push(end_fork.upcast());
 

	
 
                let fork_stmt = &mut ctx.heap[id];
 
                fork_stmt.end_fork = end_fork;
 
            } else if ident == KW_STMT_SELECT {
 
                let id = self.consume_select_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 

	
 
                let end_select = ctx.heap.alloc_end_select_statement(|this| EndSelectStatement{
 
                    this,
 
                    start_select: id,
 
                    next: StatementId::new_invalid(),
 
                });
 
                section.push(end_select.upcast());
 

	
 
                let select_stmt = &mut ctx.heap[id];
 
                select_stmt.end_select = end_select;
 
            } else if ident == KW_STMT_RETURN {
 
                let id = self.consume_return_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 
            } else if ident == KW_STMT_GOTO {
 
                let id = self.consume_goto_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 
            } else if ident == KW_STMT_NEW {
 
                let id = self.consume_new_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 
            } else if ident == KW_STMT_CHANNEL {
 
                let id = self.consume_channel_statement(module, iter, ctx)?;
 
                section.push(id.upcast().upcast());
 
            } else if iter.peek() == Some(TokenKind::Colon) {
 
                self.consume_labeled_statement(module, iter, ctx, section)?;
 
            } else {
 
                // Two fallback possibilities: the first one is a memory
 
                // declaration, the other one is to parse it as a normal
 
                // expression. This is a bit ugly.
 
                if let Some(memory_stmt_id) = self.maybe_consume_memory_statement_without_semicolon(module, iter, ctx)? {
 
                    consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
                    section.push(memory_stmt_id.upcast().upcast());
 
                } else {
 
                    let id = self.consume_expression_statement(module, iter, ctx)?;
 
                    section.push(id.upcast());
 
                }
 
            }
 
        } else if next == TokenKind::OpenParen {
 
            // Same as above: memory statement or normal expression
 
            if let Some(memory_stmt_id) = self.maybe_consume_memory_statement_without_semicolon(module, iter, ctx)? {
 
                consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
                section.push(memory_stmt_id.upcast().upcast());
 
            } else {
 
                let id = self.consume_expression_statement(module, iter, ctx)?;
 
                section.push(id.upcast());
 
            }
 
        } else {
 
            let id = self.consume_expression_statement(module, iter, ctx)?;
 
            section.push(id.upcast());
 
        }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn consume_block_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<BlockStatementId, ParseError> {
 
        let open_span = consume_token(&module.source, iter, TokenKind::OpenCurly)?;
 
        self.consume_block_statement_without_leading_curly(module, iter, ctx, open_span.begin)
 
    }
 

	
 
    fn consume_block_statement_without_leading_curly(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, open_curly_pos: InputPosition
 
    ) -> Result<BlockStatementId, ParseError> {
 
        let mut stmt_section = self.statements.start_section();
 
        let mut next = iter.next();
 
        while next != Some(TokenKind::CloseCurly) {
 
            if next.is_none() {
 
                return Err(ParseError::new_error_str_at_pos(
 
                    &module.source, iter.last_valid_pos(), "expected a statement or '}'"
 
                ));
 
            }
 
            self.consume_statement(module, iter, ctx, &mut stmt_section)?;
 
            next = iter.next();
 
        }
 

	
 
        let statements = stmt_section.into_vec();
 
        let mut block_span = consume_token(&module.source, iter, TokenKind::CloseCurly)?;
 
        block_span.begin = open_curly_pos;
 

	
 
        let id = ctx.heap.alloc_block_statement(|this| BlockStatement{
 
            this,
 
            is_implicit: false,
 
            span: block_span,
 
            statements,
 
            end_block: EndBlockStatementId::new_invalid(),
 
            scope_node: ScopeNode::new_invalid(),
 
            first_unique_id_in_scope: -1,
 
            next_unique_id_in_scope: -1,
 
            relative_pos_in_parent: 0,
 
            locals: Vec::new(),
 
            labels: Vec::new(),
 
            next: StatementId::new_invalid(),
 
        });
 

	
 
        let end_block = ctx.heap.alloc_end_block_statement(|this| EndBlockStatement{
 
            this, start_block: id, next: StatementId::new_invalid()
 
        });
 

	
 
        let block_stmt = &mut ctx.heap[id];
 
        block_stmt.end_block = end_block;
 

	
 
        Ok(id)
 
    }
 

	
 
    fn consume_if_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<IfStatementId, ParseError> {
 
        let if_span = consume_exact_ident(&module.source, iter, KW_STMT_IF)?;
 
        consume_token(&module.source, iter, TokenKind::OpenParen)?;
 
        let test = self.consume_expression(module, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::CloseParen)?;
 
        let true_body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 

	
 
        let false_body = if has_ident(&module.source, iter, KW_STMT_ELSE) {
 
            iter.consume();
 
            let false_body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 
            Some(false_body)
 
        } else {
 
            None
 
        };
 

	
 
        Ok(ctx.heap.alloc_if_statement(|this| IfStatement{
 
            this,
 
            span: if_span,
 
            test,
 
            true_body,
 
            false_body,
 
            end_if: EndIfStatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_while_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<WhileStatementId, ParseError> {
 
        let while_span = consume_exact_ident(&module.source, iter, KW_STMT_WHILE)?;
 
        consume_token(&module.source, iter, TokenKind::OpenParen)?;
 
        let test = self.consume_expression(module, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::CloseParen)?;
 
        let body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 

	
 
        Ok(ctx.heap.alloc_while_statement(|this| WhileStatement{
 
            this,
 
            span: while_span,
 
            test,
 
            body,
 
            end_while: EndWhileStatementId::new_invalid(),
 
            in_sync: SynchronousStatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_break_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<BreakStatementId, ParseError> {
 
        let break_span = consume_exact_ident(&module.source, iter, KW_STMT_BREAK)?;
 
        let label = if Some(TokenKind::Ident) == iter.next() {
 
            let label = consume_ident_interned(&module.source, iter, ctx)?;
 
            Some(label)
 
        } else {
 
            None
 
        };
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
        Ok(ctx.heap.alloc_break_statement(|this| BreakStatement{
 
            this,
 
            span: break_span,
 
            label,
 
            target: None,
 
        }))
 
    }
 

	
 
    fn consume_continue_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ContinueStatementId, ParseError> {
 
        let continue_span = consume_exact_ident(&module.source, iter, KW_STMT_CONTINUE)?;
 
        let label = if Some(TokenKind::Ident) == iter.next() {
 
            let label = consume_ident_interned(&module.source, iter, ctx)?;
 
            Some(label)
 
        } else {
 
            None
 
        };
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
        Ok(ctx.heap.alloc_continue_statement(|this| ContinueStatement{
 
            this,
 
            span: continue_span,
 
            label,
 
            target: None
 
        }))
 
    }
 

	
 
    fn consume_synchronous_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<SynchronousStatementId, ParseError> {
 
        let synchronous_span = consume_exact_ident(&module.source, iter, KW_STMT_SYNC)?;
 
        let body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 

	
 
        Ok(ctx.heap.alloc_synchronous_statement(|this| SynchronousStatement{
 
            this,
 
            span: synchronous_span,
 
            body,
 
            end_sync: EndSynchronousStatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_fork_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ForkStatementId, ParseError> {
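        // A fork statement is the `fork` keyword followed by a (possibly
        // wrapped) block, optionally followed by `or` and a second block.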
 
        let fork_span = consume_exact_ident(&module.source, iter, KW_STMT_FORK)?;
 
        let left_body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 

	
 
        let right_body = if has_ident(&module.source, iter, KW_STMT_OR) {
 
            iter.consume();
 
            let right_body = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 
            Some(right_body)
 
        } else {
 
            None
 
        };
 

	
 
        Ok(ctx.heap.alloc_fork_statement(|this| ForkStatement{
 
            this,
 
            span: fork_span,
 
            left_body,
 
            right_body,
 
            end_fork: EndForkStatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_select_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<SelectStatementId, ParseError> {
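        // A select statement has the rough shape `select { guard -> body ... }`
        // where each guard is either a memory statement (without its trailing
        // semicolon) or a plain expression, and each body is a (possibly
        // wrapped) block.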
 
        let select_span = consume_exact_ident(&module.source, iter, KW_STMT_SELECT)?;
 
        consume_token(&module.source, iter, TokenKind::OpenCurly)?;
 

	
 
        let mut cases = Vec::new();
 
        let mut next = iter.next();
 

	
 
        while Some(TokenKind::CloseCurly) != next {
 
            let guard = match self.maybe_consume_memory_statement_without_semicolon(module, iter, ctx)? {
 
                Some(guard_mem_stmt) => guard_mem_stmt.upcast().upcast(),
 
                None => {
 
                    let start_pos = iter.last_valid_pos();
 
                    let expr = self.consume_expression(module, iter, ctx)?;
 
                    let end_pos = iter.last_valid_pos();
 

	
 
                    let guard_expr_stmt = ctx.heap.alloc_expression_statement(|this| ExpressionStatement{
 
                        this,
 
                        span: InputSpan::from_positions(start_pos, end_pos),
 
                        expression: expr,
 
                        next: StatementId::new_invalid(),
 
                    });
 

	
 
                    guard_expr_stmt.upcast()
 
                },
 
            };
 
            consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
            let block = self.consume_block_or_wrapped_statement(module, iter, ctx)?;
 
            cases.push(SelectCase{
 
                guard, block,
 
                involved_ports: Vec::with_capacity(1)
 
            });
 

	
 
            next = iter.next();
 
        }
 

	
 
        consume_token(&module.source, iter, TokenKind::CloseCurly)?;
 

	
 
        Ok(ctx.heap.alloc_select_statement(|this| SelectStatement{
 
            this,
 
            span: select_span,
 
            cases,
 
            end_select: EndSelectStatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_return_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ReturnStatementId, ParseError> {
 
        let return_span = consume_exact_ident(&module.source, iter, KW_STMT_RETURN)?;
 
        let mut scoped_section = self.expressions.start_section();
 

	
 
        consume_comma_separated_until(
 
            TokenKind::SemiColon, &module.source, iter, ctx,
 
            |_source, iter, ctx| self.consume_expression(module, iter, ctx),
 
            &mut scoped_section, "an expression", None
 
        )?;
 
        let expressions = scoped_section.into_vec();
 

	
 
        if expressions.is_empty() {
 
            return Err(ParseError::new_error_str_at_span(&module.source, return_span, "expected at least one return value"));
 
        } else if expressions.len() > 1 {
 
            return Err(ParseError::new_error_str_at_span(&module.source, return_span, "multiple return values are not (yet) supported"))
 
        }
 

	
 
        Ok(ctx.heap.alloc_return_statement(|this| ReturnStatement{
 
            this,
 
            span: return_span,
 
            expressions
 
        }))
 
    }
 

	
 
    fn consume_goto_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<GotoStatementId, ParseError> {
 
        let goto_span = consume_exact_ident(&module.source, iter, KW_STMT_GOTO)?;
 
        let label = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 
        Ok(ctx.heap.alloc_goto_statement(|this| GotoStatement{
 
            this,
 
            span: goto_span,
 
            label,
 
            target: None
 
        }))
 
    }
 

	
 
    fn consume_new_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<NewStatementId, ParseError> {
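        // A new statement is the `new` keyword followed by a call expression
        // and a terminating semicolon; whether the callee is a component or a
        // function is checked later in the validation/linking pass.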
 
        let new_span = consume_exact_ident(&module.source, iter, KW_STMT_NEW)?;
 

	
 
        let start_pos = iter.last_valid_pos();
 
        let expression_id = self.consume_primary_expression(module, iter, ctx)?;
 
        let expression = &ctx.heap[expression_id];
 
        let mut valid = false;
 

	
 
        let mut call_id = CallExpressionId::new_invalid();
 
        if let Expression::Call(expression) = expression {
 
            // Allow both components and functions, as it makes more sense to
 
            // check their correct use in the validation and linking pass
 
            if expression.method == Method::UserComponent || expression.method == Method::UserFunction {
 
                call_id = expression.this;
 
                valid = true;
 
            }
 
        }
 

	
 
        if !valid {
 
            return Err(ParseError::new_error_str_at_span(
 
                &module.source, InputSpan::from_positions(start_pos, iter.last_valid_pos()), "expected a call expression"
 
            ));
 
        }
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        debug_assert!(!call_id.is_invalid());
 
        Ok(ctx.heap.alloc_new_statement(|this| NewStatement{
 
            this,
 
            span: new_span,
 
            expression: call_id,
 
            next: StatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_channel_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ChannelStatementId, ParseError> {
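        // A channel statement has the rough shape `channel a -> b;`, or
        // `channel<T> a -> b;` with an explicit payload type. It declares `a`
        // as the output port and `b` as the input port; without an explicit
        // type the payload type is left to be inferred.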
 
        // Consume channel specification
 
        let channel_span = consume_exact_ident(&module.source, iter, KW_STMT_CHANNEL)?;
 
        let (inner_port_type, end_pos) = if Some(TokenKind::OpenAngle) == iter.next() {
 
            // Retrieve the type of the channel, we're cheating a bit here by
 
            // consuming the first '<' and setting the initial angle depth to 1
 
            // such that our final '>' will be consumed as well.
 
            let angle_start_pos = iter.next_start_position();
 
            iter.consume();
 
            let definition_id = self.cur_definition;
 
            let poly_vars = ctx.heap[definition_id].poly_vars();
 
            let parser_type = self.type_parser.consume_parser_type(
 
                iter, &ctx.heap, &module.source, &ctx.symbols, poly_vars,
 
                definition_id, SymbolScope::Module(module.root_id),
 
                true, Some(angle_start_pos)
 
            )?;
 

	
 
            (parser_type.elements, parser_type.full_span.end)
 
        } else {
 
            // Assume inferred
 
            (
 
                vec![ParserTypeElement{
 
                    element_span: channel_span,
 
                    variant: ParserTypeVariant::Inferred
 
                }],
 
                channel_span.end
 
            )
 
        };
 

	
 
        let from_identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
        let to_identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        // Construct ports
 
        let port_type_span = InputSpan::from_positions(channel_span.begin, end_pos);
 
        let port_type_len = inner_port_type.len() + 1;
 
        let mut from_port_type = ParserType{ elements: Vec::with_capacity(port_type_len), full_span: port_type_span };
 
        from_port_type.elements.push(ParserTypeElement{
 
            element_span: channel_span,
 
            variant: ParserTypeVariant::Output,
 
        });
 
        from_port_type.elements.extend_from_slice(&inner_port_type);
 
        let from = ctx.heap.alloc_variable(|this| Variable{
 
            this,
 
            kind: VariableKind::Local,
 
            identifier: from_identifier,
 
            parser_type: from_port_type,
 
            relative_pos_in_block: 0,
 
            unique_id_in_scope: -1,
 
        });
 

	
 
        let mut to_port_type = ParserType{ elements: Vec::with_capacity(port_type_len), full_span: port_type_span };
 
        to_port_type.elements.push(ParserTypeElement{
 
            element_span: channel_span,
 
            variant: ParserTypeVariant::Input
 
        });
 
        to_port_type.elements.extend_from_slice(&inner_port_type);
 
        let to = ctx.heap.alloc_variable(|this|Variable{
 
            this,
 
            kind: VariableKind::Local,
 
            identifier: to_identifier,
 
            parser_type: to_port_type,
 
            relative_pos_in_block: 0,
 
            unique_id_in_scope: -1,
 
        });
 

	
 
        // Construct the channel
 
        Ok(ctx.heap.alloc_channel_statement(|this| ChannelStatement{
 
            this,
 
            span: channel_span,
 
            from, to,
 
            relative_pos_in_block: 0,
 
            next: StatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_labeled_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, section: &mut ScopedSection<StatementId>
 
    ) -> Result<(), ParseError> {
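        // A labeled statement is `label: statement`; the label may later be
        // referenced by `goto`, and optionally by `break`/`continue`.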
 
        let label = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::Colon)?;
 

	
 
        // Not pretty: consume_statement may produce more than one statement.
 
        // The values in the section need to be in the correct order if some
 
        // kind of outer block is consumed, so we take another section, push
 
        // the statements in that one, and then allocate the labeled statement.
 
        let mut inner_section = self.statements.start_section();
 
        self.consume_statement(module, iter, ctx, &mut inner_section)?;
 
        debug_assert!(inner_section.len() >= 1);
 

	
 
        let stmt_id = ctx.heap.alloc_labeled_statement(|this| LabeledStatement {
 
            this,
 
            label,
 
            body: inner_section[0],
 
            relative_pos_in_block: 0,
 
            in_sync: SynchronousStatementId::new_invalid(),
 
        });
 

	
 
        if inner_section.len() == 1 {
 
            // Produce the labeled statement pointing to the first statement.
 
            // This is by far the most common case.
 
            inner_section.forget();
 
            section.push(stmt_id.upcast());
 
        } else {
 
            // Produce the labeled statement using the first statement, and push
 
            // the remaining ones at the end.
 
            let inner_statements = inner_section.into_vec();
 
            section.push(stmt_id.upcast());
 
            for idx in 1..inner_statements.len() {
 
                section.push(inner_statements[idx])
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    /// Attempts to consume a memory statement (a statement along the lines of
 
    /// `type var_name = initial_expr`). Will return `Ok(None)` if it didn't
 
    /// seem like there was a memory statement, `Ok(Some(...))` if there was
 
    /// one, and `Err(...)` if it is reasonable to assume that there was a memory
 
    /// statement, but we failed to parse it.
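    /// (For example, roughly `u32 counter = some_expr`, assuming `u32` is the
    /// keyword for a 32-bit unsigned integer.)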
 
    fn maybe_consume_memory_statement_without_semicolon(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<Option<MemoryStatementId>, ParseError> {
 
        // This is a bit ugly. It would be nicer if we could somehow
 
        // consume the expression with a type hint if we do get a valid
 
        // type, but we don't get an identifier following it
 
        let iter_state = iter.save();
 
        let definition_id = self.cur_definition;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        let parser_type = self.type_parser.consume_parser_type(
 
            iter, &ctx.heap, &module.source, &ctx.symbols, poly_vars,
 
            definition_id, SymbolScope::Definition(definition_id), true, None
 
        );
 

	
 
        if let Ok(parser_type) = parser_type {
 
            if Some(TokenKind::Ident) == iter.next() {
 
                // Assume this is a proper memory statement
 
                let identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
                let memory_span = InputSpan::from_positions(parser_type.full_span.begin, identifier.span.end);
 
                let assign_span = consume_token(&module.source, iter, TokenKind::Equal)?;
 

	
 
                let initial_expr_id = self.consume_expression(module, iter, ctx)?;
 
                let initial_expr_end_pos = iter.last_valid_pos();
 

	
 
                // Create the AST variable
 
                let local_id = ctx.heap.alloc_variable(|this| Variable{
 
                    this,
 
                    kind: VariableKind::Local,
 
                    identifier: identifier.clone(),
 
                    parser_type,
 
                    relative_pos_in_block: 0,
 
                    unique_id_in_scope: -1,
 
                });
 

	
 
                // Create the initial assignment expression
 
                // Note: we set the initial variable declaration here
 
                let variable_expr_id = ctx.heap.alloc_variable_expression(|this| VariableExpression{
 
                    this,
 
                    identifier,
 
                    declaration: Some(local_id),
 
                    used_as_binding_target: false,
 
                    parent: ExpressionParent::None,
 
                    unique_id_in_definition: -1,
 
                });
 
                let assignment_expr_id = ctx.heap.alloc_assignment_expression(|this| AssignmentExpression{
 
                    this,
 
                    operator_span: assign_span,
 
                    full_span: InputSpan::from_positions(memory_span.begin, initial_expr_end_pos),
 
                    left: variable_expr_id.upcast(),
 
                    operation: AssignmentOperator::Set,
 
                    right: initial_expr_id,
 
                    parent: ExpressionParent::None,
 
                    unique_id_in_definition: -1,
 
                });
 

	
 
                // Put both together in the memory statement
 
                let memory_stmt_id = ctx.heap.alloc_memory_statement(|this| MemoryStatement{
 
                    this,
 
                    span: memory_span,
 
                    variable: local_id,
 
                    initial_expr: assignment_expr_id,
 
                    next: StatementId::new_invalid()
 
                });
 

	
 
                return Ok(Some(memory_stmt_id));
 
            }
 
        }
 

	
 
        // If here then one of the preconditions for a memory statement was not
 
        // met. So recover the iterator and return
 
        iter.load(iter_state);
 
        Ok(None)
 
    }
 

	
 
    fn consume_expression_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionStatementId, ParseError> {
 
        let start_pos = iter.last_valid_pos();
 
        let expression = self.consume_expression(module, iter, ctx)?;
 
        let end_pos = iter.last_valid_pos();
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        Ok(ctx.heap.alloc_expression_statement(|this| ExpressionStatement{
 
            this,
 
            span: InputSpan::from_positions(start_pos, end_pos),
 
            expression,
 
            next: StatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Expression Parsing
 
    //--------------------------------------------------------------------------
 

	
 
    // TODO: @Cleanup This is fine for now. But I prefer my stacktraces not to
 
    //  look like enterprise Java code...
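    //  The functions below form a recursive-descent precedence ladder:
    //  assignment -> conditional -> concatenation -> logical or -> logical and
    //  -> bitwise or -> bitwise xor -> bitwise and -> equality -> relational
    //  -> shift -> add/subtract -> multiply/divide/modulus -> prefix ->
    //  postfix -> primary.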
 
    fn consume_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_assignment_expression(module, iter, ctx)
 
    }
 

	
 
    fn consume_assignment_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        // Utility to convert token into assignment operator
 
        fn parse_assignment_operator(token: Option<TokenKind>) -> Option<AssignmentOperator> {
 
            use TokenKind as TK;
 
            use AssignmentOperator as AO;
 

	
 
            if token.is_none() {
 
                return None
 
            }
 

	
 
            match token.unwrap() {
 
                TK::Equal               => Some(AO::Set),
 
                TK::AtEquals            => Some(AO::Concatenated),
 
                TK::StarEquals          => Some(AO::Multiplied),
 
                TK::SlashEquals         => Some(AO::Divided),
 
                TK::PercentEquals       => Some(AO::Remained),
 
                TK::PlusEquals          => Some(AO::Added),
 
                TK::MinusEquals         => Some(AO::Subtracted),
 
                TK::ShiftLeftEquals     => Some(AO::ShiftedLeft),
 
                TK::ShiftRightEquals    => Some(AO::ShiftedRight),
 
                TK::AndEquals           => Some(AO::BitwiseAnded),
 
                TK::CaretEquals         => Some(AO::BitwiseXored),
 
                TK::OrEquals            => Some(AO::BitwiseOred),
 
                _                       => None
 
            }
 
        }
 

	
 
        let expr = self.consume_conditional_expression(module, iter, ctx)?;
 
        if let Some(operation) = parse_assignment_operator(iter.next()) {
 
            let operator_span = iter.next_span();
 
            iter.consume();
 

	
 
            let left = expr;
 
            let right = self.consume_expression(module, iter, ctx)?;
 

	
 
            let full_span = InputSpan::from_positions(
 
                ctx.heap[left].full_span().begin,
 
                ctx.heap[right].full_span().end,
 
            );
 

	
 
            Ok(ctx.heap.alloc_assignment_expression(|this| AssignmentExpression{
 
                this, operator_span, full_span, left, operation, right,
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
            }).upcast())
 
        } else {
 
            Ok(expr)
 
        }
 
    }
 

	
 
    fn consume_conditional_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        let result = self.consume_concat_expression(module, iter, ctx)?;
 
        if let Some(TokenKind::Question) = iter.next() {
 
            let operator_span = iter.next_span();
 
            iter.consume();
 

	
 
            let test = result;
 
            let true_expression = self.consume_expression(module, iter, ctx)?;
 
            consume_token(&module.source, iter, TokenKind::Colon)?;
 
            let false_expression = self.consume_expression(module, iter, ctx)?;
 

	
 
            let full_span = InputSpan::from_positions(
 
                ctx.heap[test].full_span().begin,
 
                ctx.heap[false_expression].full_span().end,
 
            );
 

	
 
            Ok(ctx.heap.alloc_conditional_expression(|this| ConditionalExpression{
 
                this, operator_span, full_span, test, true_expression, false_expression,
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
            }).upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 

	
 
    fn consume_concat_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::At) => Some(BinaryOperator::Concatenate),
 
                _ => None
 
            },
 
            Self::consume_logical_or_expression
 
        )
 
    }
 

	
 
    fn consume_logical_or_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::OrOr) => Some(BinaryOperator::LogicalOr),
 
                _ => None
 
            },
 
            Self::consume_logical_and_expression
 
        )
 
    }
 

	
 
    fn consume_logical_and_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::AndAnd) => Some(BinaryOperator::LogicalAnd),
 
                _ => None
 
            },
 
            Self::consume_bitwise_or_expression
 
        )
 
    }
 

	
 
    fn consume_bitwise_or_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Or) => Some(BinaryOperator::BitwiseOr),
 
                _ => None
 
            },
 
            Self::consume_bitwise_xor_expression
 
        )
 
    }
 

	
 
    fn consume_bitwise_xor_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Caret) => Some(BinaryOperator::BitwiseXor),
 
                _ => None
 
            },
 
            Self::consume_bitwise_and_expression
 
        )
 
    }
 

	
 
    fn consume_bitwise_and_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::And) => Some(BinaryOperator::BitwiseAnd),
 
                _ => None
 
            },
 
            Self::consume_equality_expression
 
        )
 
    }
 

	
 
    fn consume_equality_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::EqualEqual) => Some(BinaryOperator::Equality),
 
                Some(TokenKind::NotEqual) => Some(BinaryOperator::Inequality),
 
                _ => None
 
            },
 
            Self::consume_relational_expression
 
        )
 
    }
 

	
 
    fn consume_relational_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::OpenAngle) => Some(BinaryOperator::LessThan),
 
                Some(TokenKind::CloseAngle) => Some(BinaryOperator::GreaterThan),
 
                Some(TokenKind::LessEquals) => Some(BinaryOperator::LessThanEqual),
 
                Some(TokenKind::GreaterEquals) => Some(BinaryOperator::GreaterThanEqual),
 
                _ => None
 
            },
 
            Self::consume_shift_expression
 
        )
 
    }
 

	
 
    fn consume_shift_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::ShiftLeft) => Some(BinaryOperator::ShiftLeft),
 
                Some(TokenKind::ShiftRight) => Some(BinaryOperator::ShiftRight),
 
                _ => None
 
            },
 
            Self::consume_add_or_subtract_expression
 
        )
 
    }
 

	
 
    fn consume_add_or_subtract_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Plus) => Some(BinaryOperator::Add),
 
                Some(TokenKind::Minus) => Some(BinaryOperator::Subtract),
 
                _ => None,
 
            },
 
            Self::consume_multiply_divide_or_modulus_expression
 
        )
 
    }
 

	
 
    fn consume_multiply_divide_or_modulus_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        self.consume_generic_binary_expression(
 
            module, iter, ctx,
 
            |token| match token {
 
                Some(TokenKind::Star) => Some(BinaryOperator::Multiply),
 
                Some(TokenKind::Slash) => Some(BinaryOperator::Divide),
 
                Some(TokenKind::Percent) => Some(BinaryOperator::Remainder),
 
                _ => None
 
            },
 
            Self::consume_prefix_expression
 
        )
 
    }
 

	
 
    fn consume_prefix_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        fn parse_prefix_token(token: Option<TokenKind>) -> Option<UnaryOperator> {
 
            use TokenKind as TK;
 
            use UnaryOperator as UO;
 
            match token {
 
                Some(TK::Plus) => Some(UO::Positive),
 
                Some(TK::Minus) => Some(UO::Negative),
 
                Some(TK::Tilde) => Some(UO::BitwiseNot),
 
                Some(TK::Exclamation) => Some(UO::LogicalNot),
 
                _ => None
 
            }
 
        }
 

	
 
        let next = iter.next();
 
        if let Some(operation) = parse_prefix_token(next) {
 
            let operator_span = iter.next_span();
 
            iter.consume();
 

	
 
            let expression = self.consume_prefix_expression(module, iter, ctx)?;
 
            let full_span = InputSpan::from_positions(
 
                operator_span.begin, ctx.heap[expression].full_span().end,
 
            );
 
            Ok(ctx.heap.alloc_unary_expression(|this| UnaryExpression {
 
                this, operator_span, full_span, operation, expression,
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
            }).upcast())
 
        } else if next == Some(TokenKind::PlusPlus) {
 
            return Err(ParseError::new_error_str_at_span(
 
                &module.source, iter.next_span(), "prefix increment is not supported in this language"
 
            ));
 
        } else if next == Some(TokenKind::MinusMinus) {
 
            return Err(ParseError::new_error_str_at_span(
 
                &module.source, iter.next_span(), "prefix decrement is not supported in this language"
 
            ));
 
        } else {
 
            self.consume_postfix_expression(module, iter, ctx)
 
        }
 
    }
 

	
 
    fn consume_postfix_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        fn has_matching_postfix_token(token: Option<TokenKind>) -> bool {
 
            use TokenKind as TK;
 

	
 
            if token.is_none() { return false; }
 
            match token.unwrap() {
 
                TK::PlusPlus | TK::MinusMinus | TK::OpenSquare | TK::Dot => true,
 
                _ => false
 
            }
 
        }
 

	
 
        let mut result = self.consume_primary_expression(module, iter, ctx)?;
 
        let mut next = iter.next();
 
        while has_matching_postfix_token(next) {
 
            let token = next.unwrap();
 
            let mut operator_span = iter.next_span();
 
            iter.consume();
 

	
 
            if token == TokenKind::PlusPlus {
 
                return Err(ParseError::new_error_str_at_span(
 
                    &module.source, operator_span, "postfix increment is not supported in this language"
 
                ));
 
            } else if token == TokenKind::MinusMinus {
 
                return Err(ParseError::new_error_str_at_span(
 
                    &module.source, operator_span, "prefix increment is not supported in this language"
 
                ));
 
            } else if token == TokenKind::OpenSquare {
 
                let subject = result;
 
                let from_index = self.consume_expression(module, iter, ctx)?;
 

	
 
                // Check if we have an indexing or slicing operation
 
                next = iter.next();
 
                if Some(TokenKind::DotDot) == next {
 
                    iter.consume();
 

	
 
                    let to_index = self.consume_expression(module, iter, ctx)?;
 
                    let end_span = consume_token(&module.source, iter, TokenKind::CloseSquare)?;
 
                    operator_span.end = end_span.end;
 
                    let full_span = InputSpan::from_positions(
 
                        ctx.heap[subject].full_span().begin, operator_span.end
 
                    );
 

	
 
                    result = ctx.heap.alloc_slicing_expression(|this| SlicingExpression{
 
                        this,
 
                        slicing_span: operator_span,
 
                        full_span, subject, from_index, to_index,
 
                        parent: ExpressionParent::None,
 
                        unique_id_in_definition: -1,
 
                    }).upcast();
 
                } else if Some(TokenKind::CloseSquare) == next {
 
                    let end_span = consume_token(&module.source, iter, TokenKind::CloseSquare)?;
 
                    operator_span.end = end_span.end;
 

	
 
                    let full_span = InputSpan::from_positions(
 
                        ctx.heap[subject].full_span().begin, operator_span.end
 
                    );
 

	
 
                    result = ctx.heap.alloc_indexing_expression(|this| IndexingExpression{
 
                        this, operator_span, full_span, subject,
 
                        index: from_index,
 
                        parent: ExpressionParent::None,
 
                        unique_id_in_definition: -1,
 
                    }).upcast();
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        &module.source, iter.last_valid_pos(), "unexpected token: expected ']' or '..'"
 
                    ));
 
                }
 
            } else {
 
                // Can be a select expression for struct fields, or a select
 
                // for a tuple element.
 
                debug_assert_eq!(token, TokenKind::Dot);
 
                let subject = result;
 

	
 
                let next = iter.next();
 
                let (select_kind, full_span) = if Some(TokenKind::Integer) == next {
 
                    // Tuple member
 
                    let (index, index_span) = consume_integer_literal(&module.source, iter, &mut self.buffer)?;
 
                    let full_span = InputSpan::from_positions(
 
                        ctx.heap[subject].full_span().begin, index_span.end
 
                    );
 

	
 
                    (SelectKind::TupleMember(index), full_span)
 
                } else if Some(TokenKind::Ident) == next {
 
                    // Struct field
 
                    let field_name = consume_ident_interned(&module.source, iter, ctx)?;
 

	
 
                    let full_span = InputSpan::from_positions(
 
                        ctx.heap[subject].full_span().begin, field_name.span.end
 
                    );
 

	
 
                    (SelectKind::StructField(field_name), full_span)
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        &module.source, iter.last_valid_pos(), "unexpected token: expected integer or identifier"
 
                    ));
 
                };
 

	
 
                result = ctx.heap.alloc_select_expression(|this| SelectExpression{
 
                    this, operator_span, full_span, subject,
 
                    kind: select_kind,
 
                    parent: ExpressionParent::None,
 
                    unique_id_in_definition: -1,
 
                }).upcast();
 
            }
 

	
 
            next = iter.next();
 
        }
 

	
 
        Ok(result)
 
    }
 

	
 
    fn consume_primary_expression(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ExpressionId, ParseError> {
 
        let next = iter.next();
 

	
 
        let result = if next == Some(TokenKind::OpenParen) {
 
            // Something parenthesized. This can mean several things: we have
 
            // a parenthesized expression or we have a tuple literal. They are
 
            // ambiguous when the tuple has one member. But like the tuple type
 
            // parsing we interpret all one-tuples as parenthesized expressions.
 
            //
 
            // Practically (to prevent unnecessary `consume_expression` calls)
 
            // we distinguish the zero-tuple, the parenthesized expression, and
 
            // the N-tuple (for N > 1).
 
            let open_paren_pos = iter.next_start_position();
 
            iter.consume();
 
            let result = if Some(TokenKind::CloseParen) == iter.next() {
 
                // Zero-tuple
 
                let (_, close_paren_pos) = iter.next_positions();
 
                iter.consume();
 

	
 
                let literal_id = ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                    this,
 
                    span: InputSpan::from_positions(open_paren_pos, close_paren_pos),
 
                    value: Literal::Tuple(Vec::new()),
 
                    parent: ExpressionParent::None,
 
                    unique_id_in_definition: -1,
 
                });
 

	
 
                literal_id.upcast()
 
            } else {
 
                // Start by consuming one expression, then check for a comma
 
                let expr_id = self.consume_expression(module, iter, ctx)?;
 
                if Some(TokenKind::Comma) == iter.next() && Some(TokenKind::CloseParen) != iter.peek() {
 
                    // Must be an N-tuple
 
                    iter.consume(); // the comma
 
                    let mut scoped_section = self.expressions.start_section();
 
                    scoped_section.push(expr_id);
 

	
 
                    let mut close_paren_pos = open_paren_pos;
 
                    consume_comma_separated_until(
 
                        TokenKind::CloseParen, &module.source, iter, ctx,
 
                        |_source, iter, ctx| self.consume_expression(module, iter, ctx),
 
                        &mut scoped_section, "an expression", Some(&mut close_paren_pos)
 
                    )?;
 
                    debug_assert!(scoped_section.len() > 1); // peeked token wasn't CloseParen, must be expression
 

	
 
                    let literal_id = ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                        this,
 
                        span: InputSpan::from_positions(open_paren_pos, close_paren_pos),
 
                        value: Literal::Tuple(scoped_section.into_vec()),
 
                        parent: ExpressionParent::None,
 
                        unique_id_in_definition: -1,
 
                    });
 

	
 
                    literal_id.upcast()
 
                } else {
 
                    // Assume we're dealing with a normal expression
 
                    consume_token(&module.source, iter, TokenKind::CloseParen)?;
 

	
 
                    expr_id
 
                }
 
            };
 

	
src/protocol/parser/pass_typing.rs
Show inline comments
 
@@ -410,1536 +410,1560 @@ impl InferenceType {
 
            let part_a = &type_a.parts[idx_a];
 
            let part_b = &type_b.parts[idx_b];
 
            
 
            if part_a == part_b {
 
                let depth_change = part_a.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, part_b.depth_change());
 
                idx_a += 1;
 
                idx_b += 1;
 
                continue;
 
            }
 
            if part_a.is_marker() { idx_a += 1; continue; }
 
            if part_b.is_marker() { idx_b += 1; continue; }
 

	
 
            // Types are not equal and are both not markers
 
            if let Some(depth_change) = Self::infer_part_for_single_type(type_a, &mut idx_a, &type_b.parts, &mut idx_b) {
 
                depth += depth_change;
 
                modified_a = true;
 
                continue;
 
            }
 
            if let Some(depth_change) = Self::infer_part_for_single_type(type_b, &mut idx_b, &type_a.parts, &mut idx_a) {
 
                depth += depth_change;
 
                modified_b = true;
 
                continue;
 
            }
 

	
 
            // Types cannot be inferred in any way: the types must be incompatible
 
            return DualInferenceResult::Incompatible;
 
        }
 

	
 
        if modified_a { type_a.recompute_is_done(); }
 
        if modified_b { type_b.recompute_is_done(); }
 

	
 
        // If here then we completely inferred the subtrees.
 
        match (modified_a, modified_b) {
 
            (false, false) => DualInferenceResult::Neither,
 
            (false, true) => DualInferenceResult::Second,
 
            (true, false) => DualInferenceResult::First,
 
            (true, true) => DualInferenceResult::Both
 
        }
 
    }
 

	
 
    /// Attempts to infer the first subtree based on the template. Like
 
    /// `infer_subtrees_for_both_types`, but now only applying inference to
 
    /// `to_infer` based on the type information in `template`.
 
    ///
 
    /// The `forced_template` flag controls whether `to_infer` is considered
 
    /// valid if it is more specific than the template. When `forced_template`
 
    /// is false, then as long as the `to_infer` and `template` types are
 
    /// compatible the inference will succeed. If `forced_template` is true,
 
    /// then `to_infer` MUST be less specific than `template` (e.g.
 
    /// `IntegerLike` is less specific than `UInt32`)
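    ///
    /// For example (roughly): a `to_infer` of `IntegerLike` is inferred to
    /// `UInt32` from a `UInt32` template, whereas with `forced_template` set a
    /// `to_infer` of `UInt32` against an `IntegerLike` template is rejected,
    /// because it is more specific than the template.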
 
    fn infer_subtree_for_single_type(
 
        to_infer: &mut InferenceType, mut to_infer_idx: usize,
 
        template: &[InferenceTypePart], mut template_idx: usize,
 
        forced_template: bool,
 
    ) -> SingleInferenceResult {
 
        let mut modified = false;
 
        let mut depth = 1;
 

	
 
        while depth > 0 {
 
            let to_infer_part = &to_infer.parts[to_infer_idx];
 
            let template_part = &template[template_idx];
 

	
 
            if to_infer_part == template_part {
 
                let depth_change = to_infer_part.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, template_part.depth_change());
 
                to_infer_idx += 1;
 
                template_idx += 1;
 
                continue;
 
            }
 
            if to_infer_part.is_marker() { to_infer_idx += 1; continue; }
 
            if template_part.is_marker() { template_idx += 1; continue; }
 

	
 
            // Types are not equal and not markers. So check if we can infer 
 
            // anything
 
            if let Some(depth_change) = Self::infer_part_for_single_type(
 
                to_infer, &mut to_infer_idx, template, &mut template_idx
 
            ) {
 
                depth += depth_change;
 
                modified = true;
 
                continue;
 
            }
 

	
 
            if !forced_template {
 
                // We cannot infer anything, but the template may still be
 
                // compatible with the type we're inferring
 
                if let Some(depth_change) = Self::check_part_for_single_type(
 
                    template, &mut template_idx, &to_infer.parts, &mut to_infer_idx
 
                ) {
 
                    depth += depth_change;
 
                    continue;
 
                }
 
            }
 

	
 
            return SingleInferenceResult::Incompatible
 
        }
 

	
 
        if modified {
 
            to_infer.recompute_is_done();
 
            return SingleInferenceResult::Modified;
 
        } else {
 
            return SingleInferenceResult::Unmodified;
 
        }
 
    }
 

	
 
    /// Checks if both types are compatible, doesn't perform any inference
 
    fn check_subtrees(
 
        type_parts_a: &[InferenceTypePart], start_idx_a: usize,
 
        type_parts_b: &[InferenceTypePart], start_idx_b: usize
 
    ) -> bool {
 
        let mut depth = 1;
 
        let mut idx_a = start_idx_a;
 
        let mut idx_b = start_idx_b;
 

	
 
        while depth > 0 {
 
            let part_a = &type_parts_a[idx_a];
 
            let part_b = &type_parts_b[idx_b];
 

	
 
            if part_a == part_b {
 
                let depth_change = part_a.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, part_b.depth_change());
 
                idx_a += 1;
 
                idx_b += 1;
 
                continue;
 
            }
 
            
 
            if part_a.is_marker() { idx_a += 1; continue; }
 
            if part_b.is_marker() { idx_b += 1; continue; }
 

	
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                type_parts_a, &mut idx_a, type_parts_b, &mut idx_b
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                type_parts_b, &mut idx_b, type_parts_a, &mut idx_a
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 

	
 
            return false;
 
        }
 

	
 
        true
 
    }
 

	
 
    /// Performs the conversion of the inference type into a concrete type.
 
    /// The caller must make sure that no unspecified types (e.g. Unknown or
 
    /// IntegerLike) remain in the type before calling this function. It will
 
    /// not clear or check whether the supplied `ConcreteType` is empty; it
 
    /// simply appends to its parts vector.
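 
    /// For example, an inference type whose parts are a marker followed by
 
    /// `Array` and `UInt32` is written as the concrete parts `[Array, UInt32]`:
 
    /// markers are dropped and `string`'s implicit `char` subtype is removed.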
 
    fn write_concrete_type(&self, concrete_type: &mut ConcreteType) {
 
        use InferenceTypePart as ITP;
 
        use ConcreteTypePart as CTP;
 

	
 
        // Make sure inference type is specified but concrete type is not yet specified
 
        debug_assert!(!self.parts.is_empty());
 
        concrete_type.parts.reserve(self.parts.len());
 

	
 
        let mut idx = 0;
 
        while idx < self.parts.len() {
 
            let part = &self.parts[idx];
 
            let converted_part = match part {
 
                ITP::Marker(_) => {
 
                    // Markers are removed when writing to the concrete type.
 
                    idx += 1;
 
                    continue;
 
                },
 
                ITP::Unknown | ITP::NumberLike |
 
                ITP::IntegerLike | ITP::ArrayLike | ITP::PortLike => {
 
                    // Should not happen if type inferencing works correctly: we
 
                    // should have returned a programmer-readable error or have
 
                    // inferred all types.
 
                    unreachable!("attempted to convert inference type part {:?} into concrete type", part);
 
                },
 
                ITP::Void => CTP::Void,
 
                ITP::Message => CTP::Message,
 
                ITP::Bool => CTP::Bool,
 
                ITP::UInt8 => CTP::UInt8,
 
                ITP::UInt16 => CTP::UInt16,
 
                ITP::UInt32 => CTP::UInt32,
 
                ITP::UInt64 => CTP::UInt64,
 
                ITP::SInt8 => CTP::SInt8,
 
                ITP::SInt16 => CTP::SInt16,
 
                ITP::SInt32 => CTP::SInt32,
 
                ITP::SInt64 => CTP::SInt64,
 
                ITP::Character => CTP::Character,
 
                ITP::String => {
 
                    // Inferred type has a 'char' subtype to simplify array
 
                    // checking; we remove it here.
 
                    debug_assert_eq!(self.parts[idx + 1], InferenceTypePart::Character);
 
                    idx += 1;
 
                    CTP::String
 
                },
 
                ITP::Array => CTP::Array,
 
                ITP::Slice => CTP::Slice,
 
                ITP::Input => CTP::Input,
 
                ITP::Output => CTP::Output,
 
                ITP::Tuple(num) => CTP::Tuple(*num),
 
                ITP::Instance(id, num) => CTP::Instance(*id, *num),
 
            };
 

	
 
            concrete_type.parts.push(converted_part);
 
            idx += 1;
 
        }
 
    }
 

	
 
    /// Writes a human-readable version of the type to a string. This is used
 
    /// to display error messages
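 
    /// (e.g. an array with an unknown element type is rendered as `?[]`, and
 
    /// an unresolved port as `portlike<?>`).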
 
    fn write_display_name(
 
        buffer: &mut String, heap: &Heap, parts: &[InferenceTypePart], mut idx: usize
 
    ) -> usize {
 
        use InferenceTypePart as ITP;
 

	
 
        match &parts[idx] {
 
            ITP::Marker(_marker_idx) => {
 
                if debug_log_enabled!() {
 
                    buffer.push_str(&format!("{{Marker:{}}}", *_marker_idx));
 
                }
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
            },
 
            ITP::Unknown => buffer.push_str("?"),
 
            ITP::NumberLike => buffer.push_str("numberlike"),
 
            ITP::IntegerLike => buffer.push_str("integerlike"),
 
            ITP::ArrayLike => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[?]");
 
            },
 
            ITP::PortLike => {
 
                buffer.push_str("portlike<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            }
 
            ITP::Void => buffer.push_str("void"),
 
            ITP::Bool => buffer.push_str(KW_TYPE_BOOL_STR),
 
            ITP::UInt8 => buffer.push_str(KW_TYPE_UINT8_STR),
 
            ITP::UInt16 => buffer.push_str(KW_TYPE_UINT16_STR),
 
            ITP::UInt32 => buffer.push_str(KW_TYPE_UINT32_STR),
 
            ITP::UInt64 => buffer.push_str(KW_TYPE_UINT64_STR),
 
            ITP::SInt8 => buffer.push_str(KW_TYPE_SINT8_STR),
 
            ITP::SInt16 => buffer.push_str(KW_TYPE_SINT16_STR),
 
            ITP::SInt32 => buffer.push_str(KW_TYPE_SINT32_STR),
 
            ITP::SInt64 => buffer.push_str(KW_TYPE_SINT64_STR),
 
            ITP::Character => buffer.push_str(KW_TYPE_CHAR_STR),
 
            ITP::String => {
 
                buffer.push_str(KW_TYPE_STRING_STR);
 
                idx += 1; // skip the 'char' subtype
 
            },
 
            ITP::Message => {
 
                buffer.push_str(KW_TYPE_MESSAGE_STR);
 
                buffer.push('<');
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Array => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[]");
 
            },
 
            ITP::Slice => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[..]");
 
            },
 
            ITP::Input => {
 
                buffer.push_str(KW_TYPE_IN_PORT_STR);
 
                buffer.push('<');
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Output => {
 
                buffer.push_str(KW_TYPE_OUT_PORT_STR);
 
                buffer.push('<');
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Tuple(num_sub) => {
 
                buffer.push('(');
 
                if *num_sub > 0 {
 
                    idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    for _sub_idx in 1..*num_sub {
 
                        buffer.push_str(", ");
 
                        idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    }
 
                }
 
                buffer.push(')');
 
            }
 
            ITP::Instance(definition_id, num_sub) => {
 
                let definition = &heap[*definition_id];
 
                buffer.push_str(definition.identifier().value.as_str());
 
                if *num_sub > 0 {
 
                    buffer.push('<');
 
                    idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    for _sub_idx in 1..*num_sub {
 
                        buffer.push_str(", ");
 
                        idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    }
 
                    buffer.push('>');
 
                }
 
            },
 
        }
 

	
 
        idx
 
    }
 

	
 
    /// Returns the display name of a (part of) the type tree. Will allocate a
 
    /// string.
 
    fn partial_display_name(heap: &Heap, parts: &[InferenceTypePart]) -> String {
 
        let mut buffer = String::with_capacity(parts.len() * 6);
 
        Self::write_display_name(&mut buffer, heap, parts, 0);
 
        buffer
 
    }
 

	
 
    /// Returns the display name of the full type tree. Will allocate a string.
 
    fn display_name(&self, heap: &Heap) -> String {
 
        Self::partial_display_name(heap, &self.parts)
 
    }
 
}
 

	
 
impl Default for InferenceType {
 
    fn default() -> Self {
 
        Self{
 
            has_marker: false,
 
            is_done: false,
 
            parts: Vec::new(),
 
        }
 
    }
 
}
 

	
 
/// Iterator over the subtrees that follow a marker in an `InferenceType`
 
/// instance. Returns immutable slices over the internal parts
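 
/// (e.g. for the parts `[Marker(0), UInt32, Marker(1), Array, Bool]` the
 
/// iterator yields `(0, [UInt32])` followed by `(1, [Array, Bool])`).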
 
struct InferenceTypeMarkerIter<'a> {
 
    parts: &'a [InferenceTypePart],
 
    idx: usize,
 
}
 

	
 
impl<'a> InferenceTypeMarkerIter<'a> {
 
    fn new(parts: &'a [InferenceTypePart]) -> Self {
 
        Self{ parts, idx: 0 }
 
    }
 
}
 

	
 
impl<'a> Iterator for InferenceTypeMarkerIter<'a> {
 
    type Item = (u32, &'a [InferenceTypePart]);
 

	
 
    fn next(&mut self) -> Option<Self::Item> {
 
        // Iterate until we find a marker
 
        while self.idx < self.parts.len() {
 
            if let InferenceTypePart::Marker(marker) = self.parts[self.idx] {
 
                // Found a marker, find the subtree end
 
                let start_idx = self.idx + 1;
 
                let end_idx = InferenceType::find_subtree_end_idx(self.parts, start_idx);
 

	
 
                // Modify internal index, then return items
 
                self.idx = end_idx;
 
                return Some((marker, &self.parts[start_idx..end_idx]));
 
            }
 

	
 
            self.idx += 1;
 
        }
 

	
 
        None
 
    }
 
}
 

	
 
#[derive(Debug, PartialEq, Eq)]
 
enum DualInferenceResult {
 
    Neither,        // neither argument is clarified
 
    First,          // first argument is clarified using the second one
 
    Second,         // second argument is clarified using the first one
 
    Both,           // both arguments are clarified
 
    Incompatible,   // types are incompatible: programmer error
 
}
 

	
 
impl DualInferenceResult {
 
    fn modified_lhs(&self) -> bool {
 
        match self {
 
            DualInferenceResult::First | DualInferenceResult::Both => true,
 
            _ => false
 
        }
 
    }
 
    fn modified_rhs(&self) -> bool {
 
        match self {
 
            DualInferenceResult::Second | DualInferenceResult::Both => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, PartialEq, Eq)]
 
enum SingleInferenceResult {
 
    Unmodified,
 
    Modified,
 
    Incompatible
 
}
 

	
 
enum DefinitionType{
 
    Component(ComponentDefinitionId),
 
    Function(FunctionDefinitionId),
 
}
 

	
 
impl DefinitionType {
 
    fn definition_id(&self) -> DefinitionId {
 
        match self {
 
            DefinitionType::Component(v) => v.upcast(),
 
            DefinitionType::Function(v) => v.upcast(),
 
        }
 
    }
 
}
 

	
 
pub(crate) struct ResolveQueueElement {
 
    // Note that using the `definition_id` and the `monomorph_idx` one may
 
    // query the type table for the full procedure type, thereby retrieving
 
    // the polymorphic arguments to the procedure.
 
    pub(crate) root_id: RootId,
 
    pub(crate) definition_id: DefinitionId,
 
    pub(crate) reserved_monomorph_idx: i32,
 
}
 

	
 
pub(crate) type ResolveQueue = Vec<ResolveQueueElement>;
 

	
 
#[derive(Clone)]
 
struct InferenceExpression {
 
    expr_type: InferenceType,       // result type from expression
 
    expr_id: ExpressionId,          // expression that is evaluated
 
    field_or_monomorph_idx: i32,    // index of field, or index into the monomorph array in the type table
 
    extra_data_idx: i32,            // index of extra data needed for inference
 
}
 

	
 
impl Default for InferenceExpression {
 
    fn default() -> Self {
 
        Self{
 
            expr_type: InferenceType::default(),
 
            expr_id: ExpressionId::new_invalid(),
 
            field_or_monomorph_idx: -1,
 
            extra_data_idx: -1,
 
        }
 
    }
 
}
 

	
 
/// This particular visitor recurses depth-first into the AST and ensures
 
/// that all expressions have the appropriate types.
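 
///
 
/// Rough usage, as implied by the methods below: `queue_module_definitions`
 
/// seeds a `ResolveQueue` with all non-polymorphic procedures of a module,
 
/// after which `handle_module_definition` is called for each queued element;
 
/// when inference encounters a concrete instantiation of a not-yet-resolved
 
/// procedure it pushes a new element onto the queue.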
 
pub(crate) struct PassTyping {
 
    // Current definition we're typechecking.
 
    reserved_idx: i32,
 
    definition_type: DefinitionType,
 
    poly_vars: Vec<ConcreteType>,
 
    // Buffers for iteration over various types
 
    var_buffer: ScopedBuffer<VariableId>,
 
    expr_buffer: ScopedBuffer<ExpressionId>,
 
    stmt_buffer: ScopedBuffer<StatementId>,
 
    bool_buffer: ScopedBuffer<bool>,
 
    // Mapping from parser type to inferred type. We attempt to continue to
 
    // specify these types until we're stuck or we've fully determined the type.
 
    var_types: HashMap<VariableId, VarData>,            // types of variables
 
    expr_types: Vec<InferenceExpression>,                     // will be transferred to type table at end
 
    extra_data: Vec<ExtraData>,       // data for polymorph inference
 
    // Keeping track of which expressions need to be reinferred because the
 
    // expressions they're linked to made progress on an associated type
 
    expr_queued: DequeSet<i32>,
 
}
 

	
 
// TODO: @Rename, this is used for a lot of type inferencing. It seems like
 
//  there is a different underlying architecture waiting to surface.
 
struct ExtraData {
 
    expr_id: ExpressionId, // the expression with which this data is associated
 
    definition_id: DefinitionId, // the definition, only used for user feedback
 
    /// Progression of polymorphic variables (if any)
 
    poly_vars: Vec<InferenceType>,
 
    /// Progression of types of call arguments or struct members
 
    embedded: Vec<InferenceType>,
 
    returned: InferenceType,
 
}
 

	
 
impl Default for ExtraData {
 
    fn default() -> Self {
 
        Self{
 
            expr_id: ExpressionId::new_invalid(),
 
            definition_id: DefinitionId::new_invalid(),
 
            poly_vars: Vec::new(),
 
            embedded: Vec::new(),
 
            returned: InferenceType::default(),
 
        }
 
    }
 
}
 

	
 
struct VarData {
 
    /// Type of the variable
 
    var_type: InferenceType,
 
    /// VariableExpressions that use the variable
 
    used_at: Vec<ExpressionId>,
 
    /// For channel statements we link to the other variable such that when one
 
    /// channel's interior type is resolved, we can also resolve the other one.
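 
    /// (e.g. once the payload type of one end of a channel is inferred, the
 
    /// payload type of the other end is fixed as well).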
 
    linked_var: Option<VariableId>,
 
}
 

	
 
impl VarData {
 
    fn new_channel(var_type: InferenceType, other_port: VariableId) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: Some(other_port) }
 
    }
 
    fn new_local(var_type: InferenceType) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: None }
 
    }
 
}
 

	
 
impl PassTyping {
 
    pub(crate) fn new() -> Self {
 
        PassTyping {
 
            reserved_idx: -1,
 
            definition_type: DefinitionType::Function(FunctionDefinitionId::new_invalid()),
 
            poly_vars: Vec::new(),
 
            var_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAPACITY),
 
            expr_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAPACITY),
 
            stmt_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAPACITY),
 
            bool_buffer: ScopedBuffer::with_capacity(16),
 
            var_types: HashMap::new(),
 
            expr_types: Vec::new(),
 
            extra_data: Vec::new(),
 
            expr_queued: DequeSet::new(),
 
        }
 
    }
 

	
 
    pub(crate) fn queue_module_definitions(ctx: &mut Ctx, queue: &mut ResolveQueue) {
 
        debug_assert_eq!(ctx.module().phase, ModuleCompilationPhase::ValidatedAndLinked);
 
        let root_id = ctx.module().root_id;
 
        let root = &ctx.heap.protocol_descriptions[root_id];
 
        for definition_id in &root.definitions {
 
            let definition = &ctx.heap[*definition_id];
 

	
 
            let first_concrete_part = match definition {
 
                Definition::Function(definition) => {
 
                    if definition.poly_vars.is_empty() {
 
                        Some(ConcreteTypePart::Function(*definition_id, 0))
 
                    } else {
 
                        None
 
                    }
 
                }
 
                Definition::Component(definition) => {
 
                    if definition.poly_vars.is_empty() {
 
                        Some(ConcreteTypePart::Component(*definition_id, 0))
 
                    } else {
 
                        None
 
                    }
 
                },
 
                Definition::Enum(_) | Definition::Struct(_) | Definition::Union(_) => None,
 
            };
 

	
 
            if let Some(first_concrete_part) = first_concrete_part {
 
                let concrete_type = ConcreteType{ parts: vec![first_concrete_part] };
 
                let reserved_idx = ctx.types.reserve_procedure_monomorph_index(definition_id, concrete_type);
 
                queue.push(ResolveQueueElement{
 
                    root_id,
 
                    definition_id: *definition_id,
 
                    reserved_monomorph_idx: reserved_idx,
 
                })
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn handle_module_definition(
 
        &mut self, ctx: &mut Ctx, queue: &mut ResolveQueue, element: ResolveQueueElement
 
    ) -> VisitorResult {
 
        self.reset();
 
        debug_assert_eq!(ctx.module().root_id, element.root_id);
 
        debug_assert!(self.poly_vars.is_empty());
 

	
 
        // Prepare for visiting the definition
 
        self.reserved_idx = element.reserved_monomorph_idx;
 

	
 
        let proc_base = ctx.types.get_base_definition(&element.definition_id).unwrap();
 
        if proc_base.is_polymorph {
 
            let monomorph = ctx.types.get_monomorph(element.reserved_monomorph_idx);
 
            for poly_arg in monomorph.concrete_type.embedded_iter(0) {
 
                self.poly_vars.push(ConcreteType{ parts: Vec::from(poly_arg) });
 
            }
 
        }
 

	
 
        // Visit the definition, setting up the type resolving process, then
 
        // (attempt to) resolve all types
 
        self.visit_definition(ctx, element.definition_id)?;
 
        self.resolve_types(ctx, queue)?;
 
        Ok(())
 
    }
 

	
 
    fn reset(&mut self) {
 
        self.reserved_idx = -1;
 
        self.definition_type = DefinitionType::Function(FunctionDefinitionId::new_invalid());
 
        self.poly_vars.clear();
 
        self.var_types.clear();
 
        self.expr_types.clear();
 
        self.extra_data.clear();
 
        self.expr_queued.clear();
 
    }
 
}
 

	
 
impl Visitor for PassTyping {
 
    // Definitions
 

	
 
    fn visit_component_definition(&mut self, ctx: &mut Ctx, id: ComponentDefinitionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Component(id);
 

	
 
        let comp_def = &ctx.heap[id];
 
        debug_assert_eq!(comp_def.poly_vars.len(), self.poly_vars.len(), "component polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting component '{}': {}", comp_def.identifier.value.as_str(), id.0.index);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        // Reserve data for expression types
 
        debug_assert!(self.expr_types.is_empty());
 
        self.expr_types.resize(comp_def.num_expressions_in_body as usize, Default::default());
 

	
 
        // Visit parameters
 
        let section = self.var_buffer.start_section_initialized(comp_def.parameters.as_slice());
 
        for param_id in section.iter_copied() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, true);
 
            debug_assert!(var_type.is_done, "expected component arguments to be concrete types");
 
            self.var_types.insert(param_id, VarData::new_local(var_type));
 
        }
 
        section.forget();
 

	
 
        // Visit the body and all of its expressions
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionDefinitionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Function(id);
 

	
 
        let func_def = &ctx.heap[id];
 
        debug_assert_eq!(func_def.poly_vars.len(), self.poly_vars.len(), "function polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting function '{}': {}", func_def.identifier.value.as_str(), id.0.index);
 
        if debug_log_enabled!() {
 
            debug_log!("Polymorphic variables:");
 
            for (_idx, poly_var) in self.poly_vars.iter().enumerate() {
 
                let mut infer_type_parts = Vec::new();
 
                Self::determine_inference_type_from_concrete_type(
 
                    &mut infer_type_parts, &poly_var.parts
 
                );
 
                let _infer_type = InferenceType::new(false, true, infer_type_parts);
 
                debug_log!(" - [{:03}] {:?}", _idx, _infer_type.display_name(&ctx.heap));
 
            }
 
        }
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        // Reserve data for expression types
 
        debug_assert!(self.expr_types.is_empty());
 
        self.expr_types.resize(func_def.num_expressions_in_body as usize, Default::default());
 

	
 
        // Visit parameters
 
        let section = self.var_buffer.start_section_initialized(func_def.parameters.as_slice());
 
        for param_id in section.iter_copied() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, true);
 
            debug_assert!(var_type.is_done, "expected function arguments to be concrete types");
 
            self.var_types.insert(param_id, VarData::new_local(var_type));
 
        }
 
        section.forget();
 

	
 
        // Visit all of the expressions within the body
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    // Statements
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        // Transfer statements for traversal
 
        let block = &ctx.heap[id];
 

	
 
        let section = self.stmt_buffer.start_section_initialized(block.statements.as_slice());
 
        for stmt_id in section.iter_copied() {
 
            self.visit_stmt(ctx, stmt_id)?;
 
        }
 
        section.forget();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, ctx: &mut Ctx, id: MemoryStatementId) -> VisitorResult {
 
        let memory_stmt = &ctx.heap[id];
 
        let initial_expr_id = memory_stmt.initial_expr;
 

	
 
        // Setup memory statement inference
 
        let local = &ctx.heap[memory_stmt.variable];
 
        let var_type = self.determine_inference_type_from_parser_type_elements(&local.parser_type.elements, true);
 
        self.var_types.insert(memory_stmt.variable, VarData::new_local(var_type));
 

	
 
        // Process the initial value
 
        self.visit_assignment_expr(ctx, initial_expr_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, ctx: &mut Ctx, id: ChannelStatementId) -> VisitorResult {
 
        let channel_stmt = &ctx.heap[id];
 

	
 
        let from_local = &ctx.heap[channel_stmt.from];
 
        let from_var_type = self.determine_inference_type_from_parser_type_elements(&from_local.parser_type.elements, true);
 
        self.var_types.insert(from_local.this, VarData::new_channel(from_var_type, channel_stmt.to));
 

	
 
        let to_local = &ctx.heap[channel_stmt.to];
 
        let to_var_type = self.determine_inference_type_from_parser_type_elements(&to_local.parser_type.elements, true);
 
        self.var_types.insert(to_local.this, VarData::new_channel(to_var_type, channel_stmt.from));
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        let labeled_stmt = &ctx.heap[id];
 
        let substmt_id = labeled_stmt.body;
 
        self.visit_stmt(ctx, substmt_id)
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        let if_stmt = &ctx.heap[id];
 

	
 
        let true_body_id = if_stmt.true_body;
 
        let false_body_id = if_stmt.false_body;
 
        let test_expr_id = if_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_block_stmt(ctx, true_body_id)?;
 
        if let Some(false_body_id) = false_body_id {
 
            self.visit_block_stmt(ctx, false_body_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_while_stmt(&mut self, ctx: &mut Ctx, id: WhileStatementId) -> VisitorResult {
 
        let while_stmt = &ctx.heap[id];
 

	
 
        let body_id = while_stmt.body;
 
        let test_expr_id = while_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_block_stmt(ctx, body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        let sync_stmt = &ctx.heap[id];
 
        let body_id = sync_stmt.body;
 

	
 
        self.visit_block_stmt(ctx, body_id)
 
    }
 

	
 
    fn visit_fork_stmt(&mut self, ctx: &mut Ctx, id: ForkStatementId) -> VisitorResult {
 
        let fork_stmt = &ctx.heap[id];
 
        let left_body_id = fork_stmt.left_body;
 
        let right_body_id = fork_stmt.right_body;
 

	
 
        self.visit_block_stmt(ctx, left_body_id)?;
 
        if let Some(right_body_id) = right_body_id {
 
            self.visit_block_stmt(ctx, right_body_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_select_stmt(&mut self, ctx: &mut Ctx, id: SelectStatementId) -> VisitorResult {
 
        let select_stmt = &ctx.heap[id];
 

	
 
        let mut section = self.stmt_buffer.start_section();
 
        let num_cases = select_stmt.cases.len();
 

	
 
        for case in &select_stmt.cases {
 
            section.push(case.guard);
 
            section.push(case.block.upcast());
 
        }
 

	
 
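        // Visit each case's guard before its body, so that any variables the
 
        // guard introduces are typed before the body that may use them is
 
        // visited.
 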
        for case_index in 0..num_cases {
 
            let base_index = 2 * case_index;
 
            let guard_stmt_id = section[base_index    ];
 
            let block_stmt_id = section[base_index + 1];
 

	
 
            self.visit_stmt(ctx, guard_stmt_id)?;
 
            self.visit_stmt(ctx, block_stmt_id)?;
 
        }
 
        section.forget();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        let return_stmt = &ctx.heap[id];
 
        debug_assert_eq!(return_stmt.expressions.len(), 1);
 
        let expr_id = return_stmt.expressions[0];
 

	
 
        self.visit_expr(ctx, expr_id)
 
    }
 

	
 
    fn visit_new_stmt(&mut self, ctx: &mut Ctx, id: NewStatementId) -> VisitorResult {
 
        let new_stmt = &ctx.heap[id];
 
        let call_expr_id = new_stmt.expression;
 

	
 
        self.visit_call_expr(ctx, call_expr_id)
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        let expr_stmt = &ctx.heap[id];
 
        let subexpr_id = expr_stmt.expression;
 

	
 
        self.visit_expr(ctx, subexpr_id)
 
    }
 

	
 
    // Expressions
 

	
 
    fn visit_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let assign_expr = &ctx.heap[id];
 
        let left_expr_id = assign_expr.left;
 
        let right_expr_id = assign_expr.right;
 

	
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.visit_expr(ctx, right_expr_id)?;
 

	
 
        self.progress_assignment_expr(ctx, id)
 
    }
 

	
 
    fn visit_binding_expr(&mut self, ctx: &mut Ctx, id: BindingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let binding_expr = &ctx.heap[id];
 
        let bound_to_id = binding_expr.bound_to;
 
        let bound_from_id = binding_expr.bound_from;
 

	
 
        self.visit_expr(ctx, bound_to_id)?;
 
        self.visit_expr(ctx, bound_from_id)?;
 

	
 
        self.progress_binding_expr(ctx, id)
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let conditional_expr = &ctx.heap[id];
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_expr(ctx, true_expr_id)?;
 
        self.visit_expr(ctx, false_expr_id)?;
 

	
 
        self.progress_conditional_expr(ctx, id)
 
    }
 

	
 
    fn visit_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let binary_expr = &ctx.heap[id];
 
        let lhs_expr_id = binary_expr.left;
 
        let rhs_expr_id = binary_expr.right;
 

	
 
        self.visit_expr(ctx, lhs_expr_id)?;
 
        self.visit_expr(ctx, rhs_expr_id)?;
 

	
 
        self.progress_binary_expr(ctx, id)
 
    }
 

	
 
    fn visit_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let unary_expr = &ctx.heap[id];
 
        let arg_expr_id = unary_expr.expression;
 

	
 
        self.visit_expr(ctx, arg_expr_id)?;
 

	
 
        self.progress_unary_expr(ctx, id)
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let indexing_expr = &ctx.heap[id];
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, index_expr_id)?;
 

	
 
        self.progress_indexing_expr(ctx, id)
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let slicing_expr = &ctx.heap[id];
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, from_expr_id)?;
 
        self.visit_expr(ctx, to_expr_id)?;
 

	
 
        self.progress_slicing_expr(ctx, id)
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let select_expr = &ctx.heap[id];
 
        let subject_expr_id = select_expr.subject;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        self.progress_select_expr(ctx, id)
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let literal_expr = &ctx.heap[id];
 
        match &literal_expr.value {
 
            Literal::Null | Literal::False | Literal::True |
 
            Literal::Integer(_) | Literal::Character(_) | Literal::String(_) => {
 
                // No subexpressions
 
            },
 
            Literal::Struct(literal) => {
 
                let mut expr_ids = self.expr_buffer.start_section();
 
                for field in &literal.fields {
 
                    expr_ids.push(field.value);
 
                }
 
                self.insert_initial_struct_polymorph_data(ctx, id);
 

	
 
                for expr_id in expr_ids.iter_copied() {
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 
                expr_ids.forget();
 
            },
 
            Literal::Enum(_) => {
 
                // Enumerations do not carry any subexpressions, but may still
 
                // have a user-defined polymorphic marker variable. For this 
 
                // reason we may still have to apply inference to this 
 
                // polymorphic variable
 
                self.insert_initial_enum_polymorph_data(ctx, id);
 
            },
 
            Literal::Union(literal) => {
 
                // May carry subexpressions and polymorphic arguments
 
                let expr_ids = self.expr_buffer.start_section_initialized(literal.values.as_slice());
 
                self.insert_initial_union_polymorph_data(ctx, id);
 

	
 
                for expr_id in expr_ids.iter_copied() {
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 
                expr_ids.forget();
 
            },
 
            Literal::Array(expressions) | Literal::Tuple(expressions) => {
 
                let expr_ids = self.expr_buffer.start_section_initialized(expressions.as_slice());
 
                for expr_id in expr_ids.iter_copied() {
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 
                expr_ids.forget();
 
            }
 
        }
 

	
 
        self.progress_literal_expr(ctx, id)
 
    }
 

	
 
    fn visit_cast_expr(&mut self, ctx: &mut Ctx, id: CastExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let cast_expr = &ctx.heap[id];
 
        let subject_expr_id = cast_expr.subject;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        self.progress_cast_expr(ctx, id)
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        self.insert_initial_call_polymorph_data(ctx, id);
 

	
 
        // By default we set the polymorph idx for calls to 0. If the call ends
 
        // up not being a polymorphic one, then we will select the default
 
        // expression types in the type table
 
        let call_expr = &ctx.heap[id];
 
        self.expr_types[call_expr.unique_id_in_definition as usize].field_or_monomorph_idx = 0;
 

	
 
        // Visit all arguments
 
        let expr_ids = self.expr_buffer.start_section_initialized(call_expr.arguments.as_slice());
 
        for arg_expr_id in expr_ids.iter_copied() {
 
            self.visit_expr(ctx, arg_expr_id)?;
 
        }
 
        expr_ids.forget();
 

	
 
        self.progress_call_expr(ctx, id)
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let var_expr = &ctx.heap[id];
 
        debug_assert!(var_expr.declaration.is_some());
 

	
 
        // Not pretty: if a binding expression, then this is the first time we
 
        // encounter the variable, so we still need to insert the variable data.
 
        let declaration = &ctx.heap[var_expr.declaration.unwrap()];
 
        if !self.var_types.contains_key(&declaration.this)  {
 
            debug_assert!(declaration.kind == VariableKind::Binding);
 
            let var_type = self.determine_inference_type_from_parser_type_elements(
 
                &declaration.parser_type.elements, true
 
            );
 
            self.var_types.insert(declaration.this, VarData{
 
                var_type,
 
                used_at: vec![upcast_id],
 
                linked_var: None
 
            });
 
        } else {
 
            let var_data = self.var_types.get_mut(&declaration.this).unwrap();
 
            var_data.used_at.push(upcast_id);
 
        }
 

	
 
        self.progress_variable_expr(ctx, id)
 
    }
 
}
 

	
 
impl PassTyping {
 
    #[allow(dead_code)] // used when debug flag at the top of this file is true.
 
    fn debug_get_display_name(&self, ctx: &Ctx, expr_id: ExpressionId) -> String {
 
        let expr_idx = ctx.heap[expr_id].get_unique_id_in_definition();
 
        let expr_type = &self.expr_types[expr_idx as usize].expr_type;
 
        expr_type.display_name(&ctx.heap)
 
    }
 

	
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError> {
 
        // Keep inferring until we can no longer make any progress
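 
        // (drain the queue of expressions, then force any leftover bare
 
        // integer literals to s32 and requeue them; repeat until nothing new
 
        // gets queued)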
 
        while !self.expr_queued.is_empty() {
 
            // Make as much progress as possible without forced integer
 
            // inference.
 
            while !self.expr_queued.is_empty() {
 
                let next_expr_idx = self.expr_queued.pop_front().unwrap();
 
                self.progress_expr(ctx, next_expr_idx)?;
 
            }
 

	
 
            // Nothing is queued anymore. However we might have integer literals
 
            // whose type cannot be inferred. For convenience's sake we'll
 
            // infer these to be s32.
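 
            // (For example, an integer literal whose width is never pinned
 
            // down by any surrounding expression remains IntegerLike and is
 
            // forced to s32 here.)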
 
            for (infer_expr_idx, infer_expr) in self.expr_types.iter_mut().enumerate() {
 
                let expr_type = &mut infer_expr.expr_type;
 
                if !expr_type.is_done && expr_type.parts.len() == 1 && expr_type.parts[0] == InferenceTypePart::IntegerLike {
 
                    // Force integer type to s32
 
                    expr_type.parts[0] = InferenceTypePart::SInt32;
 
                    expr_type.is_done = true;
 

	
 
                    // Requeue expression (and its parent, if it exists)
 
                    self.expr_queued.push_back(infer_expr_idx as i32);
 

	
 
                    if let Some(parent_expr) = ctx.heap[infer_expr.expr_id].parent_expr_id() {
 
                        let parent_idx = ctx.heap[parent_expr].get_unique_id_in_definition();
 
                        self.expr_queued.push_back(parent_idx);
 
                    }
 
                }
 
            }
 
        }
 

	
 
        // Helper for transferring polymorphic variables to concrete types and
 
        // checking if they're completely specified
 
        fn inference_type_to_concrete_type(
 
            ctx: &Ctx, expr_id: ExpressionId, inference: &Vec<InferenceType>,
 
            first_concrete_part: ConcreteTypePart,
 
        ) -> Result<ConcreteType, ParseError> {
 
            // Prepare storage vector
 
            let mut num_inference_parts = 0;
 
            for inference_type in inference {
 
                num_inference_parts += inference_type.parts.len();
 
            }
 

	
 
            let mut concrete_type = ConcreteType{
 
                parts: Vec::with_capacity(1 + num_inference_parts),
 
            };
 
            concrete_type.parts.push(first_concrete_part);
 

	
 
            // Go through all polymorphic arguments and add them to the concrete
 
            // types.
 
            for (poly_idx, poly_type) in inference.iter().enumerate() {
 
                if !poly_type.is_done {
 
                    let expr = &ctx.heap[expr_id];
 
                    let definition = match expr {
 
                        Expression::Call(expr) => expr.definition,
 
                        Expression::Literal(expr) => match &expr.value {
 
                            Literal::Enum(lit) => lit.definition,
 
                            Literal::Union(lit) => lit.definition,
 
                            Literal::Struct(lit) => lit.definition,
 
                            _ => unreachable!()
 
                        },
 
                        _ => unreachable!(),
 
                    };
 
                    let poly_vars = ctx.heap[definition].poly_vars();
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module().source, expr.operation_span(), format!(
 
                            "could not fully infer the type of polymorphic variable '{}' of this expression (got '{}')",
 
                            poly_vars[poly_idx].value.as_str(), poly_type.display_name(&ctx.heap)
 
                        )
 
                    ));
 
                }
 

	
 
                poly_type.write_concrete_type(&mut concrete_type);
 
            }
 

	
 
            Ok(concrete_type)
 
        }
 

	
 
        // Inference is now done. But we may still have uninferred types. So we
 
        // check for these.
 
        for infer_expr in self.expr_types.iter_mut() {
 
            if !infer_expr.expr_type.is_done {
 
                let expr = &ctx.heap[infer_expr.expr_id];
 
                return Err(ParseError::new_error_at_span(
 
                    &ctx.module().source, expr.full_span(), format!(
 
                        "could not fully infer the type of this expression (got '{}')",
 
                        infer_expr.expr_type.display_name(&ctx.heap)
 
                    )
 
                ));
 
            }
 

	
 
            // Expression is fine, check if any extra data is attached
 
            if infer_expr.extra_data_idx < 0 { continue; }
 

	
 
            // Extra data is attached, perform typechecking and transfer
 
            // resolved information to the expression
 
            let extra_data = &self.extra_data[infer_expr.extra_data_idx as usize];
 

	
 
            // Note that only call and literal expressions need full inference.
 
            // Select expressions also use `extra_data`, but only for temporary
 
            // storage of the struct type whose field it is selecting.
 
            match &ctx.heap[extra_data.expr_id] {
 
                Expression::Call(expr) => {
 
                    // If this is a user-defined function or component (i.e.
 
                    // not a builtin), construct the first part of the concrete
 
                    // type; builtin calls are skipped below.
 
                    let first_concrete_part = if expr.method == Method::UserFunction {
 
                        ConcreteTypePart::Function(expr.definition, extra_data.poly_vars.len() as u32)
 
                    } else if expr.method == Method::UserComponent {
 
                        ConcreteTypePart::Component(expr.definition, extra_data.poly_vars.len() as u32)
 
                    } else {
 
                        // Builtin function
 
                        continue;
 
                    };
 

	
 
                    let definition_id = expr.definition;
 
                    let concrete_type = inference_type_to_concrete_type(
 
                        ctx, extra_data.expr_id, &extra_data.poly_vars, first_concrete_part
 
                    )?;
 

	
 
                    match ctx.types.get_procedure_monomorph_index(&definition_id, &concrete_type.parts) {
 
                        Some(reserved_idx) => {
 
                            // Already typechecked, or already put into the resolve queue
 
                            infer_expr.field_or_monomorph_idx = reserved_idx;
 
                        },
 
                        None => {
 
                            // Not typechecked yet, so add an entry in the queue
 
                            let reserved_idx = ctx.types.reserve_procedure_monomorph_index(&definition_id, concrete_type);
 
                            infer_expr.field_or_monomorph_idx = reserved_idx;
 
                            queue.push(ResolveQueueElement {
 
                                root_id: ctx.heap[definition_id].defined_in(),
 
                                definition_id,
 
                                reserved_monomorph_idx: reserved_idx,
 
                            });
 
                        }
 
                    }
 
                },
 
                Expression::Literal(expr) => {
 
                    let definition_id = match &expr.value {
 
                        Literal::Enum(lit) => lit.definition,
 
                        Literal::Union(lit) => lit.definition,
 
                        Literal::Struct(lit) => lit.definition,
 
                        _ => unreachable!(),
 
                    };
 
                    let first_concrete_part = ConcreteTypePart::Instance(definition_id, extra_data.poly_vars.len() as u32);
 
                    let concrete_type = inference_type_to_concrete_type(
 
                        ctx, extra_data.expr_id, &extra_data.poly_vars, first_concrete_part
 
                    )?;
 
                    let mono_index = ctx.types.add_data_monomorph(ctx.modules, ctx.heap, ctx.arch, definition_id, concrete_type)?;
 
                    infer_expr.field_or_monomorph_idx = mono_index;
 
                },
 
                Expression::Select(_) => {
 
                    debug_assert!(infer_expr.field_or_monomorph_idx >= 0);
 
                },
 
                _ => {
 
                    unreachable!("handling extra data for expression {:?}", &ctx.heap[extra_data.expr_id]);
 
                }
 
            }
 
        }
 

	
 
        // Every expression checked, and new monomorphs are queued. Transfer the
 
        // expression information to the type table.
 
        let procedure_arguments = match &self.definition_type {
 
            DefinitionType::Component(id) => {
 
                let definition = &ctx.heap[*id];
 
                &definition.parameters
 
            },
 
            DefinitionType::Function(id) => {
 
                let definition = &ctx.heap[*id];
 
                &definition.parameters
 
            },
 
        };
 

	
 
        let target = ctx.types.get_procedure_monomorph_mut(self.reserved_idx);
 
        debug_assert!(target.arg_types.is_empty()); // makes sure we never queue a procedure's type inferencing twice
 
        debug_assert!(target.expr_data.is_empty());
 

	
 
        // - Write the arguments to the procedure
 
        target.arg_types.reserve(procedure_arguments.len());
 
        for argument_id in procedure_arguments {
 
            let mut concrete = ConcreteType::default();
 
            let argument_type = self.var_types.get(argument_id).unwrap();
 
            argument_type.var_type.write_concrete_type(&mut concrete);
 
            target.arg_types.push(concrete);
 
        }
 

	
 
        // - Write the expression data
 
        target.expr_data.reserve(self.expr_types.len());
 
        for infer_expr in self.expr_types.iter() {
 
            let mut concrete = ConcreteType::default();
 
            infer_expr.expr_type.write_concrete_type(&mut concrete);
 
            target.expr_data.push(MonomorphExpression{
 
                expr_type: concrete,
 
                field_or_monomorph_idx: infer_expr.field_or_monomorph_idx
 
            });
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, idx: i32) -> Result<(), ParseError> {
 
        let id = self.expr_types[idx as usize].expr_id;
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let id = expr.this;
 
                self.progress_assignment_expr(ctx, id)
 
            },
 
            Expression::Binding(expr) => {
 
                let id = expr.this;
 
                self.progress_binding_expr(ctx, id)
 
            },
 
            Expression::Conditional(expr) => {
 
                let id = expr.this;
 
                self.progress_conditional_expr(ctx, id)
 
            },
 
            Expression::Binary(expr) => {
 
                let id = expr.this;
 
                self.progress_binary_expr(ctx, id)
 
            },
 
            Expression::Unary(expr) => {
 
                let id = expr.this;
 
                self.progress_unary_expr(ctx, id)
 
            },
 
            Expression::Indexing(expr) => {
 
                let id = expr.this;
 
                self.progress_indexing_expr(ctx, id)
 
            },
 
            Expression::Slicing(expr) => {
 
                let id = expr.this;
 
                self.progress_slicing_expr(ctx, id)
 
            },
 
            Expression::Select(expr) => {
 
                let id = expr.this;
 
                self.progress_select_expr(ctx, id)
 
            },
 
            Expression::Literal(expr) => {
 
                let id = expr.this;
 
                self.progress_literal_expr(ctx, id)
 
            },
 
            Expression::Cast(expr) => {
 
                let id = expr.this;
 
                self.progress_cast_expr(ctx, id)
 
            },
 
            Expression::Call(expr) => {
 
                let id = expr.this;
 
                self.progress_call_expr(ctx, id)
 
            },
 
            Expression::Variable(expr) => {
 
                let id = expr.this;
 
                self.progress_variable_expr(ctx, id)
 
            }
 
        }
 
    }
 

	
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> Result<(), ParseError> {
 
        use AssignmentOperator as AO;
 

	
 
        let upcast_id = id.upcast();
 

	
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.left;
 
        let arg2_expr_id = expr.right;
 

	
 
        debug_log!("Assignment expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.debug_get_display_name(ctx, arg1_expr_id));
 
        debug_log!("   - Arg2 type: {}", self.debug_get_display_name(ctx, arg2_expr_id));
 
        debug_log!("   - Expr type: {}", self.debug_get_display_name(ctx, upcast_id));
 

	
 
        // Assignment does not return anything (it operates like a statement)
 
        let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &VOID_TEMPLATE)?;
 

	
 
        // Apply forced constraint to LHS value
 
        let progress_forced = match expr.operation {
 
            AO::Set =>
 
                false,
 
            AO::Concatenated =>
 
                self.apply_template_constraint(ctx, arg1_expr_id, &ARRAYLIKE_TEMPLATE)?,
 
            AO::Multiplied | AO::Divided | AO::Added | AO::Subtracted =>
 
                self.apply_template_constraint(ctx, arg1_expr_id, &NUMBERLIKE_TEMPLATE)?,
 
            AO::Remained | AO::ShiftedLeft | AO::ShiftedRight |
 
            AO::BitwiseAnded | AO::BitwiseXored | AO::BitwiseOred =>
 
                self.apply_template_constraint(ctx, arg1_expr_id, &INTEGERLIKE_TEMPLATE)?,
 
        };
 

	
 
        let (progress_arg1, progress_arg2) = self.apply_equal2_constraint(
 
            ctx, upcast_id, arg1_expr_id, 0, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_forced || progress_arg1, self.debug_get_display_name(ctx, arg1_expr_id));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.debug_get_display_name(ctx, arg2_expr_id));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.debug_get_display_name(ctx, upcast_id));
 

	
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_forced || progress_arg1 { self.queue_expr(ctx, arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(ctx, arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_binding_expr(&mut self, ctx: &mut Ctx, id: BindingExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let binding_expr = &ctx.heap[id];
 
        let bound_from_id = binding_expr.bound_from;
 
        let bound_to_id = binding_expr.bound_to;
 

	
 
        // Output is always a boolean. The two arguments should be of equal
 
        // type.
 
        let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
        let (progress_from, progress_to) = self.apply_equal2_constraint(ctx, upcast_id, bound_from_id, 0, bound_to_id, 0)?;
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_from { self.queue_expr(ctx, bound_from_id); }
 
        if progress_to { self.queue_expr(ctx, bound_to_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> Result<(), ParseError> {
 
        // Note: test expression type is already enforced
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.true_expression;
 
        let arg2_expr_id = expr.false_expression;
 

	
 
        debug_log!("Conditional expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.debug_get_display_name(ctx, arg1_expr_id));
 
        debug_log!("   - Arg2 type: {}", self.debug_get_display_name(ctx, arg2_expr_id));
 
        debug_log!("   - Expr type: {}", self.debug_get_display_name(ctx, upcast_id));
 

	
 
        // Note: this applies equality between the types of the two branches
 
        // and the type of the conditional expression itself, because the
 
        // result of the conditional is always one of its branches.
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.debug_get_display_name(ctx, arg1_expr_id));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.debug_get_display_name(ctx, arg2_expr_id));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.debug_get_display_name(ctx, upcast_id));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(ctx, arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(ctx, arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> Result<(), ParseError> {
 
        // Note: our expression type might be fixed by our parent, but we still
 
        // need to make sure it matches the type associated with our operation.
 
        use BinaryOperator as BO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_id = expr.left;
 
        let arg2_id = expr.right;
 

	
 
        debug_log!("Binary expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.debug_get_display_name(ctx, arg1_id));
 
        debug_log!("   - Arg2 type: {}", self.debug_get_display_name(ctx, arg2_id));
 
        debug_log!("   - Expr type: {}", self.debug_get_display_name(ctx, upcast_id));
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = match expr.operation {
 
            BO::Concatenate => {
 
                // Two cases: if one of the arguments or the output type is a
 
                // string, then all must be strings. Otherwise the arguments
 
                // must be arraylike and the output will be an array.
 
                let (expr_is_str, expr_is_not_str) = self.type_is_certainly_or_certainly_not_string(ctx, upcast_id);
 
                let (arg1_is_str, arg1_is_not_str) = self.type_is_certainly_or_certainly_not_string(ctx, arg1_id);
 
                let (arg2_is_str, arg2_is_not_str) = self.type_is_certainly_or_certainly_not_string(ctx, arg2_id);
 

	
 
                let someone_is_str = expr_is_str || arg1_is_str || arg2_is_str;
 
                let someone_is_not_str = expr_is_not_str || arg1_is_not_str || arg2_is_not_str;
 

	
 
                // Note: this statement is an expression returning the progression bools
 
                if someone_is_str {
 
                    // One of the arguments is a string, then all must be strings
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?
 
                } else {
 
                    let progress_expr = if someone_is_not_str {
 
                        // Output must be a normal array
 
                        self.apply_template_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?
 
                    } else {
 
                        // Output may still be either a string or an array
 
                        self.apply_template_constraint(ctx, upcast_id, &ARRAYLIKE_TEMPLATE)?
 
                    };
 

	
 
                    let progress_arg1 = self.apply_template_constraint(ctx, arg1_id, &ARRAYLIKE_TEMPLATE)?;
 
                    let progress_arg2 = self.apply_template_constraint(ctx, arg2_id, &ARRAYLIKE_TEMPLATE)?;
 

	
 
                    // If they're all arraylike, then we want the subtype to match
 
                    let (subtype_expr, subtype_arg1, subtype_arg2) =
 
                        self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 1)?;
 

	
 
                    (progress_expr || subtype_expr, progress_arg1 || subtype_arg1, progress_arg2 || subtype_arg2)
 
                }
 
            },
 
            BO::LogicalAnd => {
 
                // Forced boolean on all
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &BOOL_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &BOOL_TEMPLATE)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::LogicalOr => {
 
                // Forced boolean on all
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &BOOL_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &BOOL_TEMPLATE)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::BitwiseOr | BO::BitwiseXor | BO::BitwiseAnd | BO::Remainder | BO::ShiftLeft | BO::ShiftRight => {
 
                // All three must be equal and of integer type
 
                let progress_base = self.apply_template_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
            BO::Equality | BO::Inequality => {
 
                // Equal2 on args, forced boolean output
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let (progress_arg1, progress_arg2) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, arg1_id, 0, arg2_id, 0)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::LessThan | BO::GreaterThan | BO::LessThanEqual | BO::GreaterThanEqual => {
 
                // Equal2 on args with numberlike type, forced boolean output
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg_base = self.apply_template_constraint(ctx, arg1_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_arg1, progress_arg2) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, arg1_id, 0, arg2_id, 0)?;
 

	
 
                (progress_expr, progress_arg_base || progress_arg1, progress_arg_base || progress_arg2)
 
            },
 
            BO::Add | BO::Subtract | BO::Multiply | BO::Divide => {
 
                // All three must be equal and of numeric type
 
                let progress_base = self.apply_template_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.debug_get_display_name(ctx, arg1_id));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.debug_get_display_name(ctx, arg2_id));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.debug_get_display_name(ctx, upcast_id));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(ctx, arg1_id); }
 
        if progress_arg2 { self.queue_expr(ctx, arg2_id); }
 

	
 
        Ok(())
 
    }
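
// --- Illustrative sketch (not part of this changeset) ----------------------
// The BO::Concatenate arm above chooses its constraints based on whether any
// of the three expressions involved is already known to be, or known not to
// be, a string. Below is a hedged, simplified rendering of just that
// decision; ConcatRule and concat_rule are invented for the sketch, while the
// real code applies templates such as ARRAYLIKE_TEMPLATE / ARRAY_TEMPLATE
// through the inference engine and additionally unifies the element types.
#[derive(Debug, PartialEq)]
enum ConcatRule {
    AllStrings,       // someone is certainly a string: force string on all three
    OutputArray,      // someone is certainly not a string: the output is a plain array
    OutputArrayLike,  // nothing known yet: the output may still be a string or an array
}

fn concat_rule(is_str: [bool; 3], is_not_str: [bool; 3]) -> ConcatRule {
    if is_str.iter().any(|&b| b) {
        ConcatRule::AllStrings
    } else if is_not_str.iter().any(|&b| b) {
        ConcatRule::OutputArray
    } else {
        ConcatRule::OutputArrayLike
    }
}
// Example: only arg1 is known not to be a string -> the output must be an array.
// assert_eq!(concat_rule([false; 3], [false, true, false]), ConcatRule::OutputArray);
// ----------------------------------------------------------------------------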
 

	
 
    fn progress_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> Result<(), ParseError> {
 
        use UnaryOperator as UO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg_id = expr.expression;
 

	
 
        debug_log!("Unary expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg  type: {}", self.debug_get_display_name(ctx, arg_id));
 
        debug_log!("   - Expr type: {}", self.debug_get_display_name(ctx, upcast_id));
 

	
 
        let (progress_expr, progress_arg) = match expr.operation {
 
            UO::Positive | UO::Negative => {
 
                // Equal types of numeric class
 
                let progress_base = self.apply_template_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::BitwiseNot => {
 
                // Equal types of integer class
 
                let progress_base = self.apply_template_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::LogicalNot => {
 
                // Both bools
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg = self.apply_forced_constraint(ctx, arg_id, &BOOL_TEMPLATE)?;
 
                (progress_expr, progress_arg)
 
            }
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg  type [{}]: {}", progress_arg, self.debug_get_display_name(ctx, arg_id));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.debug_get_display_name(ctx, upcast_id));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg { self.queue_expr(ctx, arg_id); }
 

	
 
        Ok(())
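
// --- Illustrative sketch (not part of this changeset) ----------------------
// Every handler above ends with the same propagation step: if the
// expression's own type was narrowed, its parent is re-queued, and if an
// argument's type was narrowed, that argument is re-queued, so inference
// keeps running until no handler makes further progress. A hedged,
// self-contained sketch of that worklist shape; the identifiers below are
// invented, whereas the real pass re-queues expression ids through
// queue_expr / queue_expr_parent as shown above.
use std::collections::VecDeque;

fn drain_worklist<F>(mut queue: VecDeque<usize>, mut progress_one: F)
where
    F: FnMut(usize) -> Vec<usize>, // returns the ids that made progress and must be revisited
{
    while let Some(expr_id) = queue.pop_front() {
        for affected in progress_one(expr_id) {
            queue.push_back(affected);
        }
    }
}
// ----------------------------------------------------------------------------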

Changeset was too big and was cut off...
