Changeset - f79a98e9e2d9
mh - 3 years ago 2022-02-22 16:32:29
contact@maxhenger.nl
WIP: Replaced old expr-based with new rule-based inferencing code
1 file changed with 483 insertions and 70 deletions:
0 comments (0 inline, 0 general)
src/protocol/parser/pass_typing.rs
 
@@ -789,48 +789,49 @@ impl DualInferenceResult {
 
            _ => false
 
        }
 
    }
 
    fn modified_rhs(&self) -> bool {
 
        match self {
 
            DualInferenceResult::Second | DualInferenceResult::Both => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, PartialEq, Eq)]
 
enum SingleInferenceResult {
 
    Unmodified,
 
    Modified,
 
    Incompatible
 
}
 

	
 
// -----------------------------------------------------------------------------
 
// PassTyping - Public Interface
 
// -----------------------------------------------------------------------------
 

	
 
type InferNodeIndex = usize;
 
type PolyDataIndex = usize;
 
type VarDataIndex = usize;
 

	
 
enum DefinitionType{
 
    Component(ComponentDefinitionId),
 
    Function(FunctionDefinitionId),
 
}
 

	
 
impl DefinitionType {
 
    fn definition_id(&self) -> DefinitionId {
 
        match self {
 
            DefinitionType::Component(v) => v.upcast(),
 
            DefinitionType::Function(v) => v.upcast(),
 
        }
 
    }
 
}
 

	
 
pub(crate) struct ResolveQueueElement {
 
    // Note that using the `definition_id` and the `monomorph_idx` one may
 
    // query the type table for the full procedure type, thereby retrieving
 
    // the polymorphic arguments to the procedure.
 
    pub(crate) root_id: RootId,
 
    pub(crate) definition_id: DefinitionId,
 
    pub(crate) reserved_type_id: TypeId,
 
}
 

	
 
@@ -864,48 +865,51 @@ enum InferenceRule {
 
    LiteralStruct(InferenceRuleLiteralStruct),
 
    LiteralEnum,
 
    LiteralUnion(InferenceRuleLiteralUnion),
 
    LiteralArray(InferenceRuleLiteralArray),
 
    LiteralTuple(InferenceRuleLiteralTuple),
 
    CastExpr(InferenceRuleCastExpr),
 
    CallExpr(InferenceRuleCallExpr),
 
    VariableExpr(InferenceRuleVariableExpr),
 
}
 

	
 
impl InferenceRule {
 
    union_cast_method_impl!(as_mono_template, InferenceRuleTemplate, InferenceRule::MonoTemplate);
 
    union_cast_method_impl!(as_bi_equal, InferenceRuleBiEqual, InferenceRule::BiEqual);
 
    union_cast_method_impl!(as_tri_equal_args, InferenceRuleTriEqualArgs, InferenceRule::TriEqualArgs);
 
    union_cast_method_impl!(as_tri_equal_all, InferenceRuleTriEqualAll, InferenceRule::TriEqualAll);
 
    union_cast_method_impl!(as_concatenate, InferenceRuleTwoArgs, InferenceRule::Concatenate);
 
    union_cast_method_impl!(as_indexing_expr, InferenceRuleIndexingExpr, InferenceRule::IndexingExpr);
 
    union_cast_method_impl!(as_slicing_expr, InferenceRuleSlicingExpr, InferenceRule::SlicingExpr);
 
    union_cast_method_impl!(as_select_struct_field, InferenceRuleSelectStructField, InferenceRule::SelectStructField);
 
    union_cast_method_impl!(as_select_tuple_member, InferenceRuleSelectTupleMember, InferenceRule::SelectTupleMember);
 
    union_cast_method_impl!(as_literal_struct, InferenceRuleLiteralStruct, InferenceRule::LiteralStruct);
 
    union_cast_method_impl!(as_literal_union, InferenceRuleLiteralUnion, InferenceRule::LiteralUnion);
 
    union_cast_method_impl!(as_literal_array, InferenceRuleLiteralArray, InferenceRule::LiteralArray);
 
    union_cast_method_impl!(as_literal_tuple, InferenceRuleLiteralTuple, InferenceRule::LiteralTuple);
 
    union_cast_method_impl!(as_cast_expr, InferenceRuleCastExpr, InferenceRule::CastExpr);
 
    union_cast_method_impl!(as_call_expr, InferenceRuleCallExpr, InferenceRule::CallExpr);
 
    union_cast_method_impl!(as_variable_expr, InferenceRuleVariableExpr, InferenceRule::VariableExpr);
 
}
 

	
 
struct InferenceRuleTemplate {
 
    template: &'static [InferenceTypePart],
 
    application: InferenceRuleTemplateApplication,
 
}
 

	
 
impl InferenceRuleTemplate {
 
    fn new_none() -> Self {
 
        return Self{
 
            template: &[],
 
            application: InferenceRuleTemplateApplication::None,
 
        };
 
    }
 

	
 
    fn new_forced(template: &'static [InferenceTypePart]) -> Self {
 
        return Self{
 
            template,
 
            application: InferenceRuleTemplateApplication::Forced,
 
        };
 
    }
 

	
 
    fn new_template(template: &'static [InferenceTypePart]) -> Self {
 
        return Self{
 
@@ -981,152 +985,156 @@ struct InferenceRuleLiteralStruct {
 

	
 
struct InferenceRuleLiteralUnion {
 
    element_indices: Vec<InferNodeIndex>
 
}
 

	
 
struct InferenceRuleLiteralArray {
 
    element_indices: Vec<InferNodeIndex>
 
}
 

	
 
struct InferenceRuleLiteralTuple {
 
    element_indices: Vec<InferNodeIndex>
 
}
 

	
 
struct InferenceRuleCastExpr {
 
    subject_index: InferNodeIndex,
 
}
 

	
 
struct InferenceRuleCallExpr {
 
    argument_indices: Vec<InferNodeIndex>
 
}
 

	
 
/// Data associated with a variable expression: an expression that reads the
 
/// value from a variable.
 
struct InferenceRuleVariableExpr {
 
    var_data_index: VarDataIndex, // shared variable information
 
}
 

	
 
/// Data associated with a variable: keeping track of all the variable
 
/// expressions in which it is used.
 
struct InferenceRuleVariable {
 
    used_at_indices: Vec<InferNodeIndex>,
 
    var_data_index: VarDataIndex, // shared variable information
 
}
 

	
 
/// This particular visitor recurses depth-first into the AST and ensures
 
/// that all expressions have the appropriate types.
 
pub(crate) struct PassTyping {
 
    // Current definition we're typechecking.
 
    reserved_type_id: TypeId,
 
    definition_type: DefinitionType,
 
    poly_vars: Vec<ConcreteType>,
 
    // Temporary variables during construction of inference rules
 
    parent_index: Option<InferNodeIndex>,
 
    // Buffers for iteration over various types
 
    var_buffer: ScopedBuffer<VariableId>,
 
    expr_buffer: ScopedBuffer<ExpressionId>,
 
    stmt_buffer: ScopedBuffer<StatementId>,
 
    bool_buffer: ScopedBuffer<bool>,
 
    index_buffer: ScopedBuffer<usize>,
 
    poly_progress_buffer: ScopedBuffer<u32>,
 
    temp_type_parts: Vec<InferenceTypePart>,
 
    // Mapping from parser type to inferred type. We attempt to continue to
 
    // specify these types until we're stuck or we've fully determined the type.
 
    var_types: HashMap<VariableId, VarDataDeprecated>,            // types of variables
 
    infer_nodes: Vec<InferenceNode>,                     // will be transferred to type table at end
 
    poly_data: Vec<PolyData>,       // data for polymorph inference
 
    var_data: Vec<VarData>,
 
    // Keeping track of which expressions need to be reinferred because the
 
    // expressions they're linked to made progress on an associated type
 
    expr_queued: DequeSet<i32>,
 
}
 

	
 
/// Generic struct that is used to store inferred types associated with
 
/// polymorphic types.
 
struct PolyData {
 
    first_rule_application: bool,
 
    definition_id: DefinitionId, // the definition, only used for user feedback
 
    /// Inferred types of the polymorphic variables as they are written down
 
    /// at the type's definition.
 
    poly_vars: Vec<InferenceType>,
 
    /// Inferred types of associated types (e.g. struct fields, tuple members,
 
    /// function arguments). These types may depend on the polymorphic variables
 
    /// defined above.
 
    associated: Vec<InferenceType>,
 
    /// Inferred "returned" type (e.g. if a struct field is selected, then this
 
    /// contains the type of the selected field, for a function call it contains
 
    /// the return type). May depend on the polymorphic variables defined above.
 
    returned: InferenceType,
 
}
 

	
 
impl Default for PolyData {
 
    fn default() -> Self {
 
        Self {
 
            first_rule_application: true, // assumption: no rule has been applied yet
            definition_id: DefinitionId::new_invalid(),
 
            poly_vars: Vec::new(),
 
            associated: Vec::new(),
 
            returned: InferenceType::default(),
 
        }
 
    }
 
}
 

	
 
#[derive(Clone, Copy)]
 
enum PolyDataTypeIndex {
 
    Associated(usize), // indexes into `PolyData.associated`
 
    Returned,
 
}
 

	
 
impl PolyData {
 
    fn get_type(&self, index: PolyDataTypeIndex) -> &InferenceType {
 
        match index {
 
            PolyDataTypeIndex::Associated(index) => return &self.associated[index],
 
            PolyDataTypeIndex::Returned => return &self.returned,
 
        }
 
    }
 

	
 
    fn get_type_mut(&mut self, index: PolyDataTypeIndex) -> &mut InferenceType {
 
        match index {
 
            PolyDataTypeIndex::Associated(index) => return &mut self.associated[index],
 
            PolyDataTypeIndex::Returned => return &mut self.returned,
 
        }
 
    }
 
}
 

	
 
struct VarDataDeprecated {
 
    /// Type of the variable
 
    var_type: InferenceType,
 
    /// VariableExpressions that use the variable
 
    used_at: Vec<ExpressionId>,
 
    /// For channel statements we link to the other variable such that when one
 
    /// channel's interior type is resolved, we can also resolve the other one.
 
    linked_var: Option<VariableId>,
 
}
 

	
 
impl VarDataDeprecated {
 
    fn new_channel(var_type: InferenceType, other_port: VariableId) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: Some(other_port) }
 
    }
 
    fn new_local(var_type: InferenceType) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: None }
 
    }
 
}
 

	
 
struct VarData {
 
    var_id: VariableId,
 
    var_type: InferenceType,
 
    used_at: Vec<InferNodeIndex>, // of variable expressions
 
    linked_var: Option<VarDataIndex>,
 
}
 

	
 
impl PassTyping {
 
    pub(crate) fn new() -> Self {
 
        PassTyping {
 
            reserved_type_id: TypeId::new_invalid(),
 
            definition_type: DefinitionType::Function(FunctionDefinitionId::new_invalid()),
 
            poly_vars: Vec::new(),
 
            parent_index: None,
 
            var_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_LARGE),
 
            expr_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_LARGE),
 
            stmt_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_LARGE),
 
            bool_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_SMALL),
 
            poly_progress_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_SMALL),
            index_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_SMALL), // assumption: same small capacity as the other small buffers
            temp_type_parts: Vec::new(),
 
            var_types: HashMap::new(),
 
            infer_nodes: Vec::new(),
 
            poly_data: Vec::new(),
            var_data: Vec::new(),
 
            expr_queued: DequeSet::new(),
 
        }
 
    }
 

	
 
    pub(crate) fn queue_module_definitions(ctx: &mut Ctx, queue: &mut ResolveQueue) {
 
        debug_assert_eq!(ctx.module().phase, ModuleCompilationPhase::ValidatedAndLinked);
 
        let root_id = ctx.module().root_id;
 
        let root = &ctx.heap.protocol_descriptions[root_id];
 
        for definition_id in &root.definitions {
 
@@ -1215,147 +1223,164 @@ impl PassTyping {
 
    fn visit_enum_definition(&mut self, _: &mut Ctx, _: EnumDefinitionId) -> VisitorResult { return Ok(()) }
 
    fn visit_struct_definition(&mut self, _: &mut Ctx, _: StructDefinitionId) -> VisitorResult { return Ok(()) }
 
    fn visit_union_definition(&mut self, _: &mut Ctx, _: UnionDefinitionId) -> VisitorResult { return Ok(()) }
 

	
 
    fn visit_component_definition(&mut self, ctx: &mut Ctx, id: ComponentDefinitionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Component(id);
 

	
 
        let comp_def = &ctx.heap[id];
 
        debug_assert_eq!(comp_def.poly_vars.len(), self.poly_vars.len(), "component polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting component '{}': {}", comp_def.identifier.value.as_str(), id.0.index);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        // Reserve data for expression types
 
        debug_assert!(self.infer_nodes.is_empty());
 
        self.infer_nodes.resize(comp_def.num_expressions_in_body as usize, Default::default());
 

	
 
        // Visit parameters
 
        let section = self.var_buffer.start_section_initialized(comp_def.parameters.as_slice());
 
        for param_id in section.iter_copied() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, true);
 
            debug_assert!(var_type.is_done, "expected component arguments to be concrete types");
 
            self.var_types.insert(param_id, VarDataDeprecated::new_local(var_type));
 
        }
 
        section.forget();
 

	
 
        // Visit the body and all of its expressions
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.parent_index = None;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionDefinitionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Function(id);
 

	
 
        let func_def = &ctx.heap[id];
 
        debug_assert_eq!(func_def.poly_vars.len(), self.poly_vars.len(), "function polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting function '{}': {}", func_def.identifier.value.as_str(), id.0.index);
 
        if debug_log_enabled!() {
 
            debug_log!("Polymorphic variables:");
 
            for (_idx, poly_var) in self.poly_vars.iter().enumerate() {
 
                let mut infer_type_parts = Vec::new();
 
                Self::determine_inference_type_from_concrete_type(
 
                    &mut infer_type_parts, &poly_var.parts
 
                );
 
                let _infer_type = InferenceType::new(false, true, infer_type_parts);
 
                debug_log!(" - [{:03}] {:?}", _idx, _infer_type.display_name(&ctx.heap));
 
            }
 
        }
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        // Reserve data for expression types
 
        debug_assert!(self.infer_nodes.is_empty());
 
        self.infer_nodes.resize(func_def.num_expressions_in_body as usize, Default::default());
 

	
 
        // Visit parameters
 
        let section = self.var_buffer.start_section_initialized(func_def.parameters.as_slice());
 
        for param_id in section.iter_copied() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, true);
 
            debug_assert!(var_type.is_done, "expected function arguments to be concrete types");
 
            self.var_types.insert(param_id, VarDataDeprecated::new_local(var_type));
 
        }
 
        section.forget();
 

	
 
        // Visit all of the expressions within the body
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.parent_index = None;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    // Statements
 

	
 
    fn visit_stmt(&mut self, ctx: &mut Ctx, id: StatementId) -> VisitorResult {
 
        return visitor_recursive_statement_impl!(self, &ctx.heap[id], ctx, Ok(()));
 
    }
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        // Transfer statements for traversal
 
        let block = &ctx.heap[id];
 

	
 
        let section = self.stmt_buffer.start_section_initialized(block.statements.as_slice());
 
        for stmt_id in section.iter_copied() {
 
            self.visit_stmt(ctx, stmt_id)?;
 
        }
 
        section.forget();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_stmt(&mut self, ctx: &mut Ctx, id: LocalStatementId) -> VisitorResult {
 
        return visitor_recursive_local_impl!(self, &ctx.heap[id], ctx);
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, ctx: &mut Ctx, id: MemoryStatementId) -> VisitorResult {
 
        let memory_stmt = &ctx.heap[id];
 
        let initial_expr_id = memory_stmt.initial_expr;
 

	
 
        // Setup memory statement inference
 
        let local = &ctx.heap[memory_stmt.variable];
 
        let var_type = self.determine_inference_type_from_parser_type_elements(&local.parser_type.elements, true);
 
 
        self.var_data.push(VarData{
 
            var_id: memory_stmt.variable,
 
            var_type,
 
            used_at: Vec::new(),
 
            linked_var: None,
 
        });
 

	
 
        // Process the initial value
 
        self.visit_assignment_expr(ctx, initial_expr_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, ctx: &mut Ctx, id: ChannelStatementId) -> VisitorResult {
 
        let channel_stmt = &ctx.heap[id];
 

	
 
        let from_var_index = self.var_data.len() as VarDataIndex;
 
        let to_var_index = from_var_index + 1;
 

	
 
        let from_local = &ctx.heap[channel_stmt.from];
 
        let from_var_type = self.determine_inference_type_from_parser_type_elements(&from_local.parser_type.elements, true);
 
 
        self.var_data.push(VarData{
 
            var_id: channel_stmt.from,
 
            var_type: from_var_type,
 
            used_at: Vec::new(),
 
            linked_var: Some(to_var_index),
 
        });
 

	
 
        let to_local = &ctx.heap[channel_stmt.to];
 
        let to_var_type = self.determine_inference_type_from_parser_type_elements(&to_local.parser_type.elements, true);
 
 
        self.var_data.push(VarData{
 
            var_id: channel_stmt.to,
 
            var_type: to_var_type,
 
            used_at: Vec::new(),
 
            linked_var: Some(from_var_index),
 
        });
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        let labeled_stmt = &ctx.heap[id];
 
        let substmt_id = labeled_stmt.body;
 
        self.visit_stmt(ctx, substmt_id)
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        let if_stmt = &ctx.heap[id];
 

	
 
        let true_body_case = if_stmt.true_case;
 
        let false_body_case = if_stmt.false_case;
 
        let test_expr_id = if_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_stmt(ctx, true_body_case.body)?;
 
        if let Some(false_body_case) = false_body_case {
 
            self.visit_stmt(ctx, false_body_case.body)?;
 
        }
 

	
 
        Ok(())
 
@@ -1468,258 +1493,276 @@ impl PassTyping {
 
        let left_index = self.visit_expr(ctx, left_expr_id)?;
 
        let right_index = self.visit_expr(ctx, right_expr_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        let argument_template = match assign_op {
 
            AO::Set =>
 
                InferenceRuleTemplate::new_none(),
 
            AO::Concatenated =>
 
                InferenceRuleTemplate::new_template(&ARRAYLIKE_TEMPLATE),
 
            AO::Multiplied | AO::Divided | AO::Added | AO::Subtracted =>
 
                InferenceRuleTemplate::new_template(&NUMBERLIKE_TEMPLATE),
 
            AO::Remained | AO::ShiftedLeft | AO::ShiftedRight |
 
            AO::BitwiseAnded | AO::BitwiseXored | AO::BitwiseOred =>
 
                InferenceRuleTemplate::new_template(&INTEGERLIKE_TEMPLATE),
 
        };
 

	
 
        node.inference_rule = InferenceRule::TriEqualArgs(InferenceRuleTriEqualArgs{
 
            argument_template,
 
            result_template: InferenceRuleTemplate::new_forced(&VOID_TEMPLATE),
 
            argument1_index: left_index,
 
            argument2_index: right_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
 
        self.progress_inference_rule_tri_equal_args(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_binding_expr(&mut self, ctx: &mut Ctx, id: BindingExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let binding_expr = &ctx.heap[id];
 
        let bound_to_id = binding_expr.bound_to;
 
        let bound_from_id = binding_expr.bound_from;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let arg_to_index = self.visit_expr(ctx, bound_to_id)?;
 
        let arg_from_index = self.visit_expr(ctx, bound_from_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::TriEqualArgs(InferenceRuleTriEqualArgs{
 
            argument_template: InferenceRuleTemplate::new_none(),
 
            result_template: InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE),
 
            argument1_index: arg_to_index,
 
            argument2_index: arg_from_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
 
        self.progress_inference_rule_tri_equal_args(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let conditional_expr = &ctx.heap[id];
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        self.visit_expr(ctx, test_expr_id)?;
 
        let true_index = self.visit_expr(ctx, true_expr_id)?;
 
        let false_index = self.visit_expr(ctx, false_expr_id)?;
 

	
 
        // Note: the test to the conditional expression has already been forced
 
        // to the boolean type. So the only thing we need to do while progressing
 
        // is to apply an equal3 constraint to the arguments and the result of
 
        // the expression.
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::TriEqualAll(InferenceRuleTriEqualAll{
 
            template: InferenceRuleTemplate::new_none(),
 
            argument1_index: true_index,
 
            argument2_index: false_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
 
        self.progress_inference_rule_tri_equal_all(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> VisitExprResult {
 
        use BinaryOperator as BO;
 

	
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let binary_expr = &ctx.heap[id];
 
        let binary_op = binary_expr.operation;
 
        let lhs_expr_id = binary_expr.left;
 
        let rhs_expr_id = binary_expr.right;
 

	
 
 
        let old_parent = self.parent_index.replace(self_index);
 
        let left_index = self.visit_expr(ctx, lhs_expr_id)?;
 
        let right_index = self.visit_expr(ctx, rhs_expr_id)?;
 

	
 
        let inference_rule = match binary_op {
 
            BO::Concatenate =>
 
                InferenceRule::Concatenate(InferenceRuleTwoArgs{
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
            BO::LogicalAnd | BO::LogicalOr =>
 
                InferenceRule::TriEqualAll(InferenceRuleTriEqualAll{
 
                    template: InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE),
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
            BO::BitwiseOr | BO::BitwiseXor | BO::BitwiseAnd | BO::Remainder | BO::ShiftLeft | BO::ShiftRight =>
 
                InferenceRule::TriEqualAll(InferenceRuleTriEqualAll{
 
                    template: InferenceRuleTemplate::new_template(&INTEGERLIKE_TEMPLATE),
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
            BO::Equality | BO::Inequality =>
 
                InferenceRule::TriEqualArgs(InferenceRuleTriEqualArgs{
 
                    argument_template: InferenceRuleTemplate::new_none(),
 
                    result_template: InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE),
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
            BO::LessThan | BO::GreaterThan | BO::LessThanEqual | BO::GreaterThanEqual =>
 
                InferenceRule::TriEqualArgs(InferenceRuleTriEqualArgs{
 
                    argument_template: InferenceRuleTemplate::new_template(&NUMBERLIKE_TEMPLATE),
 
                    result_template: InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE),
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
            BO::Add | BO::Subtract | BO::Multiply | BO::Divide =>
 
                InferenceRule::TriEqualAll(InferenceRuleTriEqualAll{
 
                    template: InferenceRuleTemplate::new_template(&NUMBERLIKE_TEMPLATE),
 
                    argument1_index: left_index,
 
                    argument2_index: right_index,
 
                }),
 
        };
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = inference_rule;
 

	
 
        self.parent_index = old_parent;
 
 
        self.progress_inference_rule(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> VisitExprResult {
 
        use UnaryOperator as UO;
 

	
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let unary_expr = &ctx.heap[id];
 
        let operation = unary_expr.operation;
 
        let arg_expr_id = unary_expr.expression;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let argument_index = self.visit_expr(ctx, arg_expr_id)?;
 

	
 
        let template = match operation {
 
            UO::Positive | UO::Negative =>
 
                InferenceRuleTemplate::new_template(&NUMBERLIKE_TEMPLATE),
 
            UO::BitwiseNot =>
 
                InferenceRuleTemplate::new_template(&INTEGERLIKE_TEMPLATE),
 
            UO::LogicalNot =>
 
                InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE),
 
        };
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::BiEqual(InferenceRuleBiEqual{
 
            template, argument_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
 
        self.progress_inference_rule_bi_equal(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let indexing_expr = &ctx.heap[id];
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 
        let index_index = self.visit_expr(ctx, index_expr_id)?; // cool name, bro
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::IndexingExpr(InferenceRuleIndexingExpr{
 
            subject_index, index_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
 
        self.progress_inference_rule_indexing_expr(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let slicing_expr = &ctx.heap[id];
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 
        let from_index = self.visit_expr(ctx, from_expr_id)?;
 
        let to_index = self.visit_expr(ctx, to_expr_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::SlicingExpr(InferenceRuleSlicingExpr{
 
            subject_index, from_index, to_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
 
        self.progress_inference_rule_slicing_expr(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let select_expr = &ctx.heap[id];
 
        let subject_expr_id = select_expr.subject;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
 
        let inference_rule = match &ctx.heap[id].kind {
 
            SelectKind::StructField(field_identifier) =>
 
                InferenceRule::SelectStructField(InferenceRuleSelectStructField{
 
                    subject_index,
 
                    selected_field: field_identifier.clone(),
 
                }),
 
            SelectKind::TupleMember(member_index) =>
 
                InferenceRule::SelectTupleMember(InferenceRuleSelectTupleMember{
 
                    subject_index,
 
                    selected_index: *member_index,
 
                }),
 
        };
 
        node.inference_rule = inference_rule;
 

	
 
        self.parent_index = old_parent;
 
 
        self.progress_inference_rule(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 

	
 
        let literal_expr = &ctx.heap[id];
 
        match &literal_expr.value {
 
            Literal::Null => {
 
                let node = &mut self.infer_nodes[self_index];
 
                node.inference_rule = InferenceRule::MonoTemplate(InferenceRuleTemplate::new_template(&MESSAGE_TEMPLATE));
 
            },
 
            Literal::Integer(_) => {
 
                let node = &mut self.infer_nodes[self_index];
 
                node.inference_rule = InferenceRule::MonoTemplate(InferenceRuleTemplate::new_template(&INTEGERLIKE_TEMPLATE));
 
            },
 
            Literal::True | Literal::False => {
 
                let node = &mut self.infer_nodes[self_index];
 
                node.inference_rule = InferenceRule::MonoTemplate(InferenceRuleTemplate::new_forced(&BOOL_TEMPLATE));
 
            },
 
            Literal::Character(_) => {
 
                let node = &mut self.infer_nodes[self_index];
 
@@ -1739,173 +1782,206 @@ impl PassTyping {
 
                let mut expr_indices = self.index_buffer.start_section();
 
                for expr_id in expr_ids.iter_copied() {
 
                    let expr_index = self.visit_expr(ctx, expr_id)?;
 
                    expr_indices.push(expr_index);
 
                }
 
                expr_ids.forget();
 
                let element_indices = expr_indices.into_vec();
 

	
 
                // Assign rule and extra data index to inference node
 
                let extra_index = self.insert_initial_struct_polymorph_data(ctx, id);
 
                let node = &mut self.infer_nodes[self_index];
 
                node.poly_data_index = extra_index;
 
                node.inference_rule = InferenceRule::LiteralStruct(InferenceRuleLiteralStruct{
 
                    element_indices,
 
                });
 
            },
 
            Literal::Enum(_) => {
 
                // Enumerations do not carry any subexpressions, but may still
 
                // have a user-defined polymorphic marker variable. For this 
 
                // reason we may still have to apply inference to this 
 
                // polymorphic variable
 
                let extra_index = self.insert_initial_enum_polymorph_data(ctx, id);
 
                let node = &mut self.infer_nodes[self_index];
 
                node.poly_data_index = extra_index;
 
 
                node.inference_rule = InferenceRule::LiteralEnum;
 
            },
 
            Literal::Union(literal) => {
 
                // May carry subexpressions and polymorphic arguments
 
                let expr_ids = self.expr_buffer.start_section_initialized(literal.values.as_slice());
 
                let extra_index = self.insert_initial_union_polymorph_data(ctx, id);
 

	
 
                let mut expr_indices = self.index_buffer.start_section();
 
                for expr_id in expr_ids.iter_copied() {
 
                    let expr_index = self.visit_expr(ctx, expr_id)?;
 
                    expr_indices.push(expr_index);
 
                }
 
                expr_ids.forget();
 
                let element_indices = expr_indices.into_vec();
 

	
 
                let node = &mut self.infer_nodes[self_index];
 
                node.poly_data_index = extra_index;
 
                node.inference_rule = InferenceRule::LiteralUnion(InferenceRuleLiteralUnion{
 
                    element_indices,
 
                });
 
            },
 
            Literal::Array(expressions) | Literal::Tuple(expressions) => {
 
                let expr_ids = self.expr_buffer.start_section_initialized(expressions.as_slice());
 
                let mut expr_indices = self.index_buffer.start_section();
 

	
 
                for expr_id in expr_ids.iter_copied() {
 
                    let expr_index = self.visit_expr(ctx, expr_id)?;
 
                    expr_indices.push(expr_index);
 
                }
 
                expr_ids.forget();
 
                let element_indices = expr_indices.into_vec();
 

	
 
                let node = &mut self.infer_nodes[self_index];
 
 
                node.inference_rule = InferenceRule::LiteralArray(InferenceRuleLiteralArray{
 
                    element_indices,
 
                })
 
            }
 
        }
 

	
 
        self.parent_index = old_parent;
 
 
        self.progress_inference_rule(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_cast_expr(&mut self, ctx: &mut Ctx, id: CastExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let cast_expr = &ctx.heap[id];
 
        let subject_expr_id = cast_expr.subject;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::CastExpr(InferenceRuleCastExpr{
 
            subject_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
 

	
 
        // The cast expression is a bit special at this point: the progression
 
        // function simply makes sure input/output types are compatible. But if
 
        // the programmer explicitly specified the output type, then we can
 
        // already perform that inference rule here.
 
        {
 
            let specified_type = self.determine_inference_type_from_parser_type_elements(&cast_expr.to_type.elements, true);
 
            let _progress = self.apply_template_constraint(ctx, self_index, &specified_type.parts)?;
 
        }
 

	
 
        self.progress_inference_rule_cast_expr(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 
        let extra_index = self.insert_initial_call_polymorph_data(ctx, id);
 

	
 
        // By default we set the polymorph idx for calls to 0. If the call
 
        // refers to a non-polymorphic function, then it will be "monomorphed"
 
        // once, hence we end up pointing to the correct instance.
 
        self.infer_nodes[self_index].field_or_monomorph_index = 0;
 

	
 
        // Visit all arguments
 
        let old_parent = self.parent_index.replace(self_index);
 

	
 
        let call_expr = &ctx.heap[id];
 
        let expr_ids = self.expr_buffer.start_section_initialized(call_expr.arguments.as_slice());
 
        let mut expr_indices = self.index_buffer.start_section();
 

	
 
        for arg_expr_id in expr_ids.iter_copied() {
 
            let expr_index = self.visit_expr(ctx, arg_expr_id)?;
 
            expr_indices.push(expr_index);
 
        }
 
        expr_ids.forget();
 
        let argument_indices = expr_indices.into_vec();
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.poly_data_index = extra_index;
 
        node.inference_rule = InferenceRule::CallExpr(InferenceRuleCallExpr{
 
            argument_indices,
 
        });
 

	
 
        self.parent_index = old_parent;
 
 
        self.progress_inference_rule_call_expr(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let var_expr = &ctx.heap[id];
 
        debug_assert!(var_expr.declaration.is_some());
 
        let old_parent = self.parent_index.replace(self_index);
 

	
 
        let declaration = &ctx.heap[var_expr.declaration.unwrap()];
 
        let mut var_data_index = None;
 
        for (index, var_data) in self.var_data.iter().enumerate() {
 
            if var_data.var_id == declaration.this {
 
                var_data_index = Some(index);
 
                break;
 
            }
 
        }
 

	
 
        let var_data_index = if let Some(var_data_index) = var_data_index {
 
            let var_data = &mut self.var_data[var_data_index];
 
            var_data.used_at.push(self_index);
 

	
 
            var_data_index
 
        } else {
 
            // If we're in a binding expression then it might be the first time we
 
            // encounter the variable, so add a `VarData` entry.
 
            debug_assert_eq!(declaration.kind, VariableKind::Binding);
 
            let var_type = self.determine_inference_type_from_parser_type_elements(
 
                &declaration.parser_type.elements, true
 
            );
 
 
            let var_data_index = self.var_data.len();
 
            self.var_data.push(VarData{
 
                var_id: declaration.this,
 
                var_type,
 
 
                used_at: vec![self_index],
 
                linked_var: None,
 
            });
 
 

	
 
 
            var_data_index
 
        };
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::VariableExpr(InferenceRuleVariableExpr{
 
            var_data_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_inference_rule_variable_expr(ctx, self_index)?;
 
        return Ok(self_index);
 
    }
 
}
 

	
 
// -----------------------------------------------------------------------------
 
// PassTyping - Type-inference progression
 
// -----------------------------------------------------------------------------
 

	
 
impl PassTyping {
 
    #[allow(dead_code)] // used when debug flag at the top of this file is true.
 
    fn debug_get_display_name(&self, ctx: &Ctx, expr_id: ExpressionId) -> String {
 
        let expr_idx = ctx.heap[expr_id].get_unique_id_in_definition();
 
        let expr_type = &self.infer_nodes[expr_idx as usize].expr_type;
 
        expr_type.display_name(&ctx.heap)
 
    }
 

	
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError> {
 
        // Keep inferring until we can no longer make any progress
 
        while !self.expr_queued.is_empty() {
 
            // Make as much progress as possible without forced integer
 
            // inference.
 
            while !self.expr_queued.is_empty() {
 
                let next_expr_idx = self.expr_queued.pop_front().unwrap();
 
                self.progress_expr(ctx, next_expr_idx)?;
 
            }
 
@@ -2079,97 +2155,151 @@ impl PassTyping {
 
        // - Write the arguments to the procedure
 
        target.arg_types.reserve(procedure_arguments.len());
 
        for argument_id in procedure_arguments {
 
            let mut concrete = ConcreteType::default();
 
            let argument_type = self.var_types.get(argument_id).unwrap();
 
            argument_type.var_type.write_concrete_type(&mut concrete);
 
            target.arg_types.push(concrete);
 
        }
 

	
 
        // - Write the expression data
 
        target.expr_data.reserve(self.infer_nodes.len());
 
        for infer_expr in self.infer_nodes.iter() {
 
            let mut concrete = ConcreteType::default();
 
            infer_expr.expr_type.write_concrete_type(&mut concrete);
 
            target.expr_data.push(MonomorphExpression{
 
                expr_type: concrete,
 
                field_or_monomorph_idx: infer_expr.field_or_monomorph_index,
 
                type_id: infer_expr.type_id,
 
            });
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_inference_rule(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        use InferenceRule as IR;
 

	
 
        let node = &self.infer_nodes[node_index];
 
        match &node.inference_rule {
 
            IR::Noop =>
 
                return Ok(()),
 
            IR::MonoTemplate(_) =>
 
                self.progress_inference_rule_mono_template(ctx, node_index),
 
            IR::BiEqual(_) =>
 
                self.progress_inference_rule_bi_equal(ctx, node_index),
 
            IR::TriEqualArgs(_) =>
 
                self.progress_inference_rule_tri_equal_args(ctx, node_index),
 
            IR::TriEqualAll(_) =>
 
                self.progress_inference_rule_tri_equal_all(ctx, node_index),
 
            IR::Concatenate(_) =>
 
                self.progress_inference_rule_concatenate(ctx, node_index),
 
            IR::IndexingExpr(_) =>
 
                self.progress_inference_rule_indexing_expr(ctx, node_index),
 
            IR::SlicingExpr(_) =>
 
                self.progress_inference_rule_slicing_expr(ctx, node_index),
 
            IR::SelectStructField(_) =>
 
                self.progress_inference_rule_select_struct_field(ctx, node_index),
 
            IR::SelectTupleMember(_) =>
 
                self.progress_inference_rule_select_tuple_member(ctx, node_index),
 
            IR::LiteralStruct(_) =>
 
                self.progress_inference_rule_literal_struct(ctx, node_index),
 
            IR::LiteralEnum =>
 
                self.progress_inference_rule_literal_enum(ctx, node_index),
 
            IR::LiteralUnion(_) =>
 
                self.progress_inference_rule_literal_union(ctx, node_index),
 
            IR::LiteralArray(_) =>
 
                self.progress_inference_rule_literal_array(ctx, node_index),
 
            IR::LiteralTuple(_) =>
 
                self.progress_inference_rule_literal_tuple(ctx, node_index),
 
            IR::CastExpr(_) =>
 
                self.progress_inference_rule_cast_expr(ctx, node_index),
 
            IR::CallExpr(_) =>
 
                self.progress_inference_rule_call_expr(ctx, node_index),
 
            IR::VariableExpr(_) =>
 
                self.progress_inference_rule_variable_expr(ctx, node_index),
 
        }
 
    }
 

	
 
    fn progress_inference_rule_mono_template(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_mono_template();
 

	
 
        let progress = self.progress_template(ctx, node_index, rule.application, rule.template)?;
 
        if progress { self.queue_node_parent(node_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_bi_equal(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_bi_equal();
 
        let arg_index = rule.argument_index;
 

	
 
        let base_progress = self.progress_template(ctx, node_index, rule.template.application, rule.template.template)?;
 
        let (node_progress, arg_progress) = self.apply_equal2_constraint(ctx, node_index, node_index, 0, arg_index, 0)?;
 

	
 
        if base_progress || node_progress { self.queue_node_parent(node_index); }
 
        if arg_progress { self.queue_node(arg_index); }
 

	
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_tri_equal_args(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_tri_equal_args();
 
        let arg1_index = rule.argument1_index;
 
        let arg2_index = rule.argument2_index;
 

	
 
        let self_template_progress = self.progress_template(ctx, node_index, rule.result_template.application, rule.result_template.template)?;
 
        let arg1_template_progress = self.progress_template(ctx, arg1_index, rule.argument_template.application, rule.argument_template.template)?;
 
        let (arg1_progress, arg2_progress) = self.apply_equal2_constraint(ctx, node_index, arg1_index, 0, arg2_index, 0)?;
 

	
 
        if self_template_progress { self.queue_node_parent(node_index); }
 
        if arg1_template_progress || arg1_progress { self.queue_node(arg1_index); }
 
        if arg2_template_progress || arg2_progress { self.queue_node(arg2_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_tri_equal_all(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_tri_equal_all();
 
        let arg1_index = rule.argument1_index;
 
        let arg2_index = rule.argument2_index;
 

	
 
        let template_progress = self.progress_template(ctx, node_index, rule.template.application, rule.template.template)?;
 
        let (node_progress, arg1_progress, arg2_progress) =
 
            self.apply_equal3_constraint(ctx, node_index, arg1_index, arg2_index, 0)?;
 

	
 
        if template_progress || node_progress { self.queue_node_parent(node_index); }
 
        if arg1_progress { self.queue_node(arg1_index); }
 
        if arg2_progress { self.queue_node(arg2_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
 
    fn progress_inference_rule_concatenate(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_concatenate();
 
        let arg1_index = rule.argument1_index;
 
        let arg2_index = rule.argument2_index;
 

	
 
        // Two cases: one of the arguments is a string (then all must be), or
 
        // one of the arguments is an array (and all must be arrays).
 
        let (expr_is_str, expr_is_not_str) = self.type_is_certainly_or_certainly_not_string(node_index);
 
        let (arg1_is_str, arg1_is_not_str) = self.type_is_certainly_or_certainly_not_string(arg1_index);
 
        let (arg2_is_str, arg2_is_not_str) = self.type_is_certainly_or_certainly_not_string(arg2_index);
 

	
 
        let someone_is_str = expr_is_str || arg1_is_str || arg2_is_str;
 
        let someone_is_not_str = expr_is_not_str || arg1_is_not_str || arg2_is_not_str;
 

	
 
        // Note: this statement is an expression returning the progression bools
 
        let (node_progress, arg1_progress, arg2_progress) = if someone_is_str {
 
            // One of the arguments is a string, then all must be strings
 
            self.apply_equal3_constraint(ctx, node_index, arg1_index, arg2_index, 0)?
 
        } else {
 
            let progress_expr = if someone_is_not_str {
 
                // Output must be a normal array
 
                self.apply_template_constraint(ctx, node_index, &ARRAY_TEMPLATE)?
 
            } else {
 
                // Output may still be anything
 
@@ -2249,49 +2379,49 @@ impl PassTyping {
 
            (
 
                node_template_progress || node_progress,
 
                subject_progress
 
            )
 
        } else {
 
            // Not sure yet
 
            let node_template_progress = self.apply_template_constraint(ctx, node_index, &ARRAYLIKE_TEMPLATE)?;
 
            let (node_progress, subject_progress) =
 
                self.apply_equal2_constraint(ctx, node_index, node_index, 1, subject_index, 1)?;
 

	
 
            (
 
                node_template_progress || node_progress,
 
                subject_progress
 
            )
 
        };
 

	
 
        if node_progress { self.queue_node_parent(node_index); }
 
        if subject_template_progress || subject_progress { self.queue_node(subject_index); }
 
        if from_template_progress || from_index_progress { self.queue_node(from_index_index); }
 
        if to_template_progress || to_index_progress { self.queue_node(to_index_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
 
    fn progress_inference_rule_select_struct_field(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_select_struct_field();
 

	
 
        let subject_index = rule.subject_index;
 

	
 
        fn get_definition_id_from_inference_type(inference_type: &InferenceType) -> Result<Option<DefinitionId>, ()> {
 
            for part in inference_type.parts.iter() {
 
                if part.is_marker() { continue; }
 
                if !part.is_concrete() { break; }
 

	
 
                if let InferenceTypePart::Instance(definition_id, _) = part {
 
                    return Ok(Some(*definition_id));
 
                } else {
 
                    return Err(())
 
                }
 
            }
 

	
 
            // Nothing is known yet
 
            return Ok(None);
 
        }
 

	
 
        if node.field_or_monomorph_index < 0 {
 
            // We don't know the subject's definition yet, and hence not the field. Try to
 
            // determine it.
 
@@ -2362,73 +2492,58 @@ impl PassTyping {
 
        let subject_expr_id = self.infer_nodes[subject_index].expr_id;
 
        let mut poly_progress_section = self.poly_progress_buffer.start_section();
 

	
 
        let (_, progress_subject_1) = self.apply_polydata_equal2_constraint(
 
            ctx, node_index, subject_expr_id, "selected struct's",
 
            PolyDataTypeIndex::Associated(0), 0, subject_index, 0, &mut poly_progress_section
 
        )?;
 
        let (_, progress_field_1) = self.apply_polydata_equal2_constraint(
 
            ctx, node_index, field_expr_id, "selected field's",
 
            PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section
 
        )?;
 

	
 
        // Maybe make progress on types due to inferred polymorphic variables
 
        let progress_subject_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Associated(0), subject_index, &poly_progress_section
 
        );
 
        let progress_field_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Returned, node_index, &poly_progress_section
 
        );
 

	
 
        if progress_subject_1 || progress_subject_2 { self.queue_node(subject_index); }
 
        if progress_field_1 || progress_field_2 { self.queue_node_parent(node_index); }
 

	
 
        poly_progress_section.forget();
 

	
 
        self.finish_polydata_constraint(node_index);
 
        return Ok(())
 
    }
 

	
 
 
    fn progress_inference_rule_select_tuple_member(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_select_tuple_member();
 
        let subject_index = rule.subject_index;
 
        let tuple_member_index = rule.selected_index;
 

	
 
        fn get_tuple_size_from_inference_type(inference_type: &InferenceType) -> Result<Option<u32>, ()> {
 
            for part in &inference_type.parts {
 
                if part.is_marker() { continue; }
 
                if !part.is_concrete() { break; }
 

	
 
                if let InferenceTypePart::Tuple(size) = part {
 
                    return Ok(Some(*size));
 
                } else {
 
                    return Err(()); // not a tuple!
 
                }
 
            }
 

	
 
            return Ok(None);
 
        }
 

	
 
        if node.field_or_monomorph_index < 0 {
 
            let subject_type = &self.infer_nodes[subject_index].expr_type;
 
            let tuple_size = get_tuple_size_from_inference_type(subject_type);
 
            let tuple_size = match tuple_size {
 
                Ok(Some(tuple_size)) => {
 
                    tuple_size
 
                },
 
                Ok(None) => {
 
                    // We can't infer anything yet
 
                    return Ok(())
 
                },
 
                Err(()) => {
 
                    let select_expr_span = ctx.heap[node.expr_id].full_span();
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module().source, select_expr_span, format!(
 
                            "tuple element select cannot be applied to a subject of type '{}'",
 
                            subject_type.display_name(&ctx.heap)
 
                        )
 
                    ));
 
                }
 
            };
 

	
 
            // If we get here then we at least know the tuple size. Now check
 
            // that the index doesn't exceed that size.
 
@@ -2491,164 +2606,409 @@ impl PassTyping {
 
            ctx, node_index, node.expr_id, "struct literal's",
 
            PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section
 
        )?;
 

	
 
        // And the other way around: if any of our polymorphic variables are
 
        // more specific than they were before, then we forward that information
 
        // back to our struct/fields.
 
        for (field_index, field_node_index) in rule.element_indices.iter().copied().enumerate() {
 
            let progress_field = self.apply_polydata_polyvar_constraint(
 
                ctx, node_index, PolyDataTypeIndex::Associated(field_index),
 
                field_node_index, &poly_progress_section
 
            );
 

	
 
            if progress_field { self.queue_node(field_node_index); }
 
        }
 

	
 
        let progress_literal_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Returned,
 
            node_index, &poly_progress_section
 
        );
 

	
 
        if progress_literal_1 || progress_literal_2 { self.queue_node_parent(node_index); }
 

	
 
        poly_progress_section.forget();
 

	
 
        self.finish_polydata_constraint(node_index);
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_literal_enum(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_literal_enum();
 

	
 
        let mut poly_progress_section = self.poly_progress_buffer.start_section();
 

	
 
        // An enum literal type is simply, well, the enum's type. However, it
 
        // might still have polymorphic variables, hence the use of `PolyData`.
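        // (Illustrative: a hypothetical enum `Answer<T>` with value-less
        // variants `Yes` and `No` still requires `T` to be inferred from the
        // surrounding expressions when writing `Answer::Yes`.)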
 
        let (_, progress_literal_1) = self.apply_polydata_equal2_constraint(
 
            ctx, node_index, node.expr_id, "enum literal's",
 
            PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section
 
        )?;
 

	
 
        let progress_literal_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Returned, node_index, &poly_progress_section
 
        );
 

	
 
        if progress_literal_1 || progress_literal_2 { self.queue_node_parent(node_index); }
 

	
 
        poly_progress_section.forget();
 
        self.finish_polydata_constraint(node_index);
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_literal_union(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_literal_union();
 

	
 
        // Infer type of any embedded values in the union variant. At the same
 
        // time progress the polymorphic variables associated with the union.
 
        let mut poly_progress_section = self.poly_progress_buffer.start_section();
 

	
 
        for (embedded_index, embedded_node_index) in rule.element_indices.iter().copied().enumerate() {
 
            let embedded_node_expr_id = self.infer_nodes[embedded_node_index].expr_id;
 
            let (_, progress_embedded) = self.apply_polydata_equal2_constraint(
 
                ctx, node_index, embedded_node_expr_id, "embedded value's",
 
                PolyDataTypeIndex::Associated(embedded_index), 0,
 
                embedded_node_index, 0, &mut poly_progress_section
 
            )?;
 

	
 
            if progress_embedded { self.queue_node(embedded_node_index); }
 
        }
 

	
 
        let (_, progress_literal_1) = self.apply_polydata_equal2_constraint(
 
            ctx, node_index, node.expr_id, "union's",
 
            PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section
 
        )?;
 

	
 
        // Propagate progress in the polymorphic variables to the expressions
 
        // that constitute the union literal.
 
        for (embedded_index, embedded_node_index) in rule.element_indices.iter().copied().enumerate() {
 
            let progress_embedded = self.apply_polydata_polyvar_constraint(
 
                ctx, node_index, PolyDataTypeIndex::Associated(embedded_index),
 
                embedded_node_index, &poly_progress_section
 
            );
 

	
 
            if progress_embedded { self.queue_node(embedded_node_index); }
 
        }
 

	
 
        let progress_literal_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Returned, node_index, &poly_progress_section
 
        );
 

	
 
        if progress_literal_1 || progress_literal_2 { self.queue_node_parent(node_index); }
 

	
 
        poly_progress_section.forget();
 
        self.finish_polydata_constraint(node_index);
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_literal_array(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_literal_array();
 

	
 
        // Apply equality rule to all of the elements that form the array
 
        let argument_node_indices = self.index_buffer.start_section_initialized(&rule.element_indices);
 
        let mut argument_progress_section = self.bool_buffer.start_section();
 
        self.apply_equal_n_constraint(ctx, node_index, &argument_node_indices, &mut argument_progress_section)?;
 

	
 
        debug_assert_eq!(argument_node_indices.len(), argument_progress_section.len());
 
        for argument_index in 0..argument_node_indices.len() {
 
            let argument_node_index = argument_node_indices[argument_index];
 
            let progress = argument_progress_section[argument_index];
 

	
 
            if progress { self.queue_node(argument_node_index); }
 
        }
 

	
 
        // If elements are of type `T`, then the array is of type `Array<T>`, so:
 
        let mut progress_literal = self.apply_template_constraint(ctx, node_index, &ARRAY_TEMPLATE)?;
 
        if argument_node_indices.len() != 0 {
 
            let argument_node_index = argument_node_indices[0];
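            // (the `1` below presumably points just past the outer `Array` part
            // of this node's type, i.e. at the element subtree, which is equated
            // with the argument's full type at offset 0)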
 
            let (progress_literal_inner, progress_argument) = self.apply_equal2_constraint(
 
                ctx, node_index, node_index, 1, argument_node_index, 0
 
            )?;
 

	
 
            progress_literal = progress_literal || progress_literal_inner;
 

	
 
            // It is possible that the `Array<T>` has a more progressed `T` than
 
            // the arguments. So in case we progressed our argument type we
 
            // simply queue this rule again
 
            if progress_argument { self.queue_node(node_index); }
 
        }
 

	
 
        argument_node_indices.forget();
 
        argument_progress_section.forget();
 

	
 
        if progress_literal { self.queue_node_parent(node_index); }
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_literal_tuple(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_literal_tuple();
 
        
 

	
 
        let element_indices = self.index_buffer.start_section_initialized(&rule.element_indices);
 

	
 
        // Check if we need to apply the initial tuple template type. Note that
 
        // this is a hacky check.
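        // e.g. for a three-element tuple the template built below is:
        //     [ Tuple(3), Unknown, Unknown, Unknown ]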
 
        let num_tuple_elements = rule.element_indices.len();
 
        let mut template_type = Vec::with_capacity(num_tuple_elements + 1); // TODO: @performance
 
        template_type.push(InferenceTypePart::Tuple(num_tuple_elements as u32));
 
        for _ in 0..num_tuple_elements {
 
            template_type.push(InferenceTypePart::Unknown);
 
        }
 

	
 
        let mut progress_literal = self.apply_template_constraint(ctx, node_index, &template_type)?;
 

	
 
        // Because of the (early returning error) check above, we're certain
 
        // that the tuple has the correct number of elements. Now match each
 
        // element expression type to the tuple subtype.
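        // (Illustrative, assuming the prefix-order layout of type parts: a
        // two-element tuple of an array-of-uint8 and a bool would be stored as
        //     [ Tuple(2), Array, UInt8, Bool ]
        // so the first element's subtree starts at index 1 and the second one
        // starts at index 3.)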
 
        let mut element_subtree_start_index = 1; // first element is InferenceTypePart::Tuple
 
        for element_node_index in element_indices.iter_copied() {
 
            let (progress_literal_element, progress_element) = self.apply_equal2_constraint(
 
                ctx, node_index, node_index, element_subtree_start_index, element_node_index, 0
 
            )?;
 

	
 
            progress_literal = progress_literal || progress_literal_element;
 
            if progress_element {
 
                self.queue_node(element_node_index);
 
            }
 

	
 
            // Prepare for next element
 
            let subtree_end_index = InferenceType::find_subtree_end_idx(&node.expr_type.parts, element_subtree_start_index);
 
            element_subtree_start_index = subtree_end_index;
 
        }
 
        debug_assert_eq!(element_subtree_start_index, node.expr_type.parts.len());
 

	
 
        if progress_literal { self.queue_node_parent(node_index); }
 

	
 
        element_indices.forget();
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_cast_expr(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_cast_expr();
 
        let subject_index = rule.subject_index;
 
        let subject = &self.infer_nodes[subject_index];
 

	
 
        // Make sure that both types are completely done. Note: a cast
 
        // expression cannot really infer anything between the subject and the
 
        // output type, we can only make sure that, at the end, the cast is
 
        // correct.
 
        if !node.expr_type.is_done || !subject.expr_type.is_done {
 
            return Ok(());
 
        }
 

	
 
        // Both types are known; currently the only valid casts are bool,
 
        // integer and character casts.
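        // In other words: a cast between any two of the bool/character/integer
        // types is accepted, as is a cast between two structurally identical
        // types; everything else is rejected below.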
 
        fn is_bool_int_or_char(parts: &[InferenceTypePart]) -> bool {
 
            let mut index = 0;
 
            while index < parts.len() {
 
                let part = &parts[index];
 
                if !part.is_marker() { break; }
 
                index += 1;
 
            }
 

	
 
            debug_assert!(index != parts.len());
 
            let part = &parts[index];
 
            if (
 
                *part == InferenceTypePart::Bool ||
 
                *part == InferenceTypePart::Character ||
 
                part.is_concrete_integer()
 
            ) {
 
                debug_assert!(index + 1 == parts.len()); // type is done, first part does not have children -> must be at end
 
                return true;
 
            } else {
 
                return false;
 
            }
 
        }
 

	
 
        let is_valid = if is_bool_int_or_char(&node.expr_type.parts) && is_bool_int_or_char(&subject.expr_type.parts) {
 
            true
 
        } else if InferenceType::check_subtrees(&node.expr_type.parts, 0, &subject.expr_type.parts, 0) {
 
            // again: check_subtrees is sufficient since both types are done
 
            true
 
        } else {
 
            false
 
        };
 

	
 
        if !is_valid {
 
            let cast_expr = &ctx.heap[node.expr_id];
 
            let subject_expr = &ctx.heap[subject.expr_id];
 
            return Err(ParseError::new_error_str_at_span(
 
                &ctx.module().source, cast_expr.full_span(), "invalid casting operation"
 
            ).with_info_at_span(
 
                &ctx.module().source, subject_expr.full_span(), format!(
 
                    "cannot cast the argument type '{}' to the type '{}'",
 
                    subject.expr_type.display_name(&ctx.heap),
 
                    node.expr_type.display_name(&ctx.heap)
 
                )
 
            ));
 
        }
 

	
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_call_expr(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_call_expr();
 

	
 
        let mut poly_progress_section = self.poly_progress_buffer.start_section();
 
        let argument_node_indices = self.index_buffer.start_section_initialized(&rule.argument_indices);
 

	
 
        // Perform inference on the arguments to the function, while trying to figure
 
        // out the polymorphic variables
 
        for (argument_index, argument_node_index) in argument_node_indices.iter_copied().enumerate() {
 
            let argument_expr_id = self.infer_nodes[argument_node_index].expr_id;
 
            let (_, progress_argument) = self.apply_polydata_equal2_constraint(
 
                ctx, node_index, argument_expr_id, "argument's",
 
                PolyDataTypeIndex::Associated(argument_index), 0,
 
                argument_node_index, 0, &mut poly_progress_section
 
            )?;
 

	
 
            if progress_argument { self.queue_node(argument_node_index); }
 
        }
 

	
 
        // Same for the return type.
 
        let call_expr_id = node.expr_id;
 
        let (_, progress_call_1) = self.apply_polydata_equal2_constraint(
 
            ctx, node_index, call_expr_id, "return",
 
            PolyDataTypeIndex::Returned, 0,
 
            node_index, 0, &mut poly_progress_section
 
        )?;
 

	
 
        // We will now apply any progression in the polymorphic variable type
 
        // back to the arguments.
 
        for (argument_index, argument_node_index) in argument_node_indices.iter_copied().enumerate() {
 
            let progress_argument = self.apply_polydata_polyvar_constraint(
 
                ctx, node_index, PolyDataTypeIndex::Associated(argument_index),
 
                argument_node_index, &poly_progress_section
 
            );
 

	
 
            if progress_argument { self.queue_node(argument_node_index); }
 
        }
 

	
 
        // And back to the return type.
 
        let progress_call_2 = self.apply_polydata_polyvar_constraint(
 
            ctx, node_index, PolyDataTypeIndex::Returned,
 
            node_index, &poly_progress_section
 
        );
 

	
 
        if progress_call_1 || progress_call_2 { self.queue_node_parent(node_index); }
 

	
 
        poly_progress_section.forget();
 
        argument_node_indices.forget();
 

	
 
        self.finish_polydata_constraint(node_index);
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_variable_expr(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &mut self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_variable_expr();
 
        let var_data_index = rule.var_data_index;
 
        let var_data = &mut self.var_data[var_data_index];
 

	
 
        // Apply inference to the shared variable type and the expression type
 
        let shared_type: *mut _ = &mut var_data.var_type;
 
        let expr_type: *mut _ = &mut node.expr_type;
 

	
 
        let inference_result = unsafe {
 
            // safety: vectors exist in different storage vectors, so cannot alias
 
            InferenceType::infer_subtrees_for_both_types(shared_type, 0, expr_type, 0)
 
        };
 

	
 
        if inference_result == DualInferenceResult::Incompatible {
 
            return Err(self.construct_variable_type_error(ctx, node_index));
 
        }
 

	
 
        let progress_var_data = inference_result.modified_lhs();
 
        let progress_expr = inference_result.modified_rhs();
 

	
 
        if progress_var_data {
 
            // We progressed the type of the shared variable, so propagate this
 
            // to all associated variable expressions (and any linked variable).
 
            for other_node_index in var_data.used_at.iter().copied() {
 
                if other_node_index != node_index {
 
                    self.queue_node(other_node_index);
 
                }
 
            }
 

	
 
            if let Some(linked_var_data_index) = var_data.linked_var {
 
                // Only perform one-way inference, progressing the linked
 
                // variable.
 
                // note: because this "linking" is used only for channels, we
 
                // will start inference one level below the top-level in the
 
                // type tree (i.e. ensure `T` in `in<T>` and `out<T>` is equal).
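                // (e.g., hypothetically, once one end of a channel is known to
                // carry uint32 messages, the element type of the other end is
                // progressed towards uint32 as well.)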
 
                debug_assert!(
 
                    var_data.var_type.parts[0] == InferenceTypePart::Input ||
 
                    var_data.var_type.parts[0] == InferenceTypePart::Output
 
                );
 
                let this_var_type: *const _ = &var_data.var_type;
 
                let linked_var_data = &mut self.var_data[linked_var_data_index];
 
                debug_assert!(
 
                    linked_var_data.var_type.parts[0] == InferenceTypePart::Input ||
 
                    linked_var_data.var_type.parts[0] == InferenceTypePart::Output
 
                );
 

	
 
                // safety: by construction var_data_index and linked_var_data_index cannot be the
 
                // same, hence we're not aliasing here.
 
                let inference_result = InferenceType::infer_subtree_for_single_type(
 
                    &mut linked_var_data.var_type, 1,
 
                    unsafe{ &(*this_var_type).parts }, 1, false
 
                );
 
                match inference_result {
 
                    SingleInferenceResult::Modified => {
 
                        for used_at in linked_var_data.used_at.iter().copied() {
 
                            self.queue_node(used_at);
 
                        }
 
                    },
 
                    SingleInferenceResult::Unmodified => {},
 
                    SingleInferenceResult::Incompatible => {
 
                        let var_data_this = &self.var_data[var_data_index];
 
                        let var_decl_this = &ctx.heap[var_data_this.var_id];
 
                        let var_data_linked = &self.var_data[linked_var_data_index];
 
                        let var_decl_linked = &ctx.heap[var_data_linked.var_id];
 

	
 
                        return Err(ParseError::new_error_at_span(
 
                            &ctx.module().source, var_decl_this.identifier.span, format!(
 
                                "conflicting types for this channel, this port has type '{}'",
 
                                var_data_this.var_type.display_name(&ctx.heap)
 
                            )
 
                        ).with_info_at_span(
 
                            &ctx.module().source, var_decl_linked.identifier.span, format!(
 
                                "while this port has type '{}'",
 
                                var_data_linked.var_type.display_name(&ctx.heap)
 
                            )
 
                        ));
 
                    }
 
                }
 
            }
 
        }
 

	
 
        if progress_expr { self.queue_node_parent(node_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_template(&mut self, ctx: &Ctx, node_index: InferNodeIndex, application: InferenceRuleTemplateApplication, template: &[InferenceTypePart]) -> Result<bool, ParseError> {
 
        use InferenceRuleTemplateApplication as TA;
 

	
 
        match application {
 
            TA::None => Ok(false),
 
            TA::Template => self.apply_template_constraint(ctx, node_index, template),
 
            TA::Forced => self.apply_forced_constraint(ctx, node_index, template),
 
        }
 
    }
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, idx: i32) -> Result<(), ParseError> {
 
        let id = self.infer_nodes[idx as usize].expr_id;
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let id = expr.this;
 
                self.progress_assignment_expr(ctx, id)
 
            },
 
            Expression::Binding(expr) => {
 
                let id = expr.this;
 
                self.progress_binding_expr(ctx, id)
 
            },
 
            Expression::Conditional(expr) => {
 
@@ -3990,49 +4350,49 @@ impl PassTyping {
 
    /// The "parent" `expr_id` is provided to construct errors.
 
    fn apply_equal2_constraint(
 
        &mut self, ctx: &Ctx, node_index: InferNodeIndex,
 
        arg1_index: InferNodeIndex, arg1_start_idx: usize,
 
        arg2_index: InferNodeIndex, arg2_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError> {
 
        let arg1_type: *mut _ = &mut self.infer_nodes[arg1_index].expr_type;
 
        let arg2_type: *mut _ = &mut self.infer_nodes[arg2_index].expr_type;
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            arg1_type, arg1_start_idx,
 
            arg2_type, arg2_start_idx
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, node_index, arg1_index, arg2_index));
 
        }
 

	
 
        Ok((infer_res.modified_lhs(), infer_res.modified_rhs()))
 
    }
 

	
 
    /// Applies an equal2 constraint between a member of the `PolyData` struct,
 
    /// and another inferred type. If any progress is made in the `PolyData`
 
    /// struct then the affected polymorphic variables are updated as well.
 
    ///
 
    /// Because a lot of types/expressions are involved in polymorphic type
 
    /// inference, some explanation: "outer_node" refers to the main expression
 
    /// that is the root cause of type inference (e.g. a struct literal
 
    /// expression, or a tuple member select expression). Associated with that
 
    /// outer node is `PolyData`, so that is what the "poly_data" variables
 
    /// are referring to. We are applying equality between a "poly_data" type
 
    /// and an associated expression (not necessarily the "outer_node", e.g.
 
    /// the expression that constructs the value of a struct field). Hence the
 
    /// "associated" variables.
 
    ///
 
    /// Finally, when an error occurs we'll first show the outer node's
 
    /// location. As info, the `error_location_expr_id` span is shown,
 
    /// indicating that the "`error_type_name` type has been resolved to
 
    /// `outer_node_type`, but this expression has been resolved to
 
    /// `associated_node_type`".
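    ///
    /// (Illustrative, hypothetical example: for a call `make_pair<T>(x, y)` the
    /// call expression is the "outer_node", the parameter and return types in
    /// its `PolyData` are the "poly_data" types, and each argument expression
    /// is an "associated" node matched against the corresponding parameter
    /// type.)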
 
    fn apply_polydata_equal2_constraint(
 
        &mut self, ctx: &Ctx,
 
        outer_node_index: InferNodeIndex, error_location_expr_id: ExpressionId, error_type_name: &str,
 
        poly_data_type_index: PolyDataTypeIndex, poly_data_start_index: usize,
 
        associated_node_index: InferNodeIndex, associated_node_start_index: usize,
 
        poly_progress_section: &mut ScopedSection<u32>,
 
    ) -> Result<(bool, bool), ParseError> {
 
        let poly_data_index = self.infer_nodes[outer_node_index].poly_data_index;
 
        let poly_data_type = self.poly_data[poly_data_index].get_type_mut(poly_data_type_index);
 
        let associated_type: *mut _ = &mut self.infer_nodes[associated_node_index].expr_type;
 
@@ -4103,93 +4463,108 @@ impl PassTyping {
 
    ///
 
    /// At this point one should call this function to apply the progress in
 
    /// these polymorphic variables back onto the types that are functions of
 
    /// these polymorphic variables.
 
    ///
 
    /// An example: a struct literal with a polymorphic variable `T` may have
 
    /// two fields `foo` and `bar` each with different types that are a function
 
    /// of the polymorhic variable `T`. If the expressions constructing the
 
    /// value for the field `foo` causes the type `T` to progress, then we can
 
    /// also progress the type of the expression that constructs `bar`.
 
    ///
 
    /// And so we have `outer_node_index` + `poly_data_type_index` pointing to
 
    /// the appropriate type in the `PolyData` struct. Which will be updated
 
    /// first using the polymorphic variables. If we happen to have updated that
 
    /// type, then we should also progress the associated expression, hence the
 
    /// `associated_node_index`.
 
    fn apply_polydata_polyvar_constraint(
 
        &mut self, ctx: &Ctx,
 
        outer_node_index: InferNodeIndex, poly_data_type_index: PolyDataTypeIndex,
 
        associated_node_index: InferNodeIndex, poly_progress_section: &ScopedSection<u32>
 
    ) -> bool {
 
        let poly_data_index = self.infer_nodes[outer_node_index].poly_data_index;
 
        let poly_data = &mut self.poly_data[poly_data_index];
 

	
 
        // Early exit, most common case (literals or function calls which are
 
        // actually not polymorphic)
 
        if !poly_data.first_rule_application && poly_progress_section.len() == 0 {
 
            return false;
 
        }
 

	
 
        // safety: we're borrowing from two distinct fields, so should be fine
 
        let poly_data_type = poly_data.get_type_mut(poly_data_type_index);
 
        let mut last_start_index = 0;
 
        let mut modified_poly_type = false;
 

	
 
        while let Some((poly_var_index, poly_var_start_index)) = poly_data_type.find_marker(last_start_index) {
 
            let poly_var_end_index = InferenceType::find_subtree_end_idx(&poly_data_type.parts, poly_var_start_index);
 
            if poly_data.first_rule_application || poly_progress_section.contains(&poly_var_index) {
 
                // We have updated this polymorphic variable, so try updating it
 
                // in the PolyData type
 
                let modified_in_poly_data = match InferenceType::infer_subtree_for_single_type(
 
                    poly_data_type, poly_var_start_index, &poly_data.poly_vars[poly_var_index as usize].parts, 0, false
 
                ) {
 
                    SingleInferenceResult::Modified => true,
 
                    SingleInferenceResult::Unmodified => false,
 
                    SingleInferenceResult::Incompatible => {
 
                        // practically impossible: before calling this function we gather all the
 
                        // data on the polymorphic variables from the associated expressions. So if
 
                        // the polymorphic variables in those expressions were not mutually
 
                        // compatible, we must have encountered that error already.
 
                        unreachable!()
 
                    },
 
                };
 

	
 
                modified_poly_type = modified_poly_type || modified_in_poly_data;
 
            }
 

	
 
            last_start_index = poly_var_end_index;
 
        }
 

	
 
        if modified_poly_type {
 
            let associated_type = &mut self.infer_nodes[associated_node_index].expr_type;
 
            match InferenceType::infer_subtree_for_single_type(
 
                associated_type, 0, &poly_data_type.parts, 0, true
 
            ) {
 
                SingleInferenceResult::Modified => return true,
 
                SingleInferenceResult::Unmodified => return false,
 
                SingleInferenceResult::Incompatible => unreachable!(), // same as above
 
            }
 
        } else {
 
            // Did not update associated type
 
            return false;
 
        }
 
    }
 

	
 
    /// Should be called after completing one full round of applying polydata
 
    /// constraints.
 
    fn finish_polydata_constraint(&mut self, outer_node_index: InferNodeIndex) {
 
        let poly_data_index = self.infer_nodes[outer_node_index].poly_data_index;
 
        let poly_data = &mut self.poly_data[poly_data_index];
 
        poly_data.first_rule_application = false;
 
    }
 

	
 
    /// Applies an equal2 constraint between a signature type (e.g. a function
 
    /// argument or struct field) and an expression whose type should match that
 
    /// signature. If we make progress on the signature, then we try to see if
 
    /// any of the embedded polymorphic types can be progressed.
 
    ///
 
    /// `outer_expr_id` is the main expression we're progressing (e.g. a 
 
    /// function call), while `expr_id` is the embedded expression we're 
 
    /// matching against the signature. `expression_type` and 
 
    /// `expression_start_idx` belong to `expr_id`.
 
    fn apply_equal2_signature_constraint(
 
        ctx: &Ctx, outer_expr_id: ExpressionId, expr_id: Option<ExpressionId>,
 
        polymorph_data: &mut PolyData, polymorph_progress: &mut HashSet<u32>,
 
        signature_type: *mut InferenceType, signature_start_idx: usize,
 
        expression_type: *mut InferenceType, expression_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError> {
 
        // Safety: all pointers distinct
 

	
 
        // Infer the signature and expression type
 
        let infer_res = unsafe { 
 
            InferenceType::infer_subtrees_for_both_types(
 
                signature_type, signature_start_idx,
 
                expression_type, expression_start_idx
 
            )
 
        };
 
@@ -4939,48 +5314,71 @@ impl PassTyping {
 
        ).with_info_at_span(
 
            &ctx.module().source, arg2.full_span(), format!(
 
                "But this expression has type '{}'",
 
                arg2_node.expr_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_template_type_error(
 
        &self, ctx: &Ctx, node_index: InferNodeIndex, template: &[InferenceTypePart]
 
    ) -> ParseError {
 
        let node = &self.infer_nodes[node_index];
 
        let expr = &ctx.heap[node.expr_id];
 
        let expr_type = &node.expr_type;
 

	
 
        return ParseError::new_error_at_span(
 
            &ctx.module().source, expr.full_span(), format!(
 
                "incompatible types: got a '{}' but expected a '{}'",
 
                expr_type.display_name(&ctx.heap), 
 
                InferenceType::partial_display_name(&ctx.heap, template)
 
            )
 
        )
 
    }
 

	
 
    fn construct_variable_type_error(
 
        &self, ctx: &Ctx, node_index: InferNodeIndex,
 
    ) -> ParseError {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_variable_expr();
 

	
 
        let var_data = &self.var_data[rule.var_data_index];
 
        let var_decl = &ctx.heap[var_data.var_id];
 
        let var_expr = &ctx.heap[node.expr_id];
 

	
 
        return ParseError::new_error_at_span(
 
            &ctx.module().source, var_decl.identifier.span, format!(
 
                "conflicting types for this variable, previously assigned the type '{}'",
 
                var_data.var_type.display_name(&ctx.heap)
 
            )
 
        ).with_info_at_span(
 
            &ctx.module().source, var_expr.full_span(), format!(
 
                "but inferred to have incompatible type '{}' here",
 
                node.expr_type.display_name(&ctx.heap)
 
            )
 
        );
 
    }
 

	
 
    /// Constructs a human interpretable error in the case that type inference
 
    /// on a polymorphic variable to a function call or literal construction 
 
    /// failed. This may only be caused by a pair of inference types (which may 
 
    /// come from arguments or the return type) having two different inferred 
 
    /// values for that polymorphic variable.
 
    ///
 
    /// So we find this pair and construct the error using it.
 
    ///
 
    /// We assume that the expression is a function call or a struct literal,
 
    /// and that an actual error has occurred.
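    ///
    /// (Hypothetical example: in a call `push<T>(list, value)` the first
    /// argument may have fixed `T` to a uint32 while the second argument was
    /// inferred to be a string; that conflicting pair is what gets reported.)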
 
    fn construct_poly_arg_error(
 
        ctx: &Ctx, poly_data: &PolyData, expr_id: ExpressionId
 
    ) -> ParseError {
 
        // Helper function to check for polymorph mismatch between two inference
 
        // types.
 
        fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(u32, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            if !type_a.has_marker || !type_b.has_marker {
 
                return None
 
            }
 

	
 
            for (marker_a, section_a) in type_a.marker_iter() {
 
                for (marker_b, section_b) in type_b.marker_iter() {
 
                    if marker_a != marker_b {
 
                        // Not the same polymorphic variable
 
@@ -5173,48 +5571,63 @@ impl PassTyping {
 
                            "The polymorphic variable has type '{}' (which might have been partially inferred) while the argument inferred it to '{}'",
 
                            InferenceType::partial_display_name(&ctx.heap, poly_section),
 
                            InferenceType::partial_display_name(&ctx.heap, arg_section)
 
                        )
 
                    );
 
            }
 
        }
 

	
 
        if let Some((poly_idx, poly_section, ret_section)) = has_explicit_poly_mismatch(&poly_data.poly_vars, &poly_data.returned) {
 
            return construct_main_error(ctx, poly_data, poly_idx, expr)
 
                .with_info_at_span(
 
                    &ctx.module().source, expr.full_span(), format!(
 
                        "The polymorphic variable has type '{}' (which might have been partially inferred) while the {} inferred it to '{}'",
 
                        InferenceType::partial_display_name(&ctx.heap, poly_section),
 
                        expr_return_name,
 
                        InferenceType::partial_display_name(&ctx.heap, ret_section)
 
                    )
 
                )
 
        }
 

	
 
        unreachable!("construct_poly_arg_error without actual error found?")
 
    }
 
}
 

	
 
fn get_tuple_size_from_inference_type(inference_type: &InferenceType) -> Result<Option<u32>, ()> {
 
    for part in &inference_type.parts {
 
        if part.is_marker() { continue; }
 
        if !part.is_concrete() { break; }
 

	
 
        if let InferenceTypePart::Tuple(size) = part {
 
            return Ok(Some(*size));
 
        } else {
 
            return Err(()); // not a tuple!
 
        }
 
    }
 

	
 
    return Ok(None);
 
}
 

	
 
#[cfg(test)]
 
mod tests {
 
    use super::*;
 
    use crate::protocol::arena::Id;
 
    use InferenceTypePart as ITP;
 
    use InferenceType as IT;
 

	
 
    #[test]
 
    fn test_single_part_inference() {
 
        // lhs argument inferred from rhs
 
        let pairs = [
 
            (ITP::NumberLike, ITP::UInt8),
 
            (ITP::IntegerLike, ITP::SInt32),
 
            (ITP::Unknown, ITP::UInt64),
 
            (ITP::Unknown, ITP::Bool)
 
        ];
 
        for (lhs, rhs) in pairs.iter() {
 
            // Using infer-both
 
            let mut lhs_type = IT::new(false, false, vec![lhs.clone()]);
 
            let mut rhs_type = IT::new(false, true, vec![rhs.clone()]);
 
            let result = unsafe{ IT::infer_subtrees_for_both_types(
 
                &mut lhs_type, 0, &mut rhs_type, 0
 
            ) };
 
            assert_eq!(DualInferenceResult::First, result);