Changeset - b3a68f0f8b36
mh <contact@maxhenger.nl> - 2022-02-18 14:50:05
WIP: Reimplementing indexing of inference rules
2 files changed with 630 insertions and 168 deletions:
src/macros.rs
 
@@ -19,3 +19,16 @@ macro_rules! dbg_code {
 
        #[cfg(debug_assertions)] $code
 
    }
 
}
 

	
 
// Given a function name, return type and variant, generates the oh-so-common
 
// `union_value.as_variant()` method.
 
macro_rules! union_cast_method_impl {
 
    ($func_name:ident, $ret_type:ty, $variant:path) => {
 
        fn $func_name(&self) -> &$ret_type {
 
            match self {
 
                $variant(content) => return content,
 
                _ => unreachable!(),
 
            }
 
        }
 
    }
 
}
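 
// For illustration only (mirroring a call site in pass_typing.rs): an
// invocation such as
//   union_cast_method_impl!(as_bi_equal, InferenceRuleBiEqual, InferenceRule::BiEqual);
// expands to roughly:
//   fn as_bi_equal(&self) -> &InferenceRuleBiEqual {
//       match self {
//           InferenceRule::BiEqual(content) => return content,
//           _ => unreachable!(),
//       }
//   }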
 
\ No newline at end of file
src/protocol/parser/pass_typing.rs
 
@@ -808,7 +808,7 @@ enum SingleInferenceResult {
 
// PassTyping - Public Interface
 
// -----------------------------------------------------------------------------
 

	
 
type InferIndex = usize;
 
type InferNodeIndex = usize;
 
type ExtraIndex = usize;
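 
// `InferNodeIndex` indexes into `PassTyping::infer_nodes`, `ExtraIndex` into
// `PassTyping::extra_data`.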
 

	
 
enum DefinitionType{
 
@@ -841,24 +841,12 @@ struct InferenceNode {
 
    expr_type: InferenceType,       // result type from expression
 
    expr_id: ExpressionId,          // expression that is evaluated
 
    inference_rule: InferenceRule,
 
    field_or_monomorph_idx: i32,    // index of field
 
    extra_data_idx: i32,            // index of extra data needed for inference
 
    parent_index: Option<InferNodeIndex>,
 
    field_or_monomorph_index: i32,    // index of field/monomorph (-1 while undetermined)
 
    extra_index: ExtraIndex,            // index of extra data needed for inference
 
    type_id: TypeId,                // when applicable indexes into type table
 
}
 

	
 
impl Default for InferenceNode {
 
    fn default() -> Self {
 
        Self{
 
            expr_type: InferenceType::default(),
 
            expr_id: ExpressionId::new_invalid(),
 
            inference_rule: InferenceRule::Noop,
 
            field_or_monomorph_idx: -1,
 
            extra_data_idx: -1,
 
            type_id: TypeId::new_invalid(),
 
        }
 
    }
 
}
 

	
 
/// Inferencing rule to apply. Some of these are reasonably generic. Other ones
 
/// require so much custom logic that we'll not try to come up with an
 
/// abstraction.
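 
/// Each variant carries the inference-node indices (`InferNodeIndex`) of the
/// subexpressions it constrains, so a progression function can reach its
/// arguments directly through `infer_nodes` instead of looking them up via
/// the AST.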
 
@@ -871,15 +859,28 @@ enum InferenceRule {
 
    Concatenate(InferenceRuleTwoArgs),
 
    IndexingExpr(InferenceRuleIndexingExpr),
 
    SlicingExpr(InferenceRuleSlicingExpr),
 
    SelectExpr(InferenceRuleSelectExpr),
 
    LiteralStruct,
 
    LiteralEnum,
 
    LiteralUnion,
 
    LiteralArray,
 
    LiteralTuple,
 
    CastExpr,
 
    CallExpr,
 
    VariableExpr,
 
    SelectStructField(InferenceRuleSelectStructField),
 
    SelectTupleMember(InferenceRuleSelectTupleMember),
 
    LiteralStruct(InferenceRuleLiteralStruct),
 
    LiteralEnum(InferenceRuleLiteralEnum),
 
    LiteralUnion(InferenceRuleLiteralUnion),
 
    LiteralArray(InferenceRuleLiteralArray),
 
    LiteralTuple(InferenceRuleLiteralTuple),
 
    CastExpr(InferenceRuleCastExpr),
 
    CallExpr(InferenceRuleCallExpr),
 
    VariableExpr(InferenceRuleVariableExpr),
 
}
 

	
 
impl InferenceRule {
 
    union_cast_method_impl!(as_mono_template, InferenceRuleTemplate, InferenceRule::MonoTemplate);
 
    union_cast_method_impl!(as_bi_equal, InferenceRuleBiEqual, InferenceRule::BiEqual);
 
    union_cast_method_impl!(as_tri_equal_args, InferenceRuleTriEqualArgs, InferenceRule::TriEqualArgs);
 
    union_cast_method_impl!(as_tri_equal_all, InferenceRuleTriEqualAll, InferenceRule::TriEqualAll);
 
    union_cast_method_impl!(as_concatenate, InferenceRuleTwoArgs, InferenceRule::Concatenate);
 
    union_cast_method_impl!(as_indexing_expr, InferenceRuleIndexingExpr, InferenceRule::IndexingExpr);
 
    union_cast_method_impl!(as_slicing_expr, InferenceRuleSlicingExpr, InferenceRule::SlicingExpr);
 
    union_cast_method_impl!(as_select_struct_field, InferenceRuleSelectStructField, InferenceRule::SelectStructField);
 
    union_cast_method_impl!(as_select_tuple_member, InferenceRuleSelectTupleMember, InferenceRule::SelectTupleMember);
 
}
 

	
 
struct InferenceRuleTemplate {
 
@@ -910,49 +911,104 @@ impl InferenceRuleTemplate {
 
    }
 
}
 

	
 
#[derive(Clone, Copy)]
 
enum InferenceRuleTemplateApplication {
 
    None, // do not apply template, silly, but saves some bytes
 
    Forced,
 
    Template,
 
}
 

	
 
/// Type equality applied to 'self' and the argument. An optional template will
 
/// be applied to 'self' first. Example: "bitwise not"
 
struct InferenceRuleBiEqual {
 
    template: InferenceRuleTemplate,
 
    argument_index: InferIndex,
 
    argument_index: InferNodeIndex,
 
}
 

	
 
/// Type equality applied to two arguments. Template can be applied to 'self'
 
/// (generally forced, since this rule does not apply a type equality constraint
 
/// to 'self') and the two arguments. Example: "equality operator"
 
struct InferenceRuleTriEqualArgs {
 
    argument_template: InferenceRuleTemplate,
 
    result_template: InferenceRuleTemplate,
 
    argument1_index: InferIndex,
 
    argument2_index: InferIndex,
 
    argument1_index: InferNodeIndex,
 
    argument2_index: InferNodeIndex,
 
}
 

	
 
/// Type equality applied to 'self' and two arguments. Template may be
 
/// optionally applied to 'self'. Example: "addition operator"
 
struct InferenceRuleTriEqualAll {
 
    template: InferenceRuleTemplate,
 
    argument1_index: InferIndex,
 
    argument2_index: InferIndex,
 
    argument1_index: InferNodeIndex,
 
    argument2_index: InferNodeIndex,
 
}
 

	
 
// generic two-argument (excluding expression itself) inference rule arguments
 
/// Information for an inference rule that is applied to 'self' and two
 
/// arguments, see `InferenceRule` for its meaning.
 
struct InferenceRuleTwoArgs {
 
    argument1_index: InferIndex,
 
    argument2_index: InferIndex,
 
    argument1_index: InferNodeIndex,
 
    argument2_index: InferNodeIndex,
 
}
 

	
 
struct InferenceRuleIndexingExpr {
 
    subject_index: InferIndex,
 
    index_index: InferIndex,
 
    subject_index: InferNodeIndex,
 
    index_index: InferNodeIndex,
 
}
 

	
 
struct InferenceRuleSlicingExpr {
 
    subject_index: InferIndex,
 
    from_index: InferIndex,
 
    to_index: InferIndex,
 
    subject_index: InferNodeIndex,
 
    from_index: InferNodeIndex,
 
    to_index: InferNodeIndex,
 
}
 

	
 
struct InferenceRuleSelectStructField {
 
    subject_index: InferNodeIndex,
 
    selected_field: Identifier,
 
}
 

	
 
struct InferenceRuleSelectTupleMember {
 
    subject_index: InferNodeIndex,
 
    selected_index: u64,
 
}
 

	
 
struct InferenceRuleLiteralStruct {
 
    element_indices: Vec<InferNodeIndex>,
 
}
 

	
 
struct InferenceRuleLiteralEnum {}
 

	
 
struct InferenceRuleLiteralUnion {
 
    element_indices: Vec<InferNodeIndex>
 
}
 

	
 
struct InferenceRuleLiteralArray {
 
    element_indices: Vec<InferNodeIndex>
 
}
 

	
 
struct InferenceRuleLiteralTuple {
 
    element_indices: Vec<InferNodeIndex>
 
}
 

	
 
struct InferenceRuleSelectExpr {
 
    subject_index: InferIndex,
 
struct InferenceRuleCastExpr {
 
    subject_index: InferNodeIndex,
 
}
 

	
 
struct InferenceRuleCallExpr {
 
    argument_indices: Vec<InferNodeIndex>
 
}
 

	
 
/// Data associated with a variable expression: an expression that reads the
 
/// value from a variable.
 
struct InferenceRuleVariableExpr {
 
    variable_index: InferNodeIndex,
 
}
 

	
 
/// Data associated with a variable: keeping track of all the variable
 
/// expressions in which it is used.
 
struct InferenceRuleVariable {
 
    used_at_indices: Vec<InferNodeIndex>,
 
}
 

	
 
/// This particular visitor will recurse depth-first into the AST and ensures
 
@@ -962,11 +1018,14 @@ pub(crate) struct PassTyping {
 
    reserved_type_id: TypeId,
 
    definition_type: DefinitionType,
 
    poly_vars: Vec<ConcreteType>,
 
    // Temporary variables during construction of inference rules
 
    parent_index: Option<InferNodeIndex>,
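 
    //   While visiting an expression the (sketched) pattern is:
    //     let old_parent = self.parent_index.replace(self_index);
    //     /* ... visit child expressions, which record `self_index` as parent ... */
    //     self.parent_index = old_parent;
    //   so that every inference node created for a child knows its parent node.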
 
    // Buffers for iteration over various types
 
    var_buffer: ScopedBuffer<VariableId>,
 
    expr_buffer: ScopedBuffer<ExpressionId>,
 
    stmt_buffer: ScopedBuffer<StatementId>,
 
    bool_buffer: ScopedBuffer<bool>,
 
    index_buffer: ScopedBuffer<usize>,
 
    // Mapping from parser type to inferred type. We attempt to continue to
 
    // specify these types until we're stuck or we've fully determined the type.
 
    var_types: HashMap<VariableId, VarData>,            // types of variables
 
@@ -980,7 +1039,6 @@ pub(crate) struct PassTyping {
 
// TODO: @Rename, this is used for a lot of type inferencing. It seems like
 
//  there is a different underlying architecture waiting to surface.
 
struct ExtraData {
 
    expr_id: ExpressionId, // the expression with which this data is associated
 
    definition_id: DefinitionId, // the definition, only used for user feedback
 
    /// Progression of polymorphic variables (if any)
 
    poly_vars: Vec<InferenceType>,
 
@@ -1026,6 +1084,7 @@ impl PassTyping {
 
            reserved_type_id: TypeId::new_invalid(),
 
            definition_type: DefinitionType::Function(FunctionDefinitionId::new_invalid()),
 
            poly_vars: Vec::new(),
 
            parent_index: None,
 
            var_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_LARGE),
 
            expr_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_LARGE),
 
            stmt_buffer: ScopedBuffer::with_capacity(BUFFER_INIT_CAP_LARGE),
 
@@ -1115,7 +1174,7 @@ impl PassTyping {
 
// -----------------------------------------------------------------------------
 

	
 
type VisitorResult = Result<(), ParseError>;
 
type VisitExprResult = Result<InferIndex, ParseError>;
 
type VisitExprResult = Result<InferNodeIndex, ParseError>;
 

	
 
impl PassTyping {
 
    // Definitions
 
@@ -1154,6 +1213,7 @@ impl PassTyping {
 

	
 
        // Visit the body and all of its expressions
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.parent_index = None;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
@@ -1194,6 +1254,7 @@ impl PassTyping {
 

	
 
        // Visit all of the expressions within the body
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.parent_index = None;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
@@ -1367,13 +1428,14 @@ impl PassTyping {
 
        use AssignmentOperator as AO;
 

	
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let assign_expr = &ctx.heap[id];
 
        let assign_op = assign_expr.operation;
 
        let left_expr_id = assign_expr.left;
 
        let right_expr_id = assign_expr.right;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let left_index = self.visit_expr(ctx, left_expr_id)?;
 
        let right_index = self.visit_expr(ctx, right_expr_id)?;
 

	
 
@@ -1397,17 +1459,19 @@ impl PassTyping {
 
            argument2_index: right_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_assignment_expr(ctx, id)
 
    }
 

	
 
    fn visit_binding_expr(&mut self, ctx: &mut Ctx, id: BindingExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let binding_expr = &ctx.heap[id];
 
        let bound_to_id = binding_expr.bound_to;
 
        let bound_from_id = binding_expr.bound_from;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let arg_to_index = self.visit_expr(ctx, bound_to_id)?;
 
        let arg_from_index = self.visit_expr(ctx, bound_from_id)?;
 

	
 
@@ -1419,18 +1483,20 @@ impl PassTyping {
 
            argument2_index: arg_from_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_binding_expr(ctx, id)
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let conditional_expr = &ctx.heap[id];
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        self.visit_expr(ctx, test_expr_id)?;
 
        let true_index = self.visit_expr(ctx, true_expr_id)?;
 
        let false_index = self.visit_expr(ctx, false_expr_id)?;
 
@@ -1446,6 +1512,7 @@ impl PassTyping {
 
            argument2_index: false_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_conditional_expr(ctx, id)
 
    }
 

	
 
@@ -1453,13 +1520,14 @@ impl PassTyping {
 
        use BinaryOperator as BO;
 

	
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let binary_expr = &ctx.heap[id];
 
        let binary_op = binary_expr.operation;
 
        let lhs_expr_id = binary_expr.left;
 
        let rhs_expr_id = binary_expr.right;
 

	
 
        let parent_index = self.parent_index.replace(self_index);
 
        let left_index = self.visit_expr(ctx, lhs_expr_id)?;
 
        let right_index = self.visit_expr(ctx, rhs_expr_id)?;
 

	
 
@@ -1506,6 +1574,7 @@ impl PassTyping {
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = inference_rule;
 

	
 
        self.parent_index = old_parent;
 
        self.progress_binary_expr(ctx, id)
 
    }
 

	
 
@@ -1513,12 +1582,13 @@ impl PassTyping {
 
        use UnaryOperator as UO;
 

	
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let unary_expr = &ctx.heap[id];
 
        let operation = unary_expr.operation;
 
        let arg_expr_id = unary_expr.expression;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let argument_index = self.visit_expr(ctx, arg_expr_id)?;
 

	
 
        let template = match operation {
 
@@ -1535,17 +1605,19 @@ impl PassTyping {
 
            template, argument_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_unary_expr(ctx, id)
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let indexing_expr = &ctx.heap[id];
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 
        let index_index = self.visit_expr(ctx, index_expr_id)?; // cool name, bro
 

	
 
@@ -1554,18 +1626,20 @@ impl PassTyping {
 
            subject_index, index_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_indexing_expr(ctx, id)
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let slicing_expr = &ctx.heap[id];
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 
        let from_index = self.visit_expr(ctx, from_expr_id)?;
 
        let to_index = self.visit_expr(ctx, to_expr_id)?;
 
@@ -1575,16 +1649,18 @@ impl PassTyping {
 
            subject_index, from_index, to_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_slicing_expr(ctx, id)
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let select_expr = &ctx.heap[id];
 
        let subject_expr_id = select_expr.subject;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
@@ -1592,12 +1668,15 @@ impl PassTyping {
 
            subject_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_select_expr(ctx, id)
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        let self_index = self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let old_parent = self.parent_index.replace(self_index);
 

	
 
        let literal_expr = &ctx.heap[id];
 
        match &literal_expr.value {
 
@@ -1622,82 +1701,138 @@ impl PassTyping {
 
                node.inference_rule = InferenceRule::MonoTemplate(InferenceRuleTemplate::new_forced(&STRING_TEMPLATE));
 
            },
 
            Literal::Struct(literal) => {
 
                // Visit field expressions
 
                let mut expr_ids = self.expr_buffer.start_section();
 
                for field in &literal.fields {
 
                    expr_ids.push(field.value);
 
                }
 
                self.insert_initial_struct_polymorph_data(ctx, id);
 

	
 
                let mut expr_indices = self.index_buffer.start_section();
 
                for expr_id in expr_ids.iter_copied() {
 
                    self.visit_expr(ctx, expr_id)?;
 
                    let expr_index = self.visit_expr(ctx, expr_id)?;
 
                    expr_indices.push(expr_index);
 
                }
 
                expr_ids.forget();
 
                let element_indices = expr_indices.into_vec();
 

	
 
                // Assign rule and extra data index to inference node
 
                let extra_index = self.insert_initial_struct_polymorph_data(ctx, id);
 
                let node = &mut self.infer_nodes[self_index];
 
                node.extra_index = extra_index;
 
                node.inference_rule = InferenceRule::LiteralStruct(InferenceRuleLiteralStruct{
 
                    element_indices,
 
                });
 
            },
 
            Literal::Enum(_) => {
 
                // Enumerations do not carry any subexpressions, but may still
 
                // have a user-defined polymorphic marker variable. For this 
 
                // reason we may still have to apply inference to this 
 
                // polymorphic variable
 
                self.insert_initial_enum_polymorph_data(ctx, id);
 
                let extra_index = self.insert_initial_enum_polymorph_data(ctx, id);
 
                let node = &mut self.infer_nodes[self_index];
 
                node.extra_index = extra_index;
 
                node.inference_rule = InferenceRule::LiteralEnum(InferenceRuleLiteralEnum{});
 
            },
 
            Literal::Union(literal) => {
 
                // May carry subexpressions and polymorphic arguments
 
                let expr_ids = self.expr_buffer.start_section_initialized(literal.values.as_slice());
 
                self.insert_initial_union_polymorph_data(ctx, id);
 
                let extra_index = self.insert_initial_union_polymorph_data(ctx, id);
 

	
 
                let mut expr_indices = self.index_buffer.start_section();
 
                for expr_id in expr_ids.iter_copied() {
 
                    self.visit_expr(ctx, expr_id)?;
 
                    let expr_index = self.visit_expr(ctx, expr_id)?;
 
                    expr_indices.push(expr_index);
 
                }
 
                expr_ids.forget();
 
                let element_indices = expr_indices.into_vec();
 

	
 
                let node = &mut self.infer_nodes[self_index];
 
                node.extra_index = extra_index;
 
                node.inference_rule = InferenceRule::LiteralUnion(InferenceRuleLiteralUnion{
 
                    element_indices,
 
                });
 
            },
 
            Literal::Array(expressions) | Literal::Tuple(expressions) => {
 
                let expr_ids = self.expr_buffer.start_section_initialized(expressions.as_slice());
 
                let mut expr_indices = self.index_buffer.start_section();
 

	
 
                for expr_id in expr_ids.iter_copied() {
 
                    self.visit_expr(ctx, expr_id)?;
 
                    let expr_index = self.visit_expr(ctx, expr_id)?;
 
                    expr_indices.push(expr_index);
 
                }
 
                expr_ids.forget();
 
                let element_indices = expr_indices.into_vec();
 

	
 
                // Note: array/tuple literals carry no polymorphic extra data,
                // and tuple literals currently reuse the array rule here.
                let node = &mut self.infer_nodes[self_index];
 
                node.inference_rule = InferenceRule::LiteralArray(InferenceRuleLiteralArray{
 
                    element_indices,
 
                })
 
            }
 
        }
 

	
 
        self.parent_index = old_parent;
 
        self.progress_literal_expr(ctx, id)
 
    }
 

	
 
    fn visit_cast_expr(&mut self, ctx: &mut Ctx, id: CastExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let cast_expr = &ctx.heap[id];
 
        let subject_expr_id = cast_expr.subject;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        let old_parent = self.parent_index.replace(self_index);
 
        let subject_index = self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.inference_rule = InferenceRule::CastExpr(InferenceRuleCastExpr{
 
            subject_index,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_cast_expr(ctx, id)
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        self.insert_initial_call_polymorph_data(ctx, id);
 
        let self_index = self.insert_initial_inference_node(ctx, upcast_id)?;
 
        let extra_index = self.insert_initial_call_polymorph_data(ctx, id);
 

	
 
        // By default we set the polymorph idx for calls to 0. If the call ends
 
        // up not being a polymorphic one, then we will select the default
 
        // expression types in the type table
 
        let call_expr = &ctx.heap[id];
 
        self.infer_nodes[call_expr.unique_id_in_definition as usize].field_or_monomorph_idx = 0;
 
        // By default we set the polymorph idx for calls to 0. If the call
 
        // refers to a non-polymorphic function, then it will be "monomorphed"
 
        // once, hence we end up pointing to the correct instance.
 
        self.infer_nodes[self_index].field_or_monomorph_index = 0;
 

	
 
        // Visit all arguments
 
        let old_parent = self.parent_index.replace(self_index);
 

	
 
        let call_expr = &ctx.heap[id];
 
        let expr_ids = self.expr_buffer.start_section_initialized(call_expr.arguments.as_slice());
 
        let mut expr_indices = self.index_buffer.start_section();
 

	
 
        for arg_expr_id in expr_ids.iter_copied() {
 
            self.visit_expr(ctx, arg_expr_id)?;
 
            let expr_index = self.visit_expr(ctx, arg_expr_id)?;
 
            expr_indices.push(expr_index);
 
        }
 
        expr_ids.forget();
 
        let argument_indices = expr_indices.into_vec();
 

	
 
        let node = &mut self.infer_nodes[self_index];
 
        node.extra_index = extra_index;
 
        node.inference_rule = InferenceRule::CallExpr(InferenceRuleCallExpr{
 
            argument_indices,
 
        });
 

	
 
        self.parent_index = old_parent;
 
        self.progress_call_expr(ctx, id)
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitExprResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        self.insert_initial_inference_node(ctx, upcast_id)?;
 

	
 
        let var_expr = &ctx.heap[id];
 
        debug_assert!(var_expr.declaration.is_some());
 
@@ -1887,7 +2022,7 @@ impl PassTyping {
 
                    infer_expr.type_id = type_id;
 
                },
 
                Expression::Select(_) => {
 
                    debug_assert!(infer_expr.field_or_monomorph_idx >= 0);
 
                    debug_assert!(infer_expr.field_or_monomorph_index >= 0);
 
                },
 
                _ => {
 
                    unreachable!("handling extra data for expression {:?}", &ctx.heap[extra_data.expr_id]);
 
@@ -1928,7 +2063,7 @@ impl PassTyping {
 
            infer_expr.expr_type.write_concrete_type(&mut concrete);
 
            target.expr_data.push(MonomorphExpression{
 
                expr_type: concrete,
 
                field_or_monomorph_idx: infer_expr.field_or_monomorph_idx,
 
                field_or_monomorph_idx: infer_expr.field_or_monomorph_index,
 
                type_id: infer_expr.type_id,
 
            });
 
        }
 
@@ -1936,6 +2071,331 @@ impl PassTyping {
 
        Ok(())
 
    }
 

	
 
    fn progress_inference_rule_bi_equal(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_bi_equal();
 
        let arg_index = rule.argument_index;
 

	
 
        let base_progress = self.progress_template(ctx, node_index, rule.template.application, rule.template.template)?;
 
        let (node_progress, arg_progress) = self.apply_equal2_constraint(ctx, node_index, node_index, 0, arg_index, 0)?;
 

	
 
        if base_progress || node_progress { self.queue_node_parent(node_index); }
 
        if arg_progress { self.queue_node(arg_index); }
 

	
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_tri_equal_args(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_tri_equal_args();
 
        let arg1_index = rule.argument1_index;
 
        let arg2_index = rule.argument2_index;
 

	
 
        let self_template_progress = self.progress_template(ctx, node_index, rule.result_template.application, rule.result_template.template)?;
 
        let arg1_template_progress = self.progress_template(ctx, arg1_index, rule.argument_template.application, rule.argument_template.template)?;
 
        let arg2_template_progress = self.progress_template(ctx, arg2_index, rule.argument_template.application, rule.argument_template.template)?;
 
        let (arg1_progress, arg2_progress) = self.apply_equal2_constraint(ctx, node_index, arg1_index, 0, arg2_index, 0)?;
 

	
 
        if self_template_progress { self.queue_node_parent(node_index); }
 
        if arg1_template_progress || arg1_progress { self.queue_node(arg1_index); }
 
        if arg2_template_progress || arg2_progress { self.queue_node(arg2_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_tri_equal_all(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_tri_equal_all();
 
        let arg1_index = rule.argument1_index;
 
        let arg2_index = rule.argument2_index;
 

	
 
        let template_progress = self.progress_template(ctx, node_index, rule.template.application, rule.template.template)?;
 
        let (node_progress, arg1_progress, arg2_progress) =
 
            self.apply_equal3_constraint(ctx, node_index, arg1_index, arg2_index, 0)?;
 

	
 
        if template_progress || node_progress { self.queue_node_parent(node_index); }
 
        if arg1_progress { self.queue_node(arg1_index); }
 
        if arg2_progress { self.queue_node(arg2_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_concatenate(&mut self, ctx: &mut Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_concatenate();
 
        let arg1_index = rule.argument1_index;
 
        let arg2_index = rule.argument2_index;
 

	
 
        // Two cases: one of the arguments is a string (then all must be), or
 
        // one of the arguments is an array (and all must be arrays).
 
        let (expr_is_str, expr_is_not_str) = self.type_is_certainly_or_certainly_not_string(node_index);
 
        let (arg1_is_str, arg1_is_not_str) = self.type_is_certainly_or_certainly_not_string(arg1_index);
 
        let (arg2_is_str, arg2_is_not_str) = self.type_is_certainly_or_certainly_not_string(arg2_index);
 

	
 
        let someone_is_str = expr_is_str || arg1_is_str || arg2_is_str;
 
        let someone_is_not_str = expr_is_not_str || arg1_is_not_str || arg2_is_not_str;
 

	
 
        // Note: this statement is an expression returning the progression bools
 
        let (node_progress, arg1_progress, arg2_progress) = if someone_is_str {
 
            // One of the arguments is a string, then all must be strings
 
            self.apply_equal3_constraint(ctx, node_index, arg1_index, arg2_index, 0)?
 
        } else {
 
            let progress_expr = if someone_is_not_str {
 
                // Output must be a normal array
 
                self.apply_template_constraint(ctx, node_index, &ARRAY_TEMPLATE)?
 
            } else {
 
                // Output may still be anything
 
                self.apply_template_constraint(ctx, node_index, &ARRAYLIKE_TEMPLATE)?
 
            };
 

	
 
            let progress_arg1 = self.apply_template_constraint(ctx, arg1_index, &ARRAYLIKE_TEMPLATE)?;
 
            let progress_arg2 = self.apply_template_constraint(ctx, arg2_index, &ARRAYLIKE_TEMPLATE)?;
 

	
 
            // If they're all arraylike, then we want the subtype to match
 
            let (subtype_expr, subtype_arg1, subtype_arg2) =
 
                self.apply_equal3_constraint(ctx, node_index, arg1_index, arg2_index, 1)?;
 

	
 
            (progress_expr || subtype_expr, progress_arg1 || subtype_arg1, progress_arg2 || subtype_arg2)
 
        };
 

	
 
        if node_progress { self.queue_node_parent(node_index); }
 
        if arg1_progress { self.queue_node(arg1_index); }
 
        if arg2_progress { self.queue_node(arg2_index); }
 

	
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_indexing_expr(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_indexing_expr();
 
        let subject_index = rule.subject_index;
 
        let index_index = rule.index_index; // which one?
 

	
 
        // Subject is arraylike, index in integerlike
 
        let subject_template_progress = self.apply_template_constraint(ctx, subject_index, &ARRAYLIKE_TEMPLATE)?;
 
        let index_template_progress = self.apply_template_constraint(ctx, index_index, &INTEGERLIKE_TEMPLATE)?;
 

	
 
        // If subject is type `Array<T>`, then expr type is `T`
 
        let (node_progress, subject_progress) =
 
            self.apply_equal2_constraint(ctx, node_index, node_index, 0, subject_index, 1)?;
 

	
 
        if node_progress { self.queue_node_parent(node_index); }
 
        if subject_template_progress || subject_progress { self.queue_node(subject_index); }
 
        if index_template_progress { self.queue_node(index_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_slicing_expr(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_slicing_expr();
 
        let subject_index = rule.subject_index;
 
        let from_index_index = rule.from_index;
 
        let to_index_index = rule.to_index;
 

	
 
        debug_log!("Rule slicing [node: {}, expr: {}]", node_index, node.expr_id.index);
 

	
 
        // Subject is arraylike, indices are integerlike
 
        let subject_template_progress = self.apply_template_constraint(ctx, subject_index, &ARRAYLIKE_TEMPLATE)?;
 
        let from_template_progress = self.apply_template_constraint(ctx, from_index_index, &INTEGERLIKE_TEMPLATE)?;
 
        let to_template_progress = self.apply_template_constraint(ctx, to_index_index, &INTEGERLIKE_TEMPLATE)?;
 
        let (from_index_progress, to_index_progress) =
 
            self.apply_equal2_constraint(ctx, node_index, from_index_index, 0, to_index_index, 0)?;
 

	
 
        // Same as array indexing: result depends on whether subject is string
 
        // or array
 
        let (is_string, is_not_string) = self.type_is_certainly_or_certainly_not_string(node_index);
 
        let (node_progress, subject_progress) = if is_string {
 
            // Certainly a string
 
            (
 
                self.apply_forced_constraint(ctx, node_index, &STRING_TEMPLATE)?,
 
                false
 
            )
 
        } else if is_not_string {
 
            // Certainly not a string, apply template constraint. Then make sure
 
            // that if we have an `Array<T>`, that the slice produces `Slice<T>`
 
            let node_template_progress = self.apply_template_constraint(ctx, node_index, &SLICE_TEMPLATE)?;
 
            let (node_progress, subject_progress) =
 
                self.apply_equal2_constraint(ctx, node_index, node_index, 1, subject_index, 1)?;
 

	
 
            (
 
                node_template_progress || node_progress,
 
                subject_progress
 
            )
 
        } else {
 
            // Not sure yet
 
            let node_template_progress = self.apply_template_constraint(ctx, node_index, &ARRAYLIKE_TEMPLATE)?;
 
            let (node_progress, subject_progress) =
 
                self.apply_equal2_constraint(ctx, node_index, node_index, 1, subject_index, 1)?;
 

	
 
            (
 
                node_template_progress || node_progress,
 
                subject_progress
 
            )
 
        };
 

	
 
        if node_progress { self.queue_node_parent(node_index); }
 
        if subject_template_progress || subject_progress { self.queue_node(subject_index); }
 
        if from_template_progress || from_index_progress { self.queue_node(from_index_index); }
 
        if to_template_progress || to_index_progress { self.queue_node(to_index_index); }
 

	
 
        return Ok(());
 
    }
 

	
 
    fn progress_inference_rule_select_struct_field(&mut self, ctx: &mut Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        let node = &self.infer_nodes[node_index];
 
        let rule = node.inference_rule.as_select_struct_field();
 

	
 
        let subject_index = rule.subject_index;
 

	
 
        fn get_definition_id_from_inference_type(inference_type: &InferenceType) -> Result<Option<DefinitionId>, ()> {
 
            for part in inference_type.parts.iter() {
 
                if part.is_marker() { continue; }
 
                if !part.is_concrete() { break; }
 

	
 
                if let InferenceTypePart::Instance(definition_id, _) = part {
 
                    return Ok(Some(*definition_id));
 
                } else {
 
                    return Err(())
 
                }
 
            }
 

	
 
            // Nothing is known yet
 
            return Ok(None);
 
        }
 

	
 
        if node.field_or_monomorph_index < 0 {
 
            // Don't know the subject definition, hence the field yet. Try to
 
            // determine it.
 
            let subject_node = &self.infer_nodes[subject_index];
 
            match get_definition_id_from_inference_type(&subject_node.expr_type) {
 
                Ok(Some(definition_id)) => {
 
                    // Determined definition of subject for the first time.
 
                    let base_definition = ctx.types.get_base_definition(&definition_id).unwrap();
 
                    let struct_definition = if let DefinedTypeVariant::Struct(struct_definition) = &base_definition.definition {
 
                        struct_definition
 
                    } else {
 
                        return Err(ParseError::new_error_at_span(
 
                            &ctx.module().source, rule.selected_field.span, format!(
 
                                "Can only apply field access to structs, got a subject of type '{}'",
 
                                subject_node.expr_type.display_name(&ctx.heap)
 
                            )
 
                        ));
 
                    };
 

	
 
                    // Seek the field that is referenced by the select
 
                    // expression
 
                    let mut field_found = false;
 
                    for (field_index, field) in struct_definition.fields.iter().enumerate() {
 
                        if field.identifier.value == rule.selected_field.value {
 
                            // Found the field of interest
 
                            field_found = true;
 
                            let node = &mut self.infer_nodes[node_index];
 
                            node.field_or_monomorph_index = field_index as i32;
 
                            break;
 
                        }
 
                    }
 

	
 
                    if !field_found {
 
                        let struct_definition = ctx.heap[definition_id].as_struct();
 
                        return Err(ParseError::new_error_at_span(
 
                            &ctx.module().source, rule.selected_field.span, format!(
 
                                "this field does not exist on the struct '{}'",
 
                                struct_definition.identifier.value.as_str()
 
                            )
 
                        ));
 
                    }
 

	
 
                    // Insert the initial data needed to infer polymorphic
 
                    // fields
 
                    let extra_index = self.insert_initial_select_polymorph_data(ctx, node_index, definition_id);
 
                    let node = &mut self.infer_nodes[node_index];
 
                    node.extra_index = extra_index;
 
                },
 
                Ok(None) => {
 
                    // We don't know what to do yet, because we don't know the
 
                    // subject type yet.
 
                    return Ok(())
 
                },
 
                Err(()) => {
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module().source, rule.selected_field.span, format!(
 
                            "Can only apply field access to structs, got a subject of type '{}'",
 
                            subject_node.expr_type.display_name(&ctx.heap)
 
                        )
 
                    ));
 
                },
 
            }
 
        }
 

	
 
        // If here then the field index is known, hence we can start inferring
 
        // the type of the selected field
 
        let node = &self.infer_nodes[node_index];
 
        let node_expr_id = node.expr_id;
 
        let subject_expr_id = self.infer_nodes[subject_index].expr_id;
 
        let poly_data = &mut self.extra_data[node.extra_index];
 
        let mut poly_progress = HashSet::new(); // TODO: @Performance
 

	
 
        // Apply to struct's type
 
        let signature_type: *mut _ = &mut poly_data.embedded[0];
 
        let subject_type: *mut _ = &mut self.infer_nodes[subject_index].expr_type;
 

	
 
        let (_, progress_subject) = Self::apply_equal2_signature_constraint(
 
            ctx, node_expr_id, Some(subject_expr_id), poly_data, &mut poly_progress,
 
            signature_type, 0, subject_type, 0
 
        )?;
 

	
 
        if progress_subject {
 
            self.expr_queued.push_back(subject_index);
 
        }
 

	
 
        // Apply to field's type
 
        let signature_type: *mut _ = &mut poly_data.returned;
 
        let expr_type: *mut _ = &mut self.infer_nodes[node_index].expr_type;
 

	
 
        let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
            ctx, node_expr_id, None, poly_data, &mut poly_progress,
 
            signature_type, 0, expr_type, 0
 
        )?;
 

	
 
        if progress_expr {
 
            if let Some(parent_index) = self.infer_nodes[node_index].parent_index {
 
                self.expr_queued.push_back(parent_index);
 
            }
 
        }
 

	
 
        // Reapply progress in polymorphic variables to struct's type
 
        let signature_type: *mut _ = &mut poly_data.embedded[0];
 
        let subject_type: *mut _ = &mut self.infer_nodes[subject_index].expr_type;
 

	
 
        let progress_subject = Self::apply_equal2_polyvar_constraint(
 
            poly_data, &poly_progress, signature_type, subject_type
 
        );
 

	
 
        let signature_type: *mut _ = &mut poly_data.returned;
 
        let expr_type: *mut _ = &mut self.infer_nodes[node_index].expr_type;
 

	
 
        let progress_expr = Self::apply_equal2_polyvar_constraint(
 
            poly_data, &poly_progress, signature_type, expr_type
 
        );
 

	
 
        if progress_subject { self.queue_node(subject_index); }
 
        if progress_expr { self.queue_node_parent(node_index); }
 
        return Ok(())
 
    }
 

	
 
    fn progress_inference_rule_select_tuple_member(&mut self, ctx: &mut Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> {
 
        // Not yet ported to the new inference-node indexing.
 
        todo!("progress tuple-member selection for node {}", node_index)
 
    }
 

	
 
    fn progress_template(&mut self, ctx: &Ctx, node_index: InferNodeIndex, application: InferenceRuleTemplateApplication, template: &[InferenceTypePart]) -> Result<bool, ParseError> {
 
        use InferenceRuleTemplateApplication as TA;
 

	
 
        match application {
 
            TA::None => Ok(false),
 
            TA::Template => self.apply_template_constraint(ctx, node_index, template),
 
            TA::Forced => self.apply_forced_constraint(ctx, node_index, template),
 
        }
 
    }
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, idx: i32) -> Result<(), ParseError> {
 
        let id = self.infer_nodes[idx as usize].expr_id;
 
        match &ctx.heap[id] {
 
@@ -1990,7 +2450,7 @@ impl PassTyping {
 
        }
 
    }
 

	
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, infer_index: InferIndex) -> Result<(), ParseError> {
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, infer_index: InferNodeIndex) -> Result<(), ParseError> {
 
        use AssignmentOperator as AO;
 

	
 

	
 
@@ -2030,7 +2490,7 @@ impl PassTyping {
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.debug_get_display_name(ctx, upcast_id));
 

	
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_expr { self.queue_node_parent(ctx, upcast_id); }
 
        if progress_forced || progress_arg1 { self.queue_expr(ctx, arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(ctx, arg2_expr_id); }
 

	
 
@@ -2048,7 +2508,7 @@ impl PassTyping {
 
        let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
        let (progress_from, progress_to) = self.apply_equal2_constraint(ctx, upcast_id, bound_from_id, 0, bound_to_id, 0)?;
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_expr { self.queue_node_parent(ctx, upcast_id); }
 
        if progress_from { self.queue_expr(ctx, bound_from_id); }
 
        if progress_to { self.queue_expr(ctx, bound_to_id); }
 

	
 
@@ -2081,7 +2541,7 @@ impl PassTyping {
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.debug_get_display_name(ctx, arg2_expr_id));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.debug_get_display_name(ctx, upcast_id));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_expr { self.queue_node_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(ctx, arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(ctx, arg2_expr_id); }
 

	
 
@@ -2187,7 +2647,7 @@ impl PassTyping {
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.debug_get_display_name(ctx, arg2_id));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.debug_get_display_name(ctx, upcast_id));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_expr { self.queue_node_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(ctx, arg1_id); }
 
        if progress_arg2 { self.queue_expr(ctx, arg2_id); }
 

	
 
@@ -2235,7 +2695,7 @@ impl PassTyping {
 
        debug_log!("   - Arg  type [{}]: {}", progress_arg, self.debug_get_display_name(ctx, arg_id));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.debug_get_display_name(ctx, upcast_id));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_expr { self.queue_node_parent(ctx, upcast_id); }
 
        if progress_arg { self.queue_expr(ctx, arg_id); }
 

	
 
        Ok(())
 
@@ -2266,7 +2726,7 @@ impl PassTyping {
 
        debug_log!("   - Index   type [{}]: {}", progress_index, self.debug_get_display_name(ctx, index_id));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.debug_get_display_name(ctx, upcast_id));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_expr { self.queue_node_parent(ctx, upcast_id); }
 
        if progress_subject_base || progress_subject { self.queue_expr(ctx, subject_id); }
 
        if progress_index { self.queue_expr(ctx, index_id); }
 

	
 
@@ -2321,7 +2781,7 @@ impl PassTyping {
 
        debug_log!("   - ToIdx   type [{}]: {}", progress_idx_base || progress_to, self.debug_get_display_name(ctx, to_id));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.debug_get_display_name(ctx, upcast_id));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_expr { self.queue_node_parent(ctx, upcast_id); }
 
        if progress_subject_base || progress_subject { self.queue_expr(ctx, subject_id); }
 
        if progress_idx_base || progress_from { self.queue_expr(ctx, from_id); }
 
        if progress_idx_base || progress_to { self.queue_expr(ctx, to_id); }
 
@@ -2391,7 +2851,7 @@ impl PassTyping {
 
        let (progress_subject, progress_expr) = match &select_expr.kind {
 
            SelectKind::StructField(field_name) => {
 
                // Handle select of a struct's field
 
                if infer_expr.field_or_monomorph_idx < 0 {
 
                if infer_expr.field_or_monomorph_index < 0 {
 
                    // We don't know the field or the definition it is pointing to yet
 
                    // Not yet known, check if we can determine it
 
                    let subject_type = &self.infer_nodes[subject_expr_idx as usize].expr_type;
 
@@ -2418,7 +2878,7 @@ impl PassTyping {
 
                                if field_def.identifier == *field_name {
 
                                    // Set field definition and index
 
                                    let infer_expr = &mut self.infer_nodes[expr_idx as usize];
 
                                    infer_expr.field_or_monomorph_idx = field_def_idx as i32;
 
                                    infer_expr.field_or_monomorph_index = field_def_idx as i32;
 
                                    struct_def_id = Some(type_def.ast_definition);
 
                                    break;
 
                                }
 
@@ -2509,7 +2969,7 @@ impl PassTyping {
 
            SelectKind::TupleMember(member_index) => {
 
                let member_index = *member_index;
 

	
 
                if infer_expr.field_or_monomorph_idx < 0 {
 
                if infer_expr.field_or_monomorph_index < 0 {
 
                    // We don't know what kind of tuple we're accessing yet
 
                    let subject_type = &self.infer_nodes[subject_expr_idx as usize].expr_type;
 
                    let tuple_size = try_get_tuple_size_from_inference_type(subject_type);
 
@@ -2530,7 +2990,7 @@ impl PassTyping {
 
                            // Within bounds, so set the index (such that we
 
                            // will not perform this lookup again)
 
                            let infer_expr = &mut self.infer_nodes[expr_idx as usize];
 
                            infer_expr.field_or_monomorph_idx = member_index as i32;
 
                            infer_expr.field_or_monomorph_index = member_index as i32;
 
                        },
 
                        Ok(None) => {
 
                            // Nothing is known about the tuple yet
 
@@ -2564,7 +3024,7 @@ impl PassTyping {
 
        };
 

	
 
        if progress_subject { self.queue_expr(ctx, subject_id); }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_expr { self.queue_node_parent(ctx, upcast_id); }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject, self.debug_get_display_name(ctx, subject_id));
 
@@ -2909,7 +3369,7 @@ impl PassTyping {
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.debug_get_display_name(ctx, upcast_id));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_expr { self.queue_node_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 
@@ -2934,7 +3394,7 @@ impl PassTyping {
 
        let expr_progress = self.apply_template_constraint(ctx, upcast_id, &infer_type.parts)?;
 

	
 
        if expr_progress {
 
            self.queue_expr_parent(ctx, upcast_id);
 
            self.queue_node_parent(ctx, upcast_id);
 
        }
 

	
 
        // Check if the two types are compatible
 
@@ -3096,7 +3556,7 @@ impl PassTyping {
 
            unsafe{&*ret_type}.display_name(&ctx.heap)
 
        );
 
        if progress_ret {
 
            self.queue_expr_parent(ctx, upcast_id);
 
            self.queue_node_parent(ctx, upcast_id);
 
        }
 

	
 
        debug_log!(" * After:");
 
@@ -3195,7 +3655,7 @@ impl PassTyping {
 
                }
 
            }
 
        }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_expr { self.queue_node_parent(ctx, upcast_id); }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Var  type [{}]: {}", progress_var, self.var_types.get(&var_id).unwrap().var_type.display_name(&ctx.heap));
 
@@ -3205,23 +3665,22 @@ impl PassTyping {
 
        Ok(())
 
    }
 

	
 
    fn queue_expr_parent(&mut self, ctx: &Ctx, expr_id: ExpressionId) {
 
        if let ExpressionParent::Expression(parent_expr_id, _) = &ctx.heap[expr_id].parent() {
 
            let expr_idx = ctx.heap[*parent_expr_id].get_unique_id_in_definition();
 
            self.expr_queued.push_back(expr_idx);
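 
    /// Queues the parent inference node of `node_index` (if it has one) for
    /// another round of type progression.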
 
    fn queue_node_parent(&mut self, node_index: InferNodeIndex) {
 
        let node = &self.infer_nodes[node_index];
 
        if let Some(parent_node_index) = node.parent_index {
 
            self.expr_queued.push_back(parent_node_index);
 
        }
 
    }
 

	
 
    fn queue_expr(&mut self, ctx: &Ctx, expr_id: ExpressionId) {
 
        let expr_idx = ctx.heap[expr_id].get_unique_id_in_definition();
 
        self.expr_queued.push_back(expr_idx);
 
    #[inline]
 
    fn queue_node(&mut self, node_index: InferNodeIndex) {
 
        self.expr_queued.push_back(node_index);
 
    }
 

	
 

	
 
    // first returned is certainly string, second is certainly not
 
    fn type_is_certainly_or_certainly_not_string(&self, ctx: &Ctx, expr_id: ExpressionId) -> (bool, bool) {
 
        let expr_idx = ctx.heap[expr_id].get_unique_id_in_definition();
 
        let expr_type = &self.infer_nodes[expr_idx as usize].expr_type;
 
    /// Returns whether the type is certainly a string (true, false), certainly
 
    /// not a string (false, true), or still unknown (false, false).
 
    fn type_is_certainly_or_certainly_not_string(&self, node_index: InferNodeIndex) -> (bool, bool) {
 
        let expr_type = &self.infer_nodes[node_index].expr_type;
 
        if expr_type.is_done {
 
            if expr_type.parts[0] == InferenceTypePart::String {
 
                return (true, false);
 
@@ -3239,15 +3698,14 @@ impl PassTyping {
 
    /// expression type as well. Hence the template may be fully specified (e.g.
 
    /// a bool) or contain "inference" variables (e.g. an array of T)
 
    fn apply_template_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
        &mut self, ctx: &Ctx, node_index: InferNodeIndex, template: &[InferenceTypePart]
 
    ) -> Result<bool, ParseError> {
 
        let expr_idx = ctx.heap[expr_id].get_unique_id_in_definition(); // TODO: @Temp
 
        let expr_type = &mut self.infer_nodes[expr_idx as usize].expr_type;
 
        let expr_type = &mut self.infer_nodes[node_index].expr_type;
 
        match InferenceType::infer_subtree_for_single_type(expr_type, 0, template, 0, false) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(
 
                self.construct_template_type_error(ctx, expr_id, template)
 
                self.construct_template_type_error(ctx, node_index, template)
 
            )
 
        }
 
    }
 
@@ -3269,15 +3727,15 @@ impl PassTyping {
 
    /// Applies a forced constraint: the supplied expression's type MUST be
 
    /// inferred from the template, the other way around is considered invalid.
 
    fn apply_forced_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
        &mut self, ctx: &Ctx, node_index: InferNodeIndex, template: &[InferenceTypePart]
 
    ) -> Result<bool, ParseError> {
 
        let expr_idx = ctx.heap[expr_id].get_unique_id_in_definition();
 
        let expr_type = &mut self.infer_nodes[expr_idx as usize].expr_type;
 
        let expr_type = &mut self.infer_nodes[node_index].expr_type;
 

	
 
        match InferenceType::infer_subtree_for_single_type(expr_type, 0, template, 0, true) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(
 
                self.construct_template_type_error(ctx, expr_id, template)
 
                self.construct_template_type_error(ctx, node_index, template)
 
            )
 
        }
 
    }
 
@@ -3287,21 +3745,19 @@ impl PassTyping {
 
    /// is successful then the composition of all types are made equal.
 
    /// The "parent" `expr_id` is provided to construct errors.
 
    fn apply_equal2_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg1_start_idx: usize,
 
        arg2_id: ExpressionId, arg2_start_idx: usize
 
        &mut self, ctx: &Ctx, node_index: InferNodeIndex,
 
        arg1_index: InferNodeIndex, arg1_start_idx: usize,
 
        arg2_index: InferNodeIndex, arg2_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError> {
 
        let arg1_expr_idx = ctx.heap[arg1_id].get_unique_id_in_definition(); // TODO: @Temp
 
        let arg2_expr_idx = ctx.heap[arg2_id].get_unique_id_in_definition();
 
        let arg1_type: *mut _ = &mut self.infer_nodes[arg1_expr_idx as usize].expr_type;
 
        let arg2_type: *mut _ = &mut self.infer_nodes[arg2_expr_idx as usize].expr_type;
 
        let arg1_type: *mut _ = &mut self.infer_nodes[arg1_index].expr_type;
 
        let arg2_type: *mut _ = &mut self.infer_nodes[arg2_index].expr_type;
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            arg1_type, arg1_start_idx,
 
            arg2_type, arg2_start_idx
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
            return Err(self.construct_arg_type_error(ctx, node_index, arg1_index, arg2_index));
 
        }
 

	
 
        Ok((infer_res.modified_lhs(), infer_res.modified_rhs()))
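// Illustrative sketch, not part of this changeset: the idea behind the equal2
// constraint, where two partially known types are made equal and information
// flows both ways. `P` and `infer_both` are assumptions for illustration; the
// real code goes through raw pointers only because both types live in the same
// `infer_nodes` vector, which a standalone sketch does not need.
#[derive(Clone, PartialEq, Debug)]
enum P { Unknown, UInt32, Array }

fn infer_both(a: &mut Vec<P>, b: &mut Vec<P>) -> Result<(bool, bool), String> {
    let (mut mod_a, mut mod_b) = (false, false);
    for (pa, pb) in a.iter_mut().zip(b.iter_mut()) {
        match (pa.clone(), pb.clone()) {
            (x, y) if x == y => {}                        // already equal
            (P::Unknown, y) => { *pa = y; mod_a = true; } // b teaches a
            (x, P::Unknown) => { *pb = x; mod_b = true; } // a teaches b
            (x, y) => return Err(format!("incompatible: {:?} vs {:?}", x, y)),
        }
    }
    Ok((mod_a, mod_b))
}
// e.g. a = [Array, Unknown] and b = [Unknown, UInt32] both end up as
// [Array, UInt32], with both "modified" flags set.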
 
@@ -3440,31 +3896,27 @@ impl PassTyping {
 
    /// attempt to do so. If the call is successful then the composition of all
 
    /// types is made equal.
 
    fn apply_equal3_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId,
 
        &mut self, ctx: &Ctx, node_index: InferNodeIndex,
 
        arg1_index: InferNodeIndex, arg2_index: InferNodeIndex,
 
        start_idx: usize
 
    ) -> Result<(bool, bool, bool), ParseError> {
 
        // Safety: all points are unique
 
        // Safety: all indices are unique
 
        //         containers may not be modified
 
        let expr_expr_idx = ctx.heap[expr_id].get_unique_id_in_definition(); // TODO: @Temp
 
        let arg1_expr_idx = ctx.heap[arg1_id].get_unique_id_in_definition();
 
        let arg2_expr_idx = ctx.heap[arg2_id].get_unique_id_in_definition();
 

	
 
        let expr_type: *mut _ = &mut self.infer_nodes[expr_expr_idx as usize].expr_type;
 
        let arg1_type: *mut _ = &mut self.infer_nodes[arg1_expr_idx as usize].expr_type;
 
        let arg2_type: *mut _ = &mut self.infer_nodes[arg2_expr_idx as usize].expr_type;
 
        let expr_type: *mut _ = &mut self.infer_nodes[node_index].expr_type;
 
        let arg1_type: *mut _ = &mut self.infer_nodes[arg1_index].expr_type;
 
        let arg2_type: *mut _ = &mut self.infer_nodes[arg2_index].expr_type;
 

	
 
        let expr_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(expr_type, start_idx, arg1_type, start_idx)
 
        };
 
        if expr_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_expr_type_error(ctx, expr_id, arg1_id));
 
            return Err(self.construct_expr_type_error(ctx, node_index, arg1_index));
 
        }
 

	
 
        let args_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(arg1_type, start_idx, arg2_type, start_idx) };
 
        if args_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
            return Err(self.construct_arg_type_error(ctx, node_index, arg1_index, arg2_index));
 
        }
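// Illustrative sketch, not part of this changeset: equal3 expressed as two
// pairwise unifications plus a possible re-run. If unifying arg1 with arg2
// refines arg1, that refinement still has to reach the expression type, so the
// first pairing is applied once more. `infer_three` and its closure parameter
// are assumptions for illustration only.
fn infer_three<T>(
    mut unify: impl FnMut(&mut Vec<T>, &mut Vec<T>) -> Result<(bool, bool), String>,
    expr: &mut Vec<T>, arg1: &mut Vec<T>, arg2: &mut Vec<T>,
) -> Result<(bool, bool, bool), String> {
    let (mut modified_expr, modified_arg1_a) = unify(expr, arg1)?;
    let (modified_arg1_b, modified_arg2) = unify(arg1, arg2)?;
    if modified_arg1_b {
        // arg1 learned something from arg2; propagate it back into expr as well
        let (again, _) = unify(expr, arg1)?;
        modified_expr = modified_expr || again;
    }
    Ok((modified_expr, modified_arg1_a || modified_arg1_b, modified_arg2))
}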
 

	
 
        // If all types are compatible, but the second call caused the arg1_type
 
@@ -3563,12 +4015,12 @@ impl PassTyping {
 
    }
 

	
 
    /// Determines the `InferenceType` for the expression based on the
 
    /// expression parent. Note that if the parent is another expression, we do
 
    /// not take special action, instead we let parent expressions fix the type
 
    /// of subexpressions before they have a chance to call this function.
 
    fn insert_initial_expr_inference_type(
 
    /// expression parent (this is not done if the parent is a regular ol'
 
    /// expression). Expects `parent_index` to be set to the parent of the
 
    /// inference node that is created here.
 
    fn insert_initial_inference_node(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId
 
    ) -> Result<InferIndex, ParseError> {
 
    ) -> Result<InferNodeIndex, ParseError> {
 
        use ExpressionParent as EP;
 
        use InferenceTypePart as ITP;
 

	
 
@@ -3615,11 +4067,12 @@ impl PassTyping {
 
                InferenceType::new(false, true, vec![ITP::Void]),
 
        };
 

	
 
        let infer_index = self.infer_nodes.len() as InferIndex;
 
        let infer_index = self.infer_nodes.len() as InferNodeIndex;
 
        self.infer_nodes.push(InferenceNode {
 
            expr_type: inference_type,
 
            expr_id,
 
            field_or_monomorph_idx: -1,
 
            parent_index: self.parent_index,
 
            field_or_monomorph_index: -1,
 
            extra_data_idx: -1,
 
            type_id: TypeId::new_invalid(),
 
        });
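// Illustrative sketch, not part of this changeset: the kind of mapping this
// function performs from an expression's parent onto an initial inference
// type. `ParentKind`, `Tpl` and `initial_template` are assumptions for
// illustration; the real code matches on `ExpressionParent` and builds an
// `InferenceType` out of `InferenceTypePart`s.
enum ParentKind { IfCondition, ExpressionStatement, OtherExpression }

#[derive(Debug)]
enum Tpl { Unknown, Bool, Void }

fn initial_template(parent: ParentKind) -> Vec<Tpl> {
    match parent {
        // a condition is always a boolean, so that can be seeded immediately
        ParentKind::IfCondition => vec![Tpl::Bool],
        // a discarded statement-level result is effectively void
        ParentKind::ExpressionStatement => vec![Tpl::Void],
        // a parent expression will constrain this node later; start fully unknown
        ParentKind::OtherExpression => vec![Tpl::Unknown],
    }
}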
 
@@ -3805,7 +4258,7 @@ impl PassTyping {
 
    /// arguments may be partially determined from embedded values in the union.
 
    fn insert_initial_union_polymorph_data(
 
        &mut self, ctx: &Ctx, lit_id: LiteralExpressionId
 
    ) {
 
    ) -> ExtraIndex {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_union();
 

	
 
@@ -3858,21 +4311,20 @@ impl PassTyping {
 
            embedded,
 
            returned: union_type
 
        });
 

	
 
        return extra_data_index;
 
    }
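// Illustrative sketch, not part of this changeset: why a union literal only
// partially determines its polymorphic variables. For something shaped like
// Result<T, E>, building the Ok variant pins T through the embedded value but
// leaves E open; the extra-data entry created above is what keeps tracking the
// still-open variables. `Poly` and the function below are assumptions only.
#[derive(Debug)]
enum Poly { Known(&'static str), Open }

fn poly_vars_for_ok_literal(embedded_value_type: &'static str) -> Vec<Poly> {
    vec![
        Poly::Known(embedded_value_type), // T: fixed by the embedded value, e.g. "u32"
        Poly::Open,                       // E: has to come from later inference
    ]
}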
 

	
 
    /// Inserts the extra polymorphic data struct. Assumes that the select
 
    /// expression's referenced (definition_id, field_idx) has been resolved.
 
    fn insert_initial_select_polymorph_data(
 
        &mut self, ctx: &Ctx, select_id: SelectExpressionId, struct_def_id: DefinitionId
 
        &mut self, ctx: &Ctx, node_index: InferNodeIndex, struct_def_id: DefinitionId
 
    ) -> ExtraIndex {
 
        use InferenceTypePart as ITP;
 

	
 
        // Retrieve relevant data
 
        let expr = &ctx.heap[select_id];
 
        let expr_type = &self.infer_nodes[expr.unique_id_in_definition as usize];
 
        let field_idx = expr_type.field_or_monomorph_idx as usize;
 

	
 
        let definition = ctx.heap[struct_def_id].as_struct();
 
        let node = &self.infer_nodes[node_index];
 
        let field_index = node.field_or_monomorph_index as usize;
 

	
 
        // Generate initial polyvar types and struct type
 
        // TODO: @Performance: we can immediately set the polyvars of the subject's struct type
 
@@ -3892,11 +4344,10 @@ impl PassTyping {
 
        debug_assert_eq!(struct_parts.len(), struct_parts_reserved);
 

	
 
        // Generate initial field type
 
        let field_type = self.determine_inference_type_from_parser_type_elements(&definition.fields[field_idx].parser_type.elements, false);
 
        let field_type = self.determine_inference_type_from_parser_type_elements(&definition.fields[field_index].parser_type.elements, false);
 

	
 
        let extra_data_index = self.extra_data.len() as ExtraIndex;
 
        self.extra_data.push(ExtraData{
 
            expr_id: self.infer_nodes[node_index].expr_id,
 
            definition_id: struct_def_id,
 
            poly_vars,
 
            embedded: vec![InferenceType::new(num_poly_vars != 0, num_poly_vars == 0, struct_parts)],
 
@@ -4041,41 +4492,39 @@ impl PassTyping {
 
    /// But the expression type was already set due to our parent (e.g. an
 
    /// "if statement" or a "logical not" always expecting a boolean)
 
    fn construct_expr_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, arg_id: ExpressionId
 
        &self, ctx: &Ctx, expr_index: InferNodeIndex, arg_index: InferNodeIndex
 
    ) -> ParseError {
 
        // TODO: Expand and provide more meaningful information for humans
 
        let expr = &ctx.heap[expr_id];
 
        let arg_expr = &ctx.heap[arg_id];
 
        let expr_idx = expr.get_unique_id_in_definition();
 
        let arg_expr_idx = arg_expr.get_unique_id_in_definition();
 
        let expr_type = &self.infer_nodes[expr_idx as usize].expr_type;
 
        let arg_type = &self.infer_nodes[arg_expr_idx as usize].expr_type;
 
        let expr_node = &self.infer_nodes[expr_index];
 
        let arg_node = &self.infer_nodes[arg_index];
 

	
 
        let expr = &ctx.heap[expr_node.expr_id];
 
        let arg = &ctx.heap[arg_node.expr_id];
 

	
 
        return ParseError::new_error_at_span(
 
            &ctx.module().source, expr.operation_span(), format!(
 
                "incompatible types: this expression expected a '{}'",
 
                expr_type.display_name(&ctx.heap)
 
                expr_node.expr_type.display_name(&ctx.heap)
 
            )
 
        ).with_info_at_span(
 
            &ctx.module().source, arg_expr.full_span(), format!(
 
            &ctx.module().source, arg.full_span(), format!(
 
                "but this expression yields a '{}'",
 
                arg_type.display_name(&ctx.heap)
 
                arg_node.expr_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_arg_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId
 
        &self, ctx: &Ctx, expr_index: InferNodeIndex,
 
        arg1_index: InferNodeIndex, arg2_index: InferNodeIndex
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let arg1 = &ctx.heap[arg1_id];
 
        let arg2 = &ctx.heap[arg2_id];
 
        let arg1_node = &self.infer_nodes[arg1_index];
 
        let arg2_node = &self.infer_nodes[arg2_index];
 

	
 
        let arg1_idx = arg1.get_unique_id_in_definition();
 
        let arg1_type = &self.infer_nodes[arg1_idx as usize].expr_type;
 
        let arg2_idx = arg2.get_unique_id_in_definition();
 
        let arg2_type = &self.infer_nodes[arg2_idx as usize].expr_type;
 
        let expr_id = self.infer_nodes[expr_index].expr_id;
 
        let expr = &ctx.heap[expr_id];
 
        let arg1 = &ctx.heap[arg1_node.expr_id];
 
        let arg2 = &ctx.heap[arg2_node.expr_id];
 

	
 
        return ParseError::new_error_str_at_span(
 
            &ctx.module().source, expr.operation_span(),
 
@@ -4083,22 +4532,22 @@ impl PassTyping {
 
        ).with_info_at_span(
 
            &ctx.module().source, arg1.full_span(), format!(
 
                "Because this expression has type '{}'",
 
                arg1_type.display_name(&ctx.heap)
 
                arg1_node.expr_type.display_name(&ctx.heap)
 
            )
 
        ).with_info_at_span(
 
            &ctx.module().source, arg2.full_span(), format!(
 
                "But this expression has type '{}'",
 
                arg2_type.display_name(&ctx.heap)
 
                arg2_node.expr_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_template_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
        &self, ctx: &Ctx, node_index: InferNodeIndex, template: &[InferenceTypePart]
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let expr_idx = expr.get_unique_id_in_definition();
 
        let expr_type = &self.infer_nodes[expr_idx as usize].expr_type;
 
        let node = &self.infer_nodes[node_index];
 
        let expr = &ctx.heap[node.expr_id];
 
        let expr_type = &node.expr_type;
 

	
 
        return ParseError::new_error_at_span(
 
            &ctx.module().source, expr.full_span(), format!(