Changeset - 10d2e43f1350
mh - 2021-05-20 11:19:56
contact@maxhenger.nl
WIP on poly rewrite
8 files changed with 399 insertions and 207 deletions:
src/collections/mod.rs
 mod string_pool;
 mod scoped_buffer;
+mod sets;
 
 pub(crate) use string_pool::{StringPool, StringRef};
-pub(crate) use scoped_buffer::{ScopedBuffer, ScopedSection};
\ No newline at end of file
+pub(crate) use scoped_buffer::{ScopedBuffer, ScopedSection};
+pub(crate) use sets::DequeSet;
\ No newline at end of file
src/collections/sets.rs
new file 100644

use std::collections::VecDeque;

/// Simple double-ended queue that ensures that all elements are unique. The
/// queue is not kept in any sorted order.
pub struct DequeSet<T: Eq> {
    inner: VecDeque<T>,
}

impl<T: Eq> DequeSet<T> {
    pub fn new() -> Self {
        Self{ inner: VecDeque::new() }
    }

    #[inline]
    pub fn pop_front(&mut self) -> Option<T> {
        self.inner.pop_front()
    }

    #[inline]
    pub fn pop_back(&mut self) -> Option<T> {
        self.inner.pop_back()
    }

    #[inline]
    pub fn push_back(&mut self, to_push: T) {
        for element in self.inner.iter() {
            if *element == to_push {
                return;
            }
        }

        self.inner.push_back(to_push);
    }

    #[inline]
    pub fn push_front(&mut self, to_push: T) {
        for element in self.inner.iter() {
            if *element == to_push {
                return;
            }
        }

        self.inner.push_front(to_push);
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.inner.is_empty()
    }
}
\ No newline at end of file
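
A minimal usage sketch of the DequeSet added above (illustrative, not part of the changeset): a push of an element that is still queued is silently ignored, which matches its later use as the expr_queued work list in pass_typing.rs.

fn dequeset_sketch() {
    let mut queue = DequeSet::new();
    queue.push_back(3);
    queue.push_back(5);
    queue.push_back(3); // 3 is still queued, so this push is a no-op
    assert_eq!(queue.pop_front(), Some(3));
    assert_eq!(queue.pop_front(), Some(5));
    assert!(queue.is_empty());
    queue.push_back(3); // accepted again once the element has been popped
}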
src/protocol/ast.rs
 
@@ -375,14 +375,14 @@ pub enum ParserTypeVariant {
 
    Inferred,
 
    // Builtins expecting one subsequent type
 
    Array,
 
    Input,
 
    Output,
 
    // User-defined types
 
    PolymorphicArgument(DefinitionId, usize), // usize = index into polymorphic variables
 
    Definition(DefinitionId, usize), // usize = number of following subtypes
 
    PolymorphicArgument(DefinitionId, u32), // u32 = index into polymorphic variables
 
    Definition(DefinitionId, u32), // u32 = number of following subtypes
 
}
 

	
 
impl ParserTypeVariant {
 
    pub(crate) fn num_embedded(&self) -> usize {
 
        use ParserTypeVariant::*;
 

	
 
@@ -484,16 +484,12 @@ pub enum SymbolicParserTypeVariant {
 
}
 

	
 
/// ConcreteType is the representation of a type after resolving symbolic types
 
/// and performing type inference
 
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
 
pub enum ConcreteTypePart {
 
    // Markers for the use of polymorphic types within a procedure's body that
 
    // refer to polymorphic variables on the procedure's definition. Different
 
    // from markers in the `InferenceType`, these will not contain nested types.
 
    Marker(usize),
 
    // Special types (cannot be explicitly constructed by the programmer)
 
    Void,
 
    // Builtin types without nested types
 
    Message,
 
    Bool,
 
    UInt8, UInt16, UInt32, UInt64,
 
@@ -502,13 +498,13 @@ pub enum ConcreteTypePart {
 
    // Builtin types with one nested type
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // User defined type with any number of nested types
 
    Instance(DefinitionId, usize),
 
    Instance(DefinitionId, u32),
 
}
 

	
 
#[derive(Debug, Clone, Eq, PartialEq)]
 
pub struct ConcreteType {
 
    pub(crate) parts: Vec<ConcreteTypePart>
 
}
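
As an aside (a sketch, not code from this diff): ConcreteType stores a type tree as a depth-first serialization of its parts, each part followed by the parts of its nested types; the u32 in Instance is read here as the number of nested types, mirroring the ParserTypeVariant comments above. The helper name and struct_id below are hypothetical.

fn example_concrete_type(struct_id: DefinitionId) -> ConcreteType {
    // Encodes an array of some user-defined Pair<bool, u32>:
    ConcreteType{
        parts: vec![
            ConcreteTypePart::Array,                  // array with one nested type
            ConcreteTypePart::Instance(struct_id, 2), // Pair with two nested types
            ConcreteTypePart::Bool,                   //   first argument
            ConcreteTypePart::UInt32,                 //   second argument
        ],
    }
}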
 
@@ -923,50 +919,56 @@ pub enum ComponentVariant {
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ComponentDefinition {
 
    pub this: ComponentDefinitionId,
 
    pub defined_in: RootId,
 
    // Phase 1: symbol scanning
 
    // Symbol scanning
 
    pub span: InputSpan,
 
    pub variant: ComponentVariant,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Phase 2: parsing
 
    // Parsing
 
    pub parameters: Vec<VariableId>,
 
    pub body: BlockStatementId,
 
    // Validation/linking
 
    pub num_expressions_in_body: i32,
 
}
 

	
 
impl ComponentDefinition {
 
    // Used for preallocation during symbol scanning
 
    pub(crate) fn new_empty(
 
        this: ComponentDefinitionId, defined_in: RootId, span: InputSpan,
 
        variant: ComponentVariant, identifier: Identifier, poly_vars: Vec<Identifier>
 
    ) -> Self {
 
        Self{ 
 
            this, defined_in, span, variant, identifier, poly_vars,
 
            parameters: Vec::new(), 
 
            body: BlockStatementId::new_invalid()
 
            body: BlockStatementId::new_invalid(),
 
            num_expressions_in_body: -1,
 
        }
 
    }
 
}
 

	
 
// Note that we will have function definitions for builtin functions as well. In
 
// that case the span, the identifier span and the body are all invalid.
 
#[derive(Debug, Clone)]
 
pub struct FunctionDefinition {
 
    pub this: FunctionDefinitionId,
 
    pub defined_in: RootId,
 
    // Phase 1: symbol scanning
 
    // Symbol scanning
 
    pub builtin: bool,
 
    pub span: InputSpan,
 
    pub identifier: Identifier,
 
    pub poly_vars: Vec<Identifier>,
 
    // Phase 2: parsing
 
    // Parser
 
    pub return_types: Vec<ParserType>,
 
    pub parameters: Vec<VariableId>,
 
    pub body: BlockStatementId,
 
    // Validation/linking
 
    pub num_expressions_in_body: i32,
 
}
 

	
 
impl FunctionDefinition {
 
    pub(crate) fn new_empty(
 
        this: FunctionDefinitionId, defined_in: RootId, span: InputSpan,
 
        identifier: Identifier, poly_vars: Vec<Identifier>
 
@@ -975,12 +977,13 @@ impl FunctionDefinition {
 
            this, defined_in,
 
            builtin: false,
 
            span, identifier, poly_vars,
 
            return_types: Vec::new(),
 
            parameters: Vec::new(),
 
            body: BlockStatementId::new_invalid(),
 
            num_expressions_in_body: -1,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Statement {
 
@@ -1598,12 +1601,28 @@ impl Expression {
 
            Expression::Select(expr) => &mut expr.concrete_type,
 
            Expression::Literal(expr) => &mut expr.concrete_type,
 
            Expression::Call(expr) => &mut expr.concrete_type,
 
            Expression::Variable(expr) => &mut expr.concrete_type,
 
        }
 
    }
 

	
 
    pub fn get_unique_id_in_definition(&self) -> i32 {
 
        match self {
 
            Expression::Assignment(expr) => expr.unique_id_in_definition,
 
            Expression::Binding(expr) => expr.unique_id_in_definition,
 
            Expression::Conditional(expr) => expr.unique_id_in_definition,
 
            Expression::Binary(expr) => expr.unique_id_in_definition,
 
            Expression::Unary(expr) => expr.unique_id_in_definition,
 
            Expression::Indexing(expr) => expr.unique_id_in_definition,
 
            Expression::Slicing(expr) => expr.unique_id_in_definition,
 
            Expression::Select(expr) => expr.unique_id_in_definition,
 
            Expression::Literal(expr) => expr.unique_id_in_definition,
 
            Expression::Call(expr) => expr.unique_id_in_definition,
 
            Expression::Variable(expr) => expr.unique_id_in_definition,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy)]
 
pub enum AssignmentOperator {
 
    Set,
 
    Multiplied,
 
@@ -1625,12 +1644,13 @@ pub struct AssignmentExpression {
 
    pub span: InputSpan, // of the operator
 
    pub left: ExpressionId,
 
    pub operation: AssignmentOperator,
 
    pub right: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
    // Typing
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct BindingExpression {
 
@@ -1638,12 +1658,13 @@ pub struct BindingExpression {
 
    // Parsing
 
    pub span: InputSpan,
 
    pub left: LiteralExpressionId,
 
    pub right: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
    // Typing
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct ConditionalExpression {
 
@@ -1652,12 +1673,13 @@ pub struct ConditionalExpression {
 
    pub span: InputSpan, // of question mark operator
 
    pub test: ExpressionId,
 
    pub true_expression: ExpressionId,
 
    pub false_expression: ExpressionId,
 
    // Validator/Linking
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
    // Typing
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 
pub enum BinaryOperator {
 
@@ -1689,12 +1711,13 @@ pub struct BinaryExpression {
 
    pub span: InputSpan, // of the operator
 
    pub left: ExpressionId,
 
    pub operation: BinaryOperator,
 
    pub right: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
    // Typing
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
 
pub enum UnaryOperator {
 
@@ -1714,12 +1737,13 @@ pub struct UnaryExpression {
 
    // Parsing
 
    pub span: InputSpan, // of the operator
 
    pub operation: UnaryOperator,
 
    pub expression: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
    // Typing
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct IndexingExpression {
 
@@ -1727,12 +1751,13 @@ pub struct IndexingExpression {
 
    // Parsing
 
    pub span: InputSpan,
 
    pub subject: ExpressionId,
 
    pub index: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
    // Typing
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SlicingExpression {
 
@@ -1741,12 +1766,13 @@ pub struct SlicingExpression {
 
    pub span: InputSpan, // from '[' to ']';
 
    pub subject: ExpressionId,
 
    pub from_index: ExpressionId,
 
    pub to_index: ExpressionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
    // Typing
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct SelectExpression {
 
@@ -1754,12 +1780,13 @@ pub struct SelectExpression {
 
    // Parsing
 
    pub span: InputSpan, // of the '.'
 
    pub subject: ExpressionId,
 
    pub field: Field,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
    // Typing
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub struct CallExpression {
 
@@ -1769,12 +1796,13 @@ pub struct CallExpression {
 
    pub parser_type: ParserType, // of the function call, not the return type
 
    pub method: Method,
 
    pub arguments: Vec<ExpressionId>,
 
    pub definition: DefinitionId,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
    // Typing
 
    pub concrete_type: ConcreteType, // of the return type
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq)]
 
pub enum Method {
 
@@ -1800,12 +1828,13 @@ pub struct LiteralExpression {
 
    pub this: LiteralExpressionId,
 
    // Parsing
 
    pub span: InputSpan,
 
    pub value: Literal,
 
    // Validator/Linker
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
    // Typing
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
#[derive(Debug, Clone)]
 
pub enum Literal {
 
@@ -1904,9 +1933,10 @@ pub struct VariableExpression {
 
    pub this: VariableExpressionId,
 
    // Parsing
 
    pub identifier: Identifier,
 
    // Validator/Linker
 
    pub declaration: Option<VariableId>,
 
    pub parent: ExpressionParent,
 
    pub unique_id_in_definition: i32,
 
    // Typing
 
    pub concrete_type: ConcreteType,
 
}
 
\ No newline at end of file
src/protocol/parser/pass_definitions.rs
 
@@ -5,13 +5,13 @@ use super::tokens::*;
 
use super::token_parsing::*;
 
use crate::protocol::input_source::{InputSource as InputSource, InputPosition as InputPosition, InputSpan, ParseError};
 
use crate::collections::*;
 

	
 
/// Parses all the tokenized definitions into actual AST nodes.
 
pub(crate) struct PassDefinitions {
 
    // State
 
    // State associated with the definition currently being processed
 
    cur_definition: DefinitionId,
 
    // Temporary buffers of various kinds
 
    buffer: String,
 
    struct_fields: ScopedBuffer<StructFieldDefinition>,
 
    enum_variants: ScopedBuffer<EnumVariantDefinition>,
 
    union_variants: ScopedBuffer<UnionVariantDefinition>,
 
@@ -244,12 +244,13 @@ impl PassDefinitions {
 
        Ok(())
 
    }
 

	
 
    fn visit_function_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        // Retrieve function name
 
        consume_exact_ident(&module.source, iter, KW_FUNCTION)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
@@ -298,12 +299,13 @@ impl PassDefinitions {
 
        Ok(())
 
    }
 

	
 
    fn visit_component_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        // Consume component variant and name
 
        let (_variant_text, _) = consume_any_ident(&module.source, iter)?;
 
        debug_assert!(_variant_text == KW_PRIMITIVE || _variant_text == KW_COMPOSITE);
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated definition
 
        let module_scope = SymbolScope::Module(module.root_id);
 
@@ -832,21 +834,23 @@ impl PassDefinitions {
 
                // Allocate the initial assignment
 
                let variable_expr_id = ctx.heap.alloc_variable_expression(|this| VariableExpression{
 
                    this,
 
                    identifier,
 
                    declaration: None,
 
                    parent: ExpressionParent::None,
 
                    unique_id_in_definition: -1,
 
                    concrete_type: Default::default()
 
                });
 
                let assignment_expr_id = ctx.heap.alloc_assignment_expression(|this| AssignmentExpression{
 
                    this,
 
                    span: assign_span,
 
                    left: variable_expr_id.upcast(),
 
                    operation: AssignmentOperator::Set,
 
                    right: initial_expr_id,
 
                    parent: ExpressionParent::None,
 
                    unique_id_in_definition: -1,
 
                    concrete_type: Default::default(),
 
                });
 
                let assignment_stmt_id = ctx.heap.alloc_expression_statement(|this| ExpressionStatement{
 
                    this,
 
                    span: InputSpan::from_positions(initial_expr_begin_pos, initial_expr_end_pos),
 
                    expression: assignment_expr_id.upcast(),
 
@@ -927,12 +931,13 @@ impl PassDefinitions {
 
            let left = expr;
 
            let right = self.consume_expression(module, iter, ctx)?;
 

	
 
            Ok(ctx.heap.alloc_assignment_expression(|this| AssignmentExpression{
 
                this, span, left, operation, right,
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast())
 
        } else {
 
            Ok(expr)
 
        }
 
    }
 
@@ -949,12 +954,13 @@ impl PassDefinitions {
 
            let true_expression = self.consume_expression(module, iter, ctx)?;
 
            consume_token(&module.source, iter, TokenKind::Colon)?;
 
            let false_expression = self.consume_expression(module, iter, ctx)?;
 
            Ok(ctx.heap.alloc_conditional_expression(|this| ConditionalExpression{
 
                this, span, test, true_expression, false_expression,
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 
@@ -1132,12 +1138,13 @@ impl PassDefinitions {
 
            iter.consume();
 

	
 
            let expression = self.consume_prefix_expression(module, iter, ctx)?;
 
            Ok(ctx.heap.alloc_unary_expression(|this| UnaryExpression {
 
                this, span, operation, expression,
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
                concrete_type: ConcreteType::default()
 
            }).upcast())
 
        } else {
 
            self.consume_postfix_expression(module, iter, ctx)
 
        }
 
    }
 
@@ -1165,20 +1172,22 @@ impl PassDefinitions {
 
            if token == TokenKind::PlusPlus {
 
                result = ctx.heap.alloc_unary_expression(|this| UnaryExpression{
 
                    this, span,
 
                    operation: UnaryOperator::PostIncrement,
 
                    expression: result,
 
                    parent: ExpressionParent::None,
 
                    unique_id_in_definition: -1,
 
                    concrete_type: ConcreteType::default()
 
                }).upcast();
 
            } else if token == TokenKind::MinusMinus {
 
                result = ctx.heap.alloc_unary_expression(|this| UnaryExpression{
 
                    this, span,
 
                    operation: UnaryOperator::PostDecrement,
 
                    expression: result,
 
                    parent: ExpressionParent::None,
 
                    unique_id_in_definition: -1,
 
                    concrete_type: ConcreteType::default()
 
                }).upcast();
 
            } else if token == TokenKind::OpenSquare {
 
                let subject = result;
 
                let from_index = self.consume_expression(module, iter, ctx)?;
 

	
 
@@ -1191,22 +1200,24 @@ impl PassDefinitions {
 
                    let end_span = consume_token(&module.source, iter, TokenKind::CloseSquare)?;
 
                    span.end = end_span.end;
 

	
 
                    result = ctx.heap.alloc_slicing_expression(|this| SlicingExpression{
 
                        this, span, subject, from_index, to_index,
 
                        parent: ExpressionParent::None,
 
                        unique_id_in_definition: -1,
 
                        concrete_type: ConcreteType::default()
 
                    }).upcast();
 
                } else if Some(TokenKind::CloseSquare) == next {
 
                    let end_span = consume_token(&module.source, iter, TokenKind::CloseSquare)?;
 
                    span.end = end_span.end;
 

	
 
                    result = ctx.heap.alloc_indexing_expression(|this| IndexingExpression{
 
                        this, span, subject,
 
                        index: from_index,
 
                        parent: ExpressionParent::None,
 
                        unique_id_in_definition: -1,
 
                        concrete_type: ConcreteType::default()
 
                    }).upcast();
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        &module.source, iter.last_valid_pos(), "unexpected token: expected ']' or '..'"
 
                    ));
 
@@ -1223,12 +1234,13 @@ impl PassDefinitions {
 
                    Field::Symbolic(FieldSymbolic{ identifier, definition: None, field_idx: 0 })
 
                };
 

	
 
                result = ctx.heap.alloc_select_expression(|this| SelectExpression{
 
                    this, span, subject, field,
 
                    parent: ExpressionParent::None,
 
                    unique_id_in_definition: -1,
 
                    concrete_type: ConcreteType::default()
 
                }).upcast();
 
            }
 

	
 
            next = iter.next();
 
        }
 
@@ -1260,40 +1272,44 @@ impl PassDefinitions {
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this,
 
                span: InputSpan::from_positions(start_pos, end_pos),
 
                value: Literal::Array(scoped_section.into_vec()),
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast()
 
        } else if next == Some(TokenKind::Integer) {
 
            let (literal, span) = consume_integer_literal(&module.source, iter, &mut self.buffer)?;
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this, span,
 
                value: Literal::Integer(LiteralInteger{ unsigned_value: literal, negated: false }),
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast()
 
        } else if next == Some(TokenKind::String) {
 
            let span = consume_string_literal(&module.source, iter, &mut self.buffer)?;
 
            let interned = ctx.pool.intern(self.buffer.as_bytes());
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this, span,
 
                value: Literal::String(interned),
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast()
 
        } else if next == Some(TokenKind::Character) {
 
            let (character, span) = consume_character_literal(&module.source, iter)?;
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this, span,
 
                value: Literal::Character(character),
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
                concrete_type: ConcreteType::default(),
 
            }).upcast()
 
        } else if next == Some(TokenKind::Ident) {
 
            // May be a variable, a type instantiation or a function call. If we
 
            // have a single identifier that we cannot find in the type table
 
            // then we're going to assume that we're dealing with a variable.
 
@@ -1339,12 +1355,13 @@ impl PassDefinitions {
 
                                    value: Literal::Struct(LiteralStruct{
 
                                        parser_type,
 
                                        fields: struct_fields,
 
                                        definition: target_definition_id,
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    unique_id_in_definition: -1,
 
                                    concrete_type: ConcreteType::default(),
 
                                }).upcast()
 
                            },
 
                            Definition::Enum(_) => {
 
                                // Enum literal: consume the variant
 
                                consume_token(&module.source, iter, TokenKind::ColonColon)?;
 
@@ -1357,12 +1374,13 @@ impl PassDefinitions {
 
                                        parser_type,
 
                                        variant,
 
                                        definition: target_definition_id,
 
                                        variant_idx: 0
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    unique_id_in_definition: -1,
 
                                    concrete_type: ConcreteType::default()
 
                                }).upcast()
 
                            },
 
                            Definition::Union(_) => {
 
                                // Union literal: consume the variant
 
                                consume_token(&module.source, iter, TokenKind::ColonColon)?;
 
@@ -1382,12 +1400,13 @@ impl PassDefinitions {
 
                                    value: Literal::Union(LiteralUnion{
 
                                        parser_type, variant, values,
 
                                        definition: target_definition_id,
 
                                        variant_idx: 0,
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    unique_id_in_definition: -1,
 
                                    concrete_type: ConcreteType::default()
 
                                }).upcast()
 
                            },
 
                            Definition::Component(_) => {
 
                                // Component instantiation
 
                                let arguments = self.consume_expression_list(module, iter, ctx, None)?;
 
@@ -1397,12 +1416,13 @@ impl PassDefinitions {
 
                                    span: parser_type.elements[0].full_span, // TODO: @Span fix
 
                                    parser_type,
 
                                    method: Method::UserComponent,
 
                                    arguments,
 
                                    definition: target_definition_id,
 
                                    parent: ExpressionParent::None,
 
                                    unique_id_in_definition: -1,
 
                                    concrete_type: ConcreteType::default(),
 
                                }).upcast()
 
                            },
 
                            Definition::Function(function_definition) => {
 
                                // Check whether it is a builtin function
 
                                let method = if function_definition.builtin {
 
@@ -1427,12 +1447,13 @@ impl PassDefinitions {
 
                                    span: parser_type.elements[0].full_span, // TODO: @Span fix
 
                                    parser_type,
 
                                    method,
 
                                    arguments,
 
                                    definition: target_definition_id,
 
                                    parent: ExpressionParent::None,
 
                                    unique_id_in_definition: -1,
 
                                    concrete_type: ConcreteType::default(),
 
                                }).upcast()
 
                            }
 
                        }
 
                    },
 
                    _ => {
 
@@ -1458,12 +1479,13 @@ impl PassDefinitions {
 

	
 
                    ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                        this,
 
                        span: ident_span,
 
                        value,
 
                        parent: ExpressionParent::None,
 
                        unique_id_in_definition: -1,
 
                        concrete_type: ConcreteType::default(),
 
                    }).upcast()
 
                } else {
 
                    // Not a builtin literal, but also not a known type. So we
 
                    // assume it is a variable expression. Although if we do,
 
                    // then if a programmer mistyped a struct/function name the
 
@@ -1492,12 +1514,13 @@ impl PassDefinitions {
 

	
 
                    ctx.heap.alloc_variable_expression(|this| VariableExpression {
 
                        this,
 
                        identifier,
 
                        declaration: None,
 
                        parent: ExpressionParent::None,
 
                        unique_id_in_definition: -1,
 
                        concrete_type: ConcreteType::default()
 
                    }).upcast()
 
                }
 
            }
 
        } else {
 
            return Err(ParseError::new_error_str_at_pos(
 
@@ -1527,12 +1550,13 @@ impl PassDefinitions {
 
            let left = result;
 
            let right = higher_precedence_fn(self, module, iter, ctx)?;
 

	
 
            result = ctx.heap.alloc_binary_expression(|this| BinaryExpression{
 
                this, span, left, operation, right,
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
                concrete_type: ConcreteType::default()
 
            }).upcast();
 
        }
 

	
 
        Ok(result)
 
    }
 
@@ -1854,13 +1878,13 @@ fn consume_parser_type_ident(
 
        },
 
        _ => {
 
            // Must be some kind of symbolic type
 
            let mut type_kind = None;
 
            for (poly_idx, poly_var) in poly_vars.iter().enumerate() {
 
                if poly_var.value.as_bytes() == type_text {
 
                    type_kind = Some(PTV::PolymorphicArgument(wrapping_definition, poly_idx));
 
                    type_kind = Some(PTV::PolymorphicArgument(wrapping_definition, poly_idx as u32));
 
                }
 
            }
 

	
 
            if type_kind.is_none() {
 
                // Check symbol table for definition. To be fair, the language
 
                // only allows a single namespace for now. That said:
 
@@ -1910,13 +1934,13 @@ fn consume_parser_type_ident(
 
                            }
 

	
 
                            last_symbol = new_symbol.unwrap();
 
                        },
 
                        SymbolVariant::Definition(symbol_definition) => {
 
                            let num_poly_vars = heap[symbol_definition.definition_id].poly_vars().len();
 
                            type_kind = Some(PTV::Definition(symbol_definition.definition_id, num_poly_vars));
 
                            type_kind = Some(PTV::Definition(symbol_definition.definition_id, num_poly_vars as u32));
 
                            break;
 
                        }
 
                    }
 
                }
 
            }
 

	
src/protocol/parser/pass_typing.rs
 
@@ -57,12 +57,13 @@ macro_rules! debug_log {
 
        enabled_debug_print!(false, "types", $format, $($args),*);
 
    };
 
}
 

	
 
use std::collections::{HashMap, HashSet};
 

	
 
use crate::collections::DequeSet;
 
use crate::protocol::ast::*;
 
use crate::protocol::input_source::ParseError;
 
use crate::protocol::parser::ModuleCompilationPhase;
 
use crate::protocol::parser::type_table::*;
 
use crate::protocol::parser::token_parsing::*;
 
use super::visitor::{
 
@@ -79,25 +80,26 @@ const MESSAGE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Message, I
 
const BOOL_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Bool ];
 
const CHARACTER_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Character ];
 
const STRING_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::String ];
 
const NUMBERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::NumberLike ];
 
const INTEGERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::IntegerLike ];
 
const ARRAY_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Array, InferenceTypePart::Unknown ];
 
const SLICE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Slice, InferenceTypePart::Unknown ];
 
const ARRAYLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::ArrayLike, InferenceTypePart::Unknown ];
 

	
 
/// TODO: @performance Turn into PartialOrd+Ord to simplify checks
 
/// TODO: @types Remove the Message -> Byte hack at some point...
 
#[derive(Debug, Clone, Eq, PartialEq)]
 
pub(crate) enum InferenceTypePart {
 
    // A marker with an identifier which we can use to retrieve the type subtree
 
    // that follows the marker. This is used to perform type inference on
 
    // polymorphs: an expression may determine the polymorphs type, after we
 
    // need to apply that information to all other places where the polymorph is
 
    // used.
 
    MarkerDefinition(usize), // marker for polymorph types on a procedure's definition
 
    MarkerBody(usize), // marker for polymorph types within a procedure body
 
    // When we infer types of AST elements that support polymorphic arguments,
 
    // then we might have the case that multiple embedded types depend on the
 
    // polymorphic type (e.g. func bla(T a, T[] b) -> T[][]). If we can infer
 
    // the type in one place (e.g. argument a), then we may propagate this
 
    // information to other types (e.g. argument b and the return type). For
 
    // this reason we place markers in the `InferenceType` instances such that
 
    // we know which part of the type was originally a polymorphic argument.
 
    Marker(u32),
 
    // Completely unknown type, needs to be inferred
 
    Unknown,
 
    // Partially known type, may be inferred to be the appropriate related
 
    // type.
 
    // IndexLike,      // index into array/slice
 
    NumberLike,     // any kind of integer/float
 
@@ -122,21 +124,19 @@ pub(crate) enum InferenceTypePart {
 
    Message,
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // A user-defined type with any number of subtypes
 
    Instance(DefinitionId, usize)
 
    Instance(DefinitionId, u32)
 
}
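
To illustrate the marker mechanism described in the comments above (a hypothetical sketch, not code from this changeset): for the example signature func bla(T a, T[] b) -> T[][], every occurrence of T is preceded by the same marker, so a type inferred for one occurrence can be propagated to the subtrees that follow the other markers.

fn marker_sketch() {
    use InferenceTypePart as ITP;
    // Assuming marker 0 stands for the polymorphic variable T:
    let arg_a = vec![ITP::Marker(0), ITP::Unknown];                         // T
    let arg_b = vec![ITP::Array, ITP::Marker(0), ITP::Unknown];             // T[]
    let ret   = vec![ITP::Array, ITP::Array, ITP::Marker(0), ITP::Unknown]; // T[][]
    // Once the subtree after Marker(0) in arg_a is inferred (say, to UInt32),
    // the subtrees after Marker(0) in arg_b and ret can be updated as well.
    let _ = (arg_a, arg_b, ret);
}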
 

	
 
impl InferenceTypePart {
 
    fn is_marker(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        match self {
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) => true,
 
            InferenceTypePart::Marker(_) => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if the type is concrete, markers are interpreted as concrete
 
    /// types.
 
@@ -157,14 +157,15 @@ impl InferenceTypePart {
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_integer(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {ITP::UInt8 | ITP::UInt16 | ITP::UInt32 | ITP::UInt64 |
 
        ITP::SInt8 | ITP::SInt16 | ITP::SInt32 | ITP::SInt64 => true,
 
        match self {
 
            ITP::UInt8 | ITP::UInt16 | ITP::UInt32 | ITP::UInt64 |
 
            ITP::SInt8 | ITP::SInt16 | ITP::SInt32 | ITP::SInt64 => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_msg_array_or_slice(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
@@ -204,13 +205,13 @@ impl InferenceTypePart {
 
            ITP::Void | ITP::Bool |
 
            ITP::UInt8 | ITP::UInt16 | ITP::UInt32 | ITP::UInt64 |
 
            ITP::SInt8 | ITP::SInt16 | ITP::SInt32 | ITP::SInt64 |
 
            ITP::Character | ITP::String => {
 
                -1
 
            },
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) |
 
            ITP::Marker(_) |
 
            ITP::ArrayLike | ITP::Message | ITP::Array | ITP::Slice |
 
            ITP::PortLike | ITP::Input | ITP::Output => {
 
                // One subtype, so do not modify depth
 
                0
 
            },
 
            ITP::Instance(_, num_args) => {
 
@@ -223,15 +224,12 @@ impl InferenceTypePart {
 
impl From<ConcreteTypePart> for InferenceTypePart {
 
    fn from(v: ConcreteTypePart) -> InferenceTypePart {
 
        use ConcreteTypePart as CTP;
 
        use InferenceTypePart as ITP;
 

	
 
        match v {
 
            CTP::Marker(_) => {
 
                unreachable!("encountered marker while converting concrete type to inferred type");
 
            }
 
            CTP::Void => ITP::Void,
 
            CTP::Message => ITP::Message,
 
            CTP::Bool => ITP::Bool,
 
            CTP::UInt8 => ITP::UInt8,
 
            CTP::UInt16 => ITP::UInt16,
 
            CTP::UInt32 => ITP::UInt32,
 
@@ -250,31 +248,29 @@ impl From<ConcreteTypePart> for InferenceTypePart {
 
        }
 
    }
 
}
 

	
 
#[derive(Debug)]
 
struct InferenceType {
 
    has_body_marker: bool,
 
    has_marker: bool,
 
    is_done: bool,
 
    parts: Vec<InferenceTypePart>,
 
}
 

	
 
impl InferenceType {
 
    /// Generates a new InferenceType. The two boolean flags will be checked in
 
    /// debug mode.
 
    fn new(has_body_marker: bool, is_done: bool, parts: Vec<InferenceTypePart>) -> Self {
 
    fn new(has_marker: bool, is_done: bool, parts: Vec<InferenceTypePart>) -> Self {
 
        if cfg!(debug_assertions) {
 
            debug_assert!(!parts.is_empty());
 
            let parts_body_marker = parts.iter().any(|v| {
 
                if let InferenceTypePart::MarkerBody(_) = v { true } else { false }
 
            });
 
            debug_assert_eq!(has_body_marker, parts_body_marker);
 
            let parts_body_marker = parts.iter().any(|v| v.is_marker());
 
            debug_assert_eq!(has_marker, parts_body_marker);
 
            let parts_done = parts.iter().all(|v| v.is_concrete());
 
            debug_assert_eq!(is_done, parts_done, "{:?}", parts);
 
        }
 
        Self{ has_body_marker, is_done, parts }
 
        Self{ has_marker, is_done, parts }
 
    }
 

	
 
    /// Replaces a type subtree with the provided subtree. The caller must make
 
    /// sure that the replacement is a well-formed type subtree.
 
    fn replace_subtree(&mut self, start_idx: usize, with: &[InferenceTypePart]) {
 
        let end_idx = Self::find_subtree_end_idx(&self.parts, start_idx);
 
@@ -288,13 +284,13 @@ impl InferenceType {
 
        self.is_done = self.parts.iter().all(|v| v.is_concrete());
 
    }
 

	
 
    /// Seeks a marker starting at the specified position. If a marker is
 
    /// found then its value and the index of the type subtree that follows it
 
    /// is returned.
 
    fn find_body_marker(&self, mut start_idx: usize) -> Option<(usize, usize)> {
 
    fn find_marker(&self, mut start_idx: usize) -> Option<(usize, usize)> {
 
        while start_idx < self.parts.len() {
 
            if let InferenceTypePart::MarkerBody(marker) = &self.parts[start_idx] {
 
                return Some((*marker, start_idx + 1))
 
            }
 

	
 
            start_idx += 1;
 
@@ -303,13 +299,13 @@ impl InferenceType {
 
        None
 
    }
 

	
 
    /// Returns an iterator over all markers and the partial type tree that
    /// follows those markers. If it is a problem that `InferenceType` is
    /// borrowed by the iterator, then use `find_marker`.
 
    fn body_marker_iter(&self) -> InferenceTypeMarkerIter {
 
    fn marker_iter(&self) -> InferenceTypeMarkerIter {
 
        InferenceTypeMarkerIter::new(&self.parts)
 
    }
 

	
 
    /// Given that the `parts` are a depth-first serialized tree of types, this
 
    /// function finds the subtree anchored at a specific node. The returned 
 
    /// index is exclusive.
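
A worked example of this lookup (illustrative; the indices follow from the depth-change rules above, with pair_id standing in for some two-argument user type):

// parts = [Array, Instance(pair_id, 2), Bool, UInt32]   an array of Pair<bool, u32>
// find_subtree_end_idx(&parts, 0) == 4   // the whole array type
// find_subtree_end_idx(&parts, 1) == 4   // the Pair instance plus its two arguments
// find_subtree_end_idx(&parts, 2) == 3   // just the Bool argument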
 
@@ -348,62 +344,38 @@ impl InferenceType {
 
    ) -> Option<i32> {
 
        use InferenceTypePart as ITP;
 

	
 
        let to_infer_part = &to_infer.parts[*to_infer_idx];
 
        let template_part = &template_parts[*template_idx];
 

	
 
        // TODO: Maybe do this differently?
 
        let mut template_definition_marker = None;
 
        if *template_idx > 0 {
 
            if let ITP::MarkerDefinition(marker) = &template_parts[*template_idx - 1] {
 
                template_definition_marker = Some(*marker)
 
            }
 
        }
 

	
 
        // Check for programmer mistakes
 
        debug_assert_ne!(to_infer_part, template_part);
 
        debug_assert!(!to_infer_part.is_marker(), "marker encountered in 'infer part'");
 
        debug_assert!(!template_part.is_marker(), "marker encountered in 'template part'");
 

	
 
        // Inference of a somewhat-specified type
 
        if to_infer_part.may_be_inferred_from(template_part) {
 
            let depth_change = to_infer_part.depth_change();
 
            debug_assert_eq!(depth_change, template_part.depth_change());
 

	
 
            if let Some(marker) = template_definition_marker {
 
                to_infer.parts.insert(*to_infer_idx, ITP::MarkerDefinition(marker));
 
                *to_infer_idx += 1;
 
            }
 

	
 
            to_infer.parts[*to_infer_idx] = template_part.clone();
 

	
 
            *to_infer_idx += 1;
 
            *template_idx += 1;
 
            return Some(depth_change);
 
        }
 

	
 
        // Inference of a completely unknown type
 
        if *to_infer_part == ITP::Unknown {
 
            // template part is different, so cannot be unknown, hence copy the
 
            // entire subtree. Make sure to copy definition markers, but not to
 
            // transfer polymorph markers.
 
            // entire subtree. Make sure not to copy markers.
 
            let template_end_idx = Self::find_subtree_end_idx(template_parts, *template_idx);
 
            let template_start_idx = if let Some(marker) = template_definition_marker {
 
                to_infer.parts[*to_infer_idx] = ITP::MarkerDefinition(marker);
 
                *template_idx
 
            } else {
 
                to_infer.parts[*to_infer_idx] = template_parts[*template_idx].clone();
 
                *template_idx + 1
 
            };
 
            *to_infer_idx += 1;
 

	
 
            for template_idx in template_start_idx..template_end_idx {
 
            for template_idx in *template_idx..template_end_idx {
 
                let template_part = &template_parts[template_idx];
 
                if let ITP::MarkerBody(_) = template_part {
 
                    // Do not copy this one
 
                } else {
 
                if !template_part.is_marker() {
 
                    to_infer.parts.insert(*to_infer_idx, template_part.clone());
 
                    *to_infer_idx += 1;
 
                }
 
            }
 
            *template_idx = template_end_idx;
 

	
 
@@ -493,13 +465,13 @@ impl InferenceType {
 
            if let Some(depth_change) = Self::infer_part_for_single_type(type_b, &mut idx_b, &type_a.parts, &mut idx_a) {
 
                depth += depth_change;
 
                modified_b = true;
 
                continue;
 
            }
 

	
 
            // And can also not be inferred in any way: types must be incompatible
 
            // Types can not be inferred in any way: types must be incompatible
 
            return DualInferenceResult::Incompatible;
 
        }
 

	
 
        if modified_a { type_a.recompute_is_done(); }
 
        if modified_b { type_b.recompute_is_done(); }
 

	
 
@@ -512,13 +484,12 @@ impl InferenceType {
 
        }
 
    }
 

	
 
    /// Attempts to infer the first subtree based on the template. Like
 
    /// `infer_subtrees_for_both_types`, but now only applying inference to
 
    /// `to_infer` based on the type information in `template`.
 
    /// Secondary use is to make sure that a type follows a certain template.
 
    fn infer_subtree_for_single_type(
 
        to_infer: &mut InferenceType, mut to_infer_idx: usize,
 
        template: &[InferenceTypePart], mut template_idx: usize,
 
    ) -> SingleInferenceResult {
 
        let mut modified = false;
 
        let mut depth = 1;
 
@@ -557,17 +528,17 @@ impl InferenceType {
 
                continue;
 
            }
 

	
 
            return SingleInferenceResult::Incompatible
 
        }
 

	
 
        return if modified {
 
        if modified {
 
            to_infer.recompute_is_done();
 
            SingleInferenceResult::Modified
 
            return SingleInferenceResult::Modified;
 
        } else {
 
            SingleInferenceResult::Unmodified
 
            return SingleInferenceResult::Unmodified;
 
        }
 
    }
 

	
 
    /// Checks if both types are compatible, doesn't perform any inference
 
    fn check_subtrees(
 
        type_parts_a: &[InferenceTypePart], start_idx_a: usize,
 
@@ -612,45 +583,35 @@ impl InferenceType {
 
        true
 
    }
 

	
 
    /// Performs the conversion of the inference type into a concrete type.
 
    /// By calling this function you must make sure that no unspecified types
 
    /// (e.g. Unknown or IntegerLike) exist in the type.
 
    fn write_concrete_type(&self, concrete_type: &mut ConcreteType, discard_marked_types: bool) {
 
    fn write_concrete_type(&self, concrete_type: &mut ConcreteType) {
 
        use InferenceTypePart as ITP;
 
        use ConcreteTypePart as CTP;
 

	
 
        // Make sure inference type is specified but concrete type is not yet specified
 
        debug_assert!(!self.parts.is_empty());
 
        debug_assert!(concrete_type.parts.is_empty());
 
        concrete_type.parts.reserve(self.parts.len());
 

	
 
        let mut idx = 0;
 
        while idx < self.parts.len() {
 
            let part = &self.parts[idx];
 
            let converted_part = match part {
 
                ITP::MarkerDefinition(marker) => {
 
                    // When annotating the AST we keep the markers. When
 
                    // determining types for monomorphs we instead want to
 
                    // keep the type (not the markers)
 
                    if discard_marked_types {
 
                        idx = InferenceType::find_subtree_end_idx(&self.parts, idx + 1);
 
                        concrete_type.parts.push(CTP::Marker(*marker));
 
                    } else {
 
                        idx += 1;
 
                    }
 
                    continue;
 
                },
 
                ITP::MarkerBody(_) => {
 
                    // Inner markers are removed when writing to the concrete
 
                    // type.
 
                ITP::Marker(_) => {
 
                    // Markers are removed when writing to the concrete type.
 
                    idx += 1;
 
                    continue;
 
                },
 
                ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | ITP::ArrayLike | ITP::PortLike => {
 
                    unreachable!("Attempted to convert inference type part {:?} into concrete type", part);
 
                    // Should not happen if type inferencing works correctly: we
 
                    // should have returned a programmer-readable error or have
 
                    // inferred all types.
 
                    unreachable!("attempted to convert inference type part {:?} into concrete type", part);
 
                },
 
                ITP::Void => CTP::Void,
 
                ITP::Message => CTP::Message,
 
                ITP::Bool => CTP::Bool,
 
                ITP::UInt8 => CTP::UInt8,
 
                ITP::UInt16 => CTP::UInt16,
 
@@ -679,18 +640,16 @@ impl InferenceType {
 
    fn write_display_name(
 
        buffer: &mut String, heap: &Heap, parts: &[InferenceTypePart], mut idx: usize
 
    ) -> usize {
 
        use InferenceTypePart as ITP;
 

	
 
        match &parts[idx] {
 
            ITP::MarkerDefinition(thing) => {
 
                buffer.push_str(&format!("{{D:{}}}", *thing));
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
            }, 
 
            ITP::MarkerBody(thing) => {
 
                buffer.push_str(&format!("{{B:{}}}", *thing));
 
            ITP::Marker(_marker_idx) => {
 
                if debug_log_enabled!() {
 
                    buffer.push_str(&format!("{{Marker:{}}}", *_marker_idx));
 
                }
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
            },
 
            ITP::Unknown => buffer.push_str("?"),
 
            ITP::NumberLike => buffer.push_str("numberlike"),
 
            ITP::IntegerLike => buffer.push_str("integerlike"),
 
            ITP::ArrayLike => {
 
@@ -769,12 +728,22 @@ impl InferenceType {
 
    /// Returns the display name of the full type tree. Will allocate a string.
 
    fn display_name(&self, heap: &Heap) -> String {
 
        Self::partial_display_name(heap, &self.parts)
 
    }
 
}
 

	
 
impl Default for InferenceType {
 
    fn default() -> Self {
 
        Self{
 
            has_marker: false,
 
            is_done: false,
 
            parts: Vec::new(),
 
        }
 
    }
 
}
 

	
 
/// Iterator over the subtrees that follow a marker in an `InferenceType`
 
/// instance. Returns immutable slices over the internal parts
 
struct InferenceTypeMarkerIter<'a> {
 
    parts: &'a [InferenceTypePart],
 
    idx: usize,
 
}
 
@@ -783,18 +752,18 @@ impl<'a> InferenceTypeMarkerIter<'a> {
 
    fn new(parts: &'a [InferenceTypePart]) -> Self {
 
        Self{ parts, idx: 0 }
 
    }
 
}
 

	
 
impl<'a> Iterator for InferenceTypeMarkerIter<'a> {
 
    type Item = (usize, &'a [InferenceTypePart]);
 
    type Item = (u32, &'a [InferenceTypePart]);
 

	
 
    fn next(&mut self) -> Option<Self::Item> {
 
        // Iterate until we find a marker
 
        while self.idx < self.parts.len() {
 
            if let InferenceTypePart::MarkerBody(marker) = self.parts[self.idx] {
 
            if let InferenceTypePart::Marker(marker) = self.parts[self.idx] {
 
                // Found a marker, find the subtree end
 
                let start_idx = self.idx + 1;
 
                let end_idx = InferenceType::find_subtree_end_idx(self.parts, start_idx);
 

	
 
                // Modify internal index, then return items
 
                self.idx = end_idx;
 
@@ -859,12 +828,30 @@ pub(crate) struct ResolveQueueElement {
 
    pub(crate) definition_id: DefinitionId,
 
    pub(crate) monomorph_types: Vec<ConcreteType>,
 
}
 

	
 
pub(crate) type ResolveQueue = Vec<ResolveQueueElement>;
 

	
 
struct InferenceExpression {
 
    expr_type: InferenceType,       // result type from expression
 
    expr_id: ExpressionId,          // expression that is evaluated
 
    field_or_monomorph_idx: i32,    // index of field, or index of monomorph array in type table
 
    var_or_extra_data_idx: i32,     // index of extra data needed for inference
 
}
 

	
 
impl Default for InferenceExpression {
 
    fn default() -> Self {
 
        Self{
 
            expr_type: InferenceType::default(),
 
            expr_id: ExpressionId::new_invalid(),
 
            field_or_monomorph_idx: -1,
 
            var_or_extra_data_idx: -1,
 
        }
 
    }
 
}
 

	
 
/// This particular visitor will recurse depth-first into the AST and ensures
 
/// that all expressions have the appropriate types.
 
pub(crate) struct PassTyping {
 
    // Current definition we're typechecking.
 
    definition_type: DefinitionType,
 
    poly_vars: Vec<ConcreteType>,
 
@@ -872,30 +859,40 @@ pub(crate) struct PassTyping {
 
    // Buffers for iteration over substatements and subexpressions
 
    stmt_buffer: Vec<StatementId>,
 
    expr_buffer: Vec<ExpressionId>,
 

	
 
    // Mapping from parser type to inferred type. We attempt to continue to
 
    // specify these types until we're stuck or we've fully determined the type.
 
    var_types: HashMap<VariableId, VarData>,      // types of variables
 
    expr_types: HashMap<ExpressionId, InferenceType>,   // types of expressions
 
    extra_data: HashMap<ExpressionId, ExtraData>,       // data for polymorph inference
 
    var_types: HashMap<VariableId, VarData>,            // types of variables
 
    expr_types: Vec<InferenceExpression>,                     // will be transferred to type table at end
 
    extra_data: Vec<ExtraData>,       // data for polymorph inference
 
    // Keeping track of which expressions need to be reinferred because the
 
    // expressions they're linked to made progression on an associated type
 
    expr_queued: HashSet<ExpressionId>,
 
    expr_queued: DequeSet<i32>,
 
}
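
The switch from HashMap keys to flat Vec storage leans on the unique_id_in_definition now carried by every expression; the helper below is a hypothetical illustration of the intended lookup (an assumption based on this diff, not code from it).

fn expr_data<'a>(pass: &'a PassTyping, ctx: &Ctx, id: ExpressionId) -> &'a InferenceExpression {
    // Each expression receives a unique id during validation/linking (-1 until
    // assigned); expr_types is resized to num_expressions_in_body, so the id
    // doubles as an index into the flat vector.
    let unique_id = ctx.heap[id].get_unique_id_in_definition();
    debug_assert!(unique_id >= 0, "expression visited before its id was assigned");
    &pass.expr_types[unique_id as usize]
}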
 

	
 
// TODO: @Rename, this is used for a lot of type inferencing. It seems like
 
//  there is a different underlying architecture waiting to surface.
 
struct ExtraData {
 
    /// Progression of polymorphic variables (if any)
 
    poly_vars: Vec<InferenceType>,
 
    /// Progression of types of call arguments or struct members
 
    embedded: Vec<InferenceType>,
 
    returned: InferenceType,
 
}
 

	
 
impl Default for ExtraData {
 
    fn default() -> Self {
 
        Self{
 
            poly_vars: Vec::new(),
 
            embedded: Vec::new(),
 
            returned: InferenceType::default(),
 
        }
 
    }
 
}
 

	
 
struct VarData {
 
    /// Type of the variable
 
    var_type: InferenceType,
 
    /// VariableExpressions that use the variable
 
    used_at: Vec<ExpressionId>,
 
    /// For channel statements we link to the other variable such that when one
 
@@ -917,15 +914,15 @@ impl PassTyping {
 
        PassTyping {
 
            definition_type: DefinitionType::Function(FunctionDefinitionId::new_invalid()),
 
            poly_vars: Vec::new(),
 
            stmt_buffer: Vec::with_capacity(STMT_BUFFER_INIT_CAPACITY),
 
            expr_buffer: Vec::with_capacity(EXPR_BUFFER_INIT_CAPACITY),
 
            var_types: HashMap::new(),
 
            expr_types: HashMap::new(),
 
            extra_data: HashMap::new(),
 
            expr_queued: HashSet::new(),
 
            expr_types: Vec::new(),
 
            extra_data: Vec::new(),
 
            expr_queued: DequeSet::new(),
 
        }
 
    }
 

	
 
    // TODO: @cleanup Unsure about this, maybe a pattern will arise after
 
    //  a while.
 
    pub(crate) fn queue_module_definitions(ctx: &Ctx, queue: &mut ResolveQueue) {
 
@@ -995,19 +992,25 @@ impl Visitor2 for PassTyping {
 
        debug_assert_eq!(comp_def.poly_vars.len(), self.poly_vars.len(), "component polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting component '{}': {}", comp_def.identifier.value.as_str(), id.0.index);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        // Reserve data for expression types
 
        debug_assert!(self.expr_types.is_empty());
 
        self.expr_types.resize(comp_def.num_expressions_in_body as usize, Default::default());
 

	
 
        // Visit parameters
 
        for param_id in comp_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, true);
 
            debug_assert!(var_type.is_done, "expected component arguments to be concrete types");
 
            self.var_types.insert(param_id, VarData::new_local(var_type));
 
        }
 

	
 
        // Visit the body and all of its expressions
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionDefinitionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Function(id);
 
@@ -1027,19 +1030,25 @@ impl Visitor2 for PassTyping {
 
                let infer_type = InferenceType::new(false, true, infer_type_parts);
 
                debug_log!(" - [{:03}] {:?}", idx, infer_type.display_name(&ctx.heap));
 
            }
 
        }
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        // Reserve data for expression types
 
        debug_assert!(self.expr_types.is_empty());
 
        self.expr_types.resize(func_def.num_expressions_in_body as usize, Default::default());
 

	
 
        // Visit parameters
 
        for param_id in func_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, true);
 
            debug_assert!(var_type.is_done, "expected function arguments to be concrete types");
 
            self.var_types.insert(param_id, VarData::new_local(var_type));
 
        }
 

	
 
        // Visit all of the expressions within the body
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_block_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    // Statements
 

	
 
@@ -1163,13 +1172,12 @@ impl Visitor2 for PassTyping {
 

	
 
        let conditional_expr = &ctx.heap[id];
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        self.expr_types.insert(test_expr_id, InferenceType::new(false, true, vec![InferenceTypePart::Bool]));
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_expr(ctx, true_expr_id)?;
 
        self.visit_expr(ctx, false_expr_id)?;
 

	
 
        self.progress_conditional_expr(ctx, id)
 
    }
 
@@ -1325,33 +1333,12 @@ impl Visitor2 for PassTyping {
 
        var_data.used_at.push(upcast_id);
 

	
 
        self.progress_variable_expr(ctx, id)
 
    }
 
}
 

	
 
macro_rules! debug_assert_expr_ids_unique_and_known {
 
    // Base case for a single expression ID
 
    ($resolver:ident, $id:ident) => {
 
        if cfg!(debug_assertions) {
 
            $resolver.expr_types.contains_key(&$id);
 
        }
 
    };
 
    // Base case for two expression IDs
 
    ($resolver:ident, $id1:ident, $id2:ident) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2);
 
    };
 
    // Generic case
 
    ($resolver:ident, $id1:ident, $id2:ident, $($tail:ident),+) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2, $($tail),+);
 
    };
 
}
 

	
 
macro_rules! debug_assert_ptrs_distinct {
 
    // Base case
 
    ($ptr1:ident, $ptr2:ident) => {
 
        debug_assert!(!std::ptr::eq($ptr1, $ptr2));
 
    };
 
    // Generic case
 
@@ -1372,15 +1359,12 @@ impl PassTyping {
 

	
 
        // We check if we have all the types we need. If we're typechecking a 
 
        // polymorphic procedure more than once, then we have already annotated
 
        // the AST and have now performed typechecking for a different 
 
        // monomorph. In that case we just need to perform typechecking, no need
 
        // to annotate the AST again.
 
        // TODO: @Monomorph, this is completely wrong. It seemed okay, but it
 
        //  isn't. Each monomorph might result in completely different internal
 
        //  types.
 
        let definition_id = match &self.definition_type {
 
            DefinitionType::Component(id) => id.upcast(),
 
            DefinitionType::Function(id) => id.upcast(),
 
        };
 

	
 
        let already_checked = ctx.types.get_base_definition(&definition_id).unwrap().has_any_monomorph();
 
@@ -1399,17 +1383,17 @@ impl PassTyping {
 
                    ));
 
                }
 
            }
 

	
 
            if !already_checked {
 
                let concrete_type = ctx.heap[*expr_id].get_type_mut();
 
                expr_type.write_concrete_type(concrete_type, true);
 
                expr_type.write_concrete_type(concrete_type);
 
            } else {
 
                if cfg!(debug_assertions) {
 
                    let mut concrete_type = ConcreteType::default();
 
                    expr_type.write_concrete_type(&mut concrete_type, true);
 
                    expr_type.write_concrete_type(&mut concrete_type);
 
                    debug_assert_eq!(*ctx.heap[*expr_id].get_type(), concrete_type);
 
                }
 
            }
 
        }
 

	
 
        // All types are fine
 
@@ -1444,13 +1428,13 @@ impl PassTyping {
 
                                poly_idx, poly_type.display_name(&ctx.heap)
 
                            )
 
                        ))
 
                    }
 

	
 
                    let mut concrete_type = ConcreteType::default();
 
                    poly_type.write_concrete_type(&mut concrete_type, false);
 
                    poly_type.write_concrete_type(&mut concrete_type);
 
                    monomorph_types.insert(poly_idx, concrete_type);
 
                }
 

	
 
                // Resolve to the appropriate expression and instantiate 
 
                // monomorphs.
 
                match &ctx.heap[*expr_id] {
 
@@ -1800,24 +1784,25 @@ impl PassTyping {
 

	
 
        // Make sure subject is arraylike and indices are of equal integerlike
 
        let progress_subject_base = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
        let progress_idx_base = self.apply_forced_constraint(ctx, from_id, &INTEGERLIKE_TEMPLATE)?;
 
        let (progress_from, progress_to) = self.apply_equal2_constraint(ctx, upcast_id, from_id, 0, to_id, 0)?;
 

	
 
        // Make sure if output is of T then subject is Array<T>
 
        // Make sure if output is of Slice<T> then subject is Array<T>
 
        let progress_expr_base = self.apply_forced_constraint(ctx, upcast_id, &SLICE_TEMPLATE)?;
 
        let (progress_expr, progress_subject) =
 
            self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, subject_id, 0)?;
 
            self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 1, subject_id, 1)?;
 

	
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject_base || progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - FromIdx type [{}]: {}", progress_idx_base || progress_from, self.expr_types.get(&from_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - ToIdx   type [{}]: {}", progress_idx_base || progress_to, self.expr_types.get(&to_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_expr_base || progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_subject_base || progress_subject { self.queue_expr(subject_id); }
 
        if progress_idx_base || progress_from { self.queue_expr(from_id); }
 
        if progress_idx_base || progress_to { self.queue_expr(to_id); }
 

	
 
        Ok(())
 
    }
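
The constraint helpers used here (see the apply_equal2_constraint hunk below) take two *mut pointers into the same expr_types vector, which is sound only because the two expression indices are distinct. For comparison, one safe way to express the same two-slot access in plain Rust is split_at_mut; a small sketch, assuming distinct indices:

// Borrow two distinct elements of a slice mutably at the same time by
// splitting the slice at the larger index. Panics if the indices are equal,
// mirroring the "all pointers distinct" precondition in the pass.
fn two_slots_mut<T>(items: &mut [T], a: usize, b: usize) -> (&mut T, &mut T) {
    assert_ne!(a, b, "indices must be distinct");
    if a < b {
        let (lo, hi) = items.split_at_mut(b);
        (&mut lo[a], &mut hi[0])
    } else {
        let (lo, hi) = items.split_at_mut(a);
        (&mut hi[0], &mut lo[b])
    }
}

fn main() {
    let mut types = vec![String::from("u8"), String::from("unknown"), String::from("bool")];
    let (subject, other) = two_slots_mut(&mut types, 0, 2);
    // Pretend this is the unification step: make the subject more specific.
    if other.as_str() == "bool" {
        subject.push_str("[]");
    }
    assert_eq!(types[0], "u8[]");
}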
 
@@ -2010,17 +1995,15 @@ impl PassTyping {
 
            },
 
            Literal::True | Literal::False => {
 
                self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?
 
            },
 
            Literal::Character(_) => {
 
                self.apply_forced_constraint(ctx, upcast_id, &CHARACTER_TEMPLATE)?;
 
                todo!("check character literal type inference");
 
            },
 
            Literal::String(_) => {
 
                self.apply_forced_constraint(ctx, upcast_id, &STRING_TEMPLATE)?;
 
                todo!("check string literal type inference");
 
            },
 
            Literal::Struct(data) => {
 
                let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 
                for _poly in &extra.poly_vars {
 
                    debug_log!(" * Poly: {}", _poly.display_name(&ctx.heap));
 
                }
 
@@ -2500,13 +2483,14 @@ impl PassTyping {
 
    /// be fully specified (e.g. a bool) or contain "inference" variables (e.g.
 
    /// an array of T)
 
    fn apply_forced_constraint(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> Result<bool, ParseError> {
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id);
 
        let expr_type = self.expr_types.get_mut(&expr_id).unwrap();
 
        let expr_idx = ctx.heap[expr_id].get_unique_id_in_definition(); // TODO: @Temp
 
        let expr_type = &mut self.expr_types[expr_idx as usize].expr_type;
 
        match InferenceType::infer_subtree_for_single_type(expr_type, 0, template, 0) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(
 
                self.construct_template_type_error(ctx, expr_id, template)
 
            )
 
@@ -2533,15 +2517,16 @@ impl PassTyping {
 
    /// The "parent" `expr_id` is provided to construct errors.
 
    fn apply_equal2_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg1_start_idx: usize,
 
        arg2_id: ExpressionId, arg2_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError> {
 
        debug_assert_expr_ids_unique_and_known!(self, arg1_id, arg2_id);
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 
        let arg1_expr_idx = ctx.heap[arg1_id].get_unique_id_in_definition(); // TODO: @Temp
 
        let arg2_expr_idx = ctx.heap[arg2_id].get_unique_id_in_definition();
 
        let arg1_type: *mut _ = &mut self.expr_types[arg1_expr_idx as usize].expr_type;
 
        let arg2_type: *mut _ = &mut self.expr_types[arg2_expr_idx as usize].expr_type;
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            arg1_type, arg1_start_idx,
 
            arg2_type, arg2_start_idx
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
@@ -2559,19 +2544,17 @@ impl PassTyping {
 
    /// `outer_expr_id` is the main expression we're progressing (e.g. a 
 
    /// function call), while `expr_id` is the embedded expression we're 
 
    /// matching against the signature. `expression_type` and 
 
    /// `expression_start_idx` belong to `expr_id`.
 
    fn apply_equal2_signature_constraint(
 
        ctx: &Ctx, outer_expr_id: ExpressionId, expr_id: Option<ExpressionId>,
 
        polymorph_data: &mut ExtraData, polymorph_progress: &mut HashSet<usize>,
 
        polymorph_data: &mut ExtraData, polymorph_progress: &mut HashSet<u32>,
 
        signature_type: *mut InferenceType, signature_start_idx: usize,
 
        expression_type: *mut InferenceType, expression_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError> {
 
        // Safety: cannot mutually infer the same types
 
        //         polymorph_data containers may not be modified
 
        debug_assert_ptrs_distinct!(signature_type, expression_type);
 
        // Safety: all pointers distinct
 

	
 
        // Infer the signature and expression type
 
        let infer_res = unsafe { 
 
            InferenceType::infer_subtrees_for_both_types(
 
                signature_type, signature_start_idx,
 
                expression_type, expression_start_idx
 
@@ -2605,16 +2588,16 @@ impl PassTyping {
 
        let progress_sig = infer_res.modified_lhs();
 
        let progress_expr = infer_res.modified_rhs();
 

	
 
        if progress_sig {
 
            let signature_type = unsafe{&mut *signature_type};
 
            debug_assert!(
 
                signature_type.has_body_marker, 
 
                signature_type.has_marker,
 
                "made progress on signature type, but it doesn't have a marker"
 
            );
 
            for (poly_idx, poly_section) in signature_type.body_marker_iter() {
 
            for (poly_idx, poly_section) in signature_type.marker_iter() {
 
                let polymorph_type = &mut polymorph_data.poly_vars[poly_idx];
 
                match Self::apply_forced_constraint_types(
 
                    polymorph_type, 0, poly_section, 0
 
                ) {
 
                    Ok(true) => { polymorph_progress.insert(poly_idx); },
 
                    Ok(false) => {},
 
@@ -2633,27 +2616,27 @@ impl PassTyping {
 
    /// progressed as far as possible by calling 
 
    /// `apply_equal2_signature_constraint`. As such, we expect to not encounter
 
    /// any errors.
 
    ///
 
    /// This function returns true if the expression's type has been progressed
 
    fn apply_equal2_polyvar_constraint(
 
        polymorph_data: &ExtraData, _polymorph_progress: &HashSet<usize>,
 
        polymorph_data: &ExtraData, _polymorph_progress: &HashSet<u32>,
 
        signature_type: *mut InferenceType, expr_type: *mut InferenceType
 
    ) -> bool {
 
        // Safety: all pointers should be distinct
 
        //         polymorph_data contains may not be modified
 
        //         polymorph_data containers may not be modified
 
        debug_assert_ptrs_distinct!(signature_type, expr_type);
 
        let signature_type = unsafe{&mut *signature_type};
 
        let expr_type = unsafe{&mut *expr_type};
 

	
 
        // Iterate through markers in signature type to try and make progress
 
        // on the polymorphic variable        
 
        let mut seek_idx = 0;
 
        let mut modified_sig = false;
 
        
 
        while let Some((poly_idx, start_idx)) = signature_type.find_body_marker(seek_idx) {
 
        while let Some((poly_idx, start_idx)) = signature_type.find_marker(seek_idx) {
 
            let end_idx = InferenceType::find_subtree_end_idx(&signature_type.parts, start_idx);
 
            // if polymorph_progress.contains(&poly_idx) {
 
                // Need to match subtrees
 
                let polymorph_type = &polymorph_data.poly_vars[poly_idx];
 
                let modified_at_marker = Self::apply_forced_constraint_types(
 
                    signature_type, start_idx, 
 
@@ -2688,18 +2671,21 @@ impl PassTyping {
 
    /// types is made equal.
 
    fn apply_equal3_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId,
 
        start_idx: usize
 
    ) -> Result<(bool, bool, bool), ParseError> {
 
        // Safety: all expression IDs are always distinct, and we do not modify
 
        //  the container
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id, arg1_id, arg2_id);
 
        let expr_type: *mut _ = self.expr_types.get_mut(&expr_id).unwrap();
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 
        // Safety: all points are unique
 
        //         containers may not be modified
 
        let expr_expr_idx = ctx.heap[expr_id].get_unique_id_in_definition(); // TODO: @Temp
 
        let arg1_expr_idx = ctx.heap[arg1_id].get_unique_id_in_definition();
 
        let arg2_expr_idx = ctx.heap[arg2_id].get_unique_id_in_definition();
 

	
 
        let expr_type: *mut _ = &mut self.expr_types[expr_expr_idx as usize].expr_type;
 
        let arg1_type: *mut _ = &mut self.expr_types[arg1_expr_idx as usize].expr_type;
 
        let arg2_type: *mut _ = &mut self.expr_types[arg2_expr_idx as usize].expr_type;
 

	
 
        let expr_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(expr_type, start_idx, arg1_type, start_idx)
 
        };
 
        if expr_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_expr_type_error(ctx, expr_id, arg1_id));
 
@@ -2750,17 +2736,19 @@ impl PassTyping {
 
        let mut arg_iter = args.iter();
 
        let mut last_arg_id = *arg_iter.next().unwrap();
 
        let mut last_lhs_progressed = 0;
 
        let mut lhs_arg_idx = 0;
 

	
 
        while let Some(next_arg_id) = arg_iter.next() {
 
            let arg1_type: *mut _ = self.expr_types.get_mut(&last_arg_id).unwrap();
 
            let arg2_type: *mut _ = self.expr_types.get_mut(next_arg_id).unwrap();
 
            let last_expr_idx = ctx.heap[last_arg_id].get_unique_id_in_definition(); // TODO: @Temp
 
            let next_expr_idx = ctx.heap[next_arg_id].get_unique_id_in_definition();
 
            let last_type: *mut _ = &mut self.expr_types[last_expr_idx as usize].expr_type;
 
            let next_type: *mut _ = &mut self.expr_types[next_expr_idx as usize].expr_type;
 

	
 
            let res = unsafe {
 
                InferenceType::infer_subtrees_for_both_types(arg1_type, 0, arg2_type, 0)
 
                InferenceType::infer_subtrees_for_both_types(last_type, 0, next_type, 0)
 
            };
 

	
 
            if res == DualInferenceResult::Incompatible {
 
                return Err(self.construct_arg_type_error(ctx, expr_id, last_arg_id, *next_arg_id));
 
            }
 

	
 
@@ -2802,15 +2790,30 @@ impl PassTyping {
 

	
 
        let expr = &ctx.heap[expr_id];
 
        let inference_type = match expr.parent() {
 
            EP::None =>
 
                // Should have been set by linker
 
                unreachable!(),
 
            EP::ExpressionStmt(_) | EP::Expression(_, _) =>
 
            EP::ExpressionStmt(_) =>
 
                // Determined during type inference
 
                InferenceType::new(false, false, vec![ITP::Unknown]),
 
            EP::Expression(parent_id, idx_in_parent) => {
 
                // If we are the test expression of a conditional expression,
 
                // then we must resolve to a boolean
 
                let is_conditional = if let Expression::Conditional(_) = &ctx.heap[*parent_id] {
 
                    true
 
                } else {
 
                    false
 
                };
 

	
 
                if is_conditional && *idx_in_parent == 0 {
 
                    InferenceType::new(false, true, vec![ITP::Bool])
 
                } else {
 
                    InferenceType::new(false, false, vec![ITP::Unknown])
 
                }
 
            },
 
            EP::If(_) | EP::While(_) =>
 
                // Must be a boolean
 
                InferenceType::new(false, true, vec![ITP::Bool]),
 
            EP::Return(_) =>
 
                // Must match the return type of the function
 
                if let DefinitionType::Function(func_id) = self.definition_type {
 
@@ -2825,27 +2828,44 @@ impl PassTyping {
 
            EP::New(_) =>
 
                // Must be a component call, which we assign a "Void" return
 
                // type
 
                InferenceType::new(false, true, vec![ITP::Void]),
 
        };
 

	
 
        match self.expr_types.entry(expr_id) {
 
            Entry::Vacant(vacant) => {
 
                vacant.insert(inference_type);
 
        let infer_expr = &mut self.expr_types[expr.get_unique_id_in_definition() as usize];
 
        let needs_extra_data = match expr {
 
            Expression::Call(_) => true,
 
            Expression::Literal(expr) => match expr.value {
 
                Literal::Enum(_) | Literal::Union(_) | Literal::Struct(_) => true,
 
                _ => false,
 
            },
 
            Entry::Occupied(mut preexisting) => {
 
                // We already have an entry, this happens if our parent fixed
 
                // our type (e.g. we're used in a conditional expression's test)
 
                // but we have a different type.
 
                let old_type = preexisting.get_mut();
 
                if let SingleInferenceResult::Incompatible = InferenceType::infer_subtree_for_single_type(
 
                    old_type, 0, &inference_type.parts, 0
 
                ) {
 
                    return Err(self.construct_expr_type_error(ctx, expr_id, expr_id))
 
                }
 
            Expression::Select(_) => true,
 
            _ => false,
 
        };
 

	
 
        debug_assert!(!(needs_extra_data && needs_var_data)); // obvious, but for future changes
 

	
 
        if infer_expr.expr_id.is_invalid() {
 
            // Nothing is set yet
 
            infer_expr.expr_type = inference_type;
 
            infer_expr.expr_id = expr_id;
 
            if needs_extra_data {
 
                let extra_idx = self.extra_data.len() as i32;
 
                self.extra_data.push(ExtraData::default());
 
                infer_expr.var_or_extra_data_idx = extra_idx;
 
            }
 
        } else {
 
            // We already have an entry
 
            debug_assert!(false, "does this ever happen?");
 
            if let SingleInferenceResult::Incompatible = InferenceType::infer_subtree_for_single_type(
 
                &mut infer_expr.expr_type, 0, &inference_type.parts, 0
 
            ) {
 
                return Err(self.construct_expr_type_error(ctx, expr_id, expr_id));
 
            }
 

	
 
            debug_assert!((infer_expr.var_or_extra_data_idx != -1) == needs_extra_data);
 
        }
 

	
 
        Ok(())
 
    }
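
The rewritten initialization above pre-allocates an ExtraData slot only for the expression kinds that need one (calls, struct/enum/union literals, selects) and stores the slot's index next to the expression, with -1 meaning "no extra data". A compact sketch of that side-table pattern, using hypothetical stand-in types:

#[derive(Default)]
struct ExtraSlot {
    poly_vars: Vec<String>, // stand-in for the inferred polymorphic variables
}

enum ExprKind {
    Call,
    StructLiteral,
    IntLiteral,
    Variable,
}

struct Slots {
    // -1 means "no extra data"; mirrors var_or_extra_data_idx in the pass.
    extra_index_per_expr: Vec<i32>,
    extra_data: Vec<ExtraSlot>,
}

impl Slots {
    fn register(&mut self, kind: &ExprKind) {
        let needs_extra = matches!(kind, ExprKind::Call | ExprKind::StructLiteral);
        let idx = if needs_extra {
            self.extra_data.push(ExtraSlot::default());
            (self.extra_data.len() - 1) as i32
        } else {
            -1
        };
        self.extra_index_per_expr.push(idx);
    }
}

fn main() {
    let mut slots = Slots { extra_index_per_expr: Vec::new(), extra_data: Vec::new() };
    for kind in [ExprKind::Variable, ExprKind::Call, ExprKind::IntLiteral, ExprKind::StructLiteral] {
        slots.register(&kind);
    }
    assert_eq!(slots.extra_index_per_expr, vec![-1, 0, -1, 1]);
    assert_eq!(slots.extra_data.len(), 2);
    assert!(slots.extra_data[0].poly_vars.is_empty());
}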
 

	
 
    fn insert_initial_call_polymorph_data(
 
@@ -2858,12 +2878,14 @@ impl PassTyping {
 
        // The arguments of the call may refer to polymorphic variables in the
 
        // definition of the function we're calling, not of the wrapping
 
        // definition. We insert markers in these inferred types to be able to
 
        // map them back and forth to the polymorphic arguments of the function
 
        // we are calling.
 
        let call = &ctx.heap[call_id];
 
        let extra_data_idx = self.expr_types[call.unique_id_in_definition as usize].var_or_extra_data_idx;
 
        debug_assert!(extra_data_idx != -1, "insert initial call polymorph data, no preallocated ExtraData");
 

	
 
        // Handle the polymorphic arguments (if there are any)
 
        let num_poly_args = call.parser_type.elements[0].variant.num_embedded();
 
        let mut poly_args = Vec::with_capacity(num_poly_args);
 
        for embedded_elements in call.parser_type.iter_embedded(0) {
 
            poly_args.push(self.determine_inference_type_from_parser_type_elements(embedded_elements, true));
 
@@ -2883,40 +2905,43 @@ impl PassTyping {
 
            Definition::Struct(_) | Definition::Enum(_) | Definition::Union(_) => {
 
                unreachable!("insert_initial_call_polymorph data for non-procedure type");
 
            },
 
        };
 

	
 
        let mut parameter_types = Vec::with_capacity(parameters.len());
 
        for parameter_id in parameters.clone().into_iter() { // TODO: @Performance
 
        for parameter_id in parameters.clone().into_iter() { // TODO: @Performance @Now
 
            let param = &ctx.heap[parameter_id];
 
            parameter_types.push(self.determine_inference_type_from_parser_type_elements(&param.parser_type.elements, false));
 
        }
 

	
 
        let return_type = match returned {
 
            None => {
 
                // Component, so returns a "Void"
 
                InferenceType::new(false, true, vec![InferenceTypePart::Void])
 
            },
 
            Some(returned) => {
 
                debug_assert_eq!(returned.len(), 1);
 
                debug_assert_eq!(returned.len(), 1); // TODO: @ReturnTypes
 
                let returned = &returned[0];
 
                self.determine_inference_type_from_parser_type_elements(&returned.elements, false)
 
            }
 
        };
 

	
 
        self.extra_data.insert(call_id.upcast(), ExtraData {
 
        self.extra_data[extra_data_idx as usize] = ExtraData{
 
            poly_vars: poly_args,
 
            embedded: parameter_types,
 
            returned: return_type
 
        });
 
    }
 

	
 
    fn insert_initial_struct_polymorph_data(
 
        &mut self, ctx: &mut Ctx, lit_id: LiteralExpressionId,
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = &ctx.heap[lit_id];
 
        let extra_data_idx = self.expr_types[literal.unique_id_in_definition as usize].var_or_extra_data_idx;
 
        debug_assert!(extra_data_idx != -1, "initial struct polymorph data, but no preallocated ExtraData");
 
        let literal = ctx.heap[lit_id].value.as_struct();
 

	
 
        // Handle polymorphic arguments
 
        let num_embedded = literal.parser_type.elements[0].variant.num_embedded();
 
        let mut total_num_poly_parts = 0;
 
        let mut poly_args = Vec::with_capacity(num_embedded);
 
@@ -2946,38 +2971,41 @@ impl PassTyping {
 
        // polymorphic variables. So:
 
        // - 1 part for definition
 
        // - N_poly_arg marker parts for each polymorphic argument
 
        // - all the parts for the currently known polymorphic arguments 
 
        let parts_reserved = 1 + poly_args.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(literal.definition, poly_args.len()));
 
        parts.push(ITP::Instance(literal.definition, poly_args.len() as u32));
 
        let mut return_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_args.iter().enumerate() {
 
            if !poly_var.is_done { return_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let return_type = InferenceType::new(!poly_args.is_empty(), return_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
        self.extra_data[extra_data_idx as usize] = ExtraData{
 
            poly_vars: poly_args,
 
            embedded: embedded_types,
 
            returned: return_type,
 
        });
 
        };
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct for enum expressions. These
 
    /// can never be determined from the enum itself, but may be inferred from
 
    /// the use of the enum.
 
    fn insert_initial_enum_polymorph_data(
 
        &mut self, ctx: &Ctx, lit_id: LiteralExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = &ctx.heap[lit_id];
 
        let extra_data_idx = self.expr_types[literal.unique_id_in_definition as usize].var_or_extra_data_idx;
 
        debug_assert!(extra_data_idx != -1, "initial enum polymorph data, but no preallocated ExtraData");
 
        let literal = ctx.heap[lit_id].value.as_enum();
 

	
 
        // Handle polymorphic arguments to the enum
 
        let num_poly_args = literal.parser_type.elements[0].variant.num_embedded();
 
        let mut total_num_poly_parts = 0;
 
        let mut poly_args = Vec::with_capacity(num_poly_args);
 
@@ -2988,37 +3016,40 @@ impl PassTyping {
 
            poly_args.push(poly_type);
 
        }
 

	
 
        // Handle enum type itself
 
        let parts_reserved = 1 + poly_args.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(literal.definition, poly_args.len()));
 
        parts.push(ITP::Instance(literal.definition, poly_args.len() as u32));
 
        let mut enum_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_args.iter().enumerate() {
 
            if !poly_var.is_done { enum_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let enum_type = InferenceType::new(!poly_args.is_empty(), enum_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
        self.extra_data[extra_data_idx as usize] = ExtraData{
 
            poly_vars: poly_args,
 
            embedded: Vec::new(),
 
            returned: enum_type,
 
        });
 
        };
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct for unions. The polymorphic
 
    /// arguments may be partially determined from embedded values in the union.
 
    fn insert_initial_union_polymorph_data(
 
        &mut self, ctx: &Ctx, lit_id: LiteralExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = &ctx.heap[lit_id];
 
        let extra_data_idx = self.expr_types[literal.unique_id_in_definition as usize].var_or_extra_data_idx;
 
        debug_assert!(extra_data_idx != -1, "initial union polymorph data, but no preallocated ExtraData");
 
        let literal = ctx.heap[lit_id].value.as_union();
 

	
 
        // Construct the polymorphic variables
 
        let num_poly_args = literal.parser_type.elements[0].variant.num_embedded();
 
        let mut total_num_poly_parts = 0;
 
        let mut poly_args = Vec::with_capacity(num_poly_args);
 
@@ -3044,70 +3075,72 @@ impl PassTyping {
 
            embedded.push(inference_type);
 
        }
 

	
 
        // Handle the type of the union itself
 
        let parts_reserved = 1 + poly_args.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(definition_id, poly_args.len()));
 
        parts.push(ITP::Instance(definition_id, poly_args.len() as u32));
 
        let mut union_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_args.iter().enumerate() {
 
            if !poly_var.is_done { union_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts_reserved, parts.len());
 
        let union_type = InferenceType::new(!poly_args.is_empty(), union_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
        self.extra_data[extra_data_idx as usize] = ExtraData{
 
            poly_vars: poly_args,
 
            embedded,
 
            returned: union_type
 
        });
 
        };
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct. Assumes that the select
 
    /// expression's referenced (definition_id, field_idx) has been resolved.
 
    fn insert_initial_select_polymorph_data(
 
        &mut self, ctx: &Ctx, select_id: SelectExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Retrieve relevant data
 
        let expr = &ctx.heap[select_id];
 
        let extra_data_idx = self.expr_types[expr.unique_id_in_definition as usize].var_or_extra_data_idx;
 
        debug_assert!(extra_data_idx != -1, "initial select polymorph data, but no preallocated ExtraData");
 
        let field = expr.field.as_symbolic();
 

	
 
        let definition_id = field.definition.unwrap();
 
        let definition = ctx.heap[definition_id].as_struct();
 
        let field_idx = field.field_idx;
 

	
 
        // Generate initial polyvar types and struct type
 
        // TODO: @Performance: we can immediately set the polyvars of the subject's struct type
 
        let num_poly_vars = definition.poly_vars.len();
 
        let mut poly_vars = Vec::with_capacity(num_poly_vars);
 
        let struct_parts_reserved = 1 + 2 * num_poly_vars;
 
        let mut struct_parts = Vec::with_capacity(struct_parts_reserved);
 
        struct_parts.push(ITP::Instance(definition_id, num_poly_vars));        
 
        struct_parts.push(ITP::Instance(definition_id, num_poly_vars as u32));
 

	
 
        for poly_idx in 0..num_poly_vars {
 
            poly_vars.push(InferenceType::new(true, false, vec![
 
                ITP::MarkerBody(poly_idx), ITP::Unknown,
 
            ]));
 
            struct_parts.push(ITP::MarkerBody(poly_idx));
 
            struct_parts.push(ITP::Unknown);
 
        }
 
        debug_assert_eq!(struct_parts.len(), struct_parts_reserved);
 

	
 
        // Generate initial field type
 
        let field_type = self.determine_inference_type_from_parser_type_elements(&definition.fields[field_idx].parser_type.elements, false);
 
        self.extra_data.insert(select_id.upcast(), ExtraData{
 
        self.extra_data[extra_data_idx as usize] = ExtraData{
 
            poly_vars,
 
            embedded: vec![InferenceType::new(num_poly_vars != 0, num_poly_vars == 0, struct_parts)],
 
            returned: field_type
 
        });
 
        };
 
    }
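
Each insert_initial_*_polymorph_data function above flattens the instance type into a parts vector: one Instance header, then, per polymorphic variable, a marker followed by that variable's own parts, with the reserved length asserted afterwards. A small sketch of that layout over a simplified part enum:

#[derive(Debug, Clone, PartialEq)]
enum Part {
    Instance { num_poly: u32 }, // header: definition plus number of polymorphic args
    Marker(u32),                // start of the section for polymorphic variable i
    Unknown,                    // a not-yet-inferred subtree
    Bool,
}

// Flatten an instance type: header, then for every polymorphic variable a
// marker followed by that variable's parts.
fn build_instance_parts(poly_vars: &[Vec<Part>]) -> Vec<Part> {
    let total_poly_parts: usize = poly_vars.iter().map(|p| p.len()).sum();
    let reserved = 1 + poly_vars.len() + total_poly_parts;
    let mut parts = Vec::with_capacity(reserved);
    parts.push(Part::Instance { num_poly: poly_vars.len() as u32 });
    for (idx, poly) in poly_vars.iter().enumerate() {
        parts.push(Part::Marker(idx as u32));
        parts.extend(poly.iter().cloned());
    }
    debug_assert_eq!(parts.len(), reserved);
    parts
}

fn main() {
    let poly_vars = vec![vec![Part::Bool], vec![Part::Unknown]];
    let parts = build_instance_parts(&poly_vars);
    assert_eq!(
        parts,
        vec![
            Part::Instance { num_poly: 2 },
            Part::Marker(0), Part::Bool,
            Part::Marker(1), Part::Unknown,
        ]
    );
}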
 

	
 
    /// Determines the initial InferenceType from the provided ParserType. This
 
    /// may be called with two kinds of intentions:
 
    /// 1. To resolve a ParserType within the body of a function, or on
 
    ///     polymorphic arguments to calls/instantiations within that body. This
 
@@ -3118,13 +3151,13 @@ impl PassTyping {
 
    ///     arguments inside those ParserTypes refer to the polymorphic
 
    ///     variables in the called/instantiated type's definition.
 
    /// In the second case we place InferenceTypePart::Marker instances such
 
    /// that we can perform type inference on the polymorphic variables.
 
    fn determine_inference_type_from_parser_type_elements(
 
        &mut self, elements: &[ParserTypeElement],
 
        parser_type_in_body: bool
 
        use_definitions_known_poly_args: bool
 
    ) -> InferenceType {
 
        use ParserTypeVariant as PTV;
 
        use InferenceTypePart as ITP;
 

	
 
        let mut infer_type = Vec::with_capacity(elements.len());
 
        let mut has_inferred = false;
 
@@ -3163,27 +3196,26 @@ impl PassTyping {
 
                // With nested types
 
                PTV::Array => { infer_type.push(ITP::Array); },
 
                PTV::Input => { infer_type.push(ITP::Input); },
 
                PTV::Output => { infer_type.push(ITP::Output); },
 
                PTV::PolymorphicArgument(belongs_to_definition, poly_arg_idx) => {
 
                    let poly_arg_idx = *poly_arg_idx;
 
                    if parser_type_in_body {
 
                    if use_definitions_known_poly_args {
 
                        // Refers to polymorphic argument on procedure we're currently processing.
 
                        // This argument is already known.
 
                        debug_assert_eq!(*belongs_to_definition, self.definition_type.definition_id());
 
                        debug_assert!((poly_arg_idx as usize) < self.poly_vars.len());
 

	
 
                        infer_type.push(ITP::MarkerDefinition(poly_arg_idx as usize));
 
                        for concrete_part in &self.poly_vars[poly_arg_idx].parts {
 
                            infer_type.push(ITP::from(*concrete_part));
 
                        }
 
                    } else {
 
                        // Polymorphic argument has to be inferred
 
                        has_markers = true;
 
                        has_inferred = true;
 
                        infer_type.push(ITP::MarkerBody(poly_arg_idx));
 
                        infer_type.push(ITP::Marker(poly_arg_idx));
 
                        infer_type.push(ITP::Unknown)
 
                    }
 
                },
 
                PTV::Definition(definition_id, num_embedded) => {
 
                    infer_type.push(ITP::Instance(*definition_id, *num_embedded));
 
                }
 
@@ -3274,19 +3306,19 @@ impl PassTyping {
 
    /// and that an actual error has occurred.
 
    fn construct_poly_arg_error(
 
        ctx: &Ctx, poly_data: &ExtraData, expr_id: ExpressionId
 
    ) -> ParseError {
 
        // Helper function to check for polymorph mismatch between two inference
 
        // types.
 
        fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(usize, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            if !type_a.has_body_marker || !type_b.has_body_marker {
 
        fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(u32, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            if !type_a.has_marker || !type_b.has_marker {
 
                return None
 
            }
 

	
 
            for (marker_a, section_a) in type_a.body_marker_iter() {
 
                for (marker_b, section_b) in type_b.body_marker_iter() {
 
            for (marker_a, section_a) in type_a.marker_iter() {
 
                for (marker_b, section_b) in type_b.marker_iter() {
 
                    if marker_a != marker_b {
 
                        // Not the same polymorphic variable
 
                        continue;
 
                    }
 

	
 
                    if !InferenceType::check_subtrees(section_a, 0, section_b, 0) {
 
@@ -3297,22 +3329,22 @@ impl PassTyping {
 
            }
 

	
 
            None
 
        }
 

	
 
        // Helpers function to retrieve polyvar name and definition name
 
        fn get_poly_var_and_definition_name<'a>(ctx: &'a Ctx, poly_var_idx: usize, definition_id: DefinitionId) -> (&'a str, &'a str) {
 
        fn get_poly_var_and_definition_name<'a>(ctx: &'a Ctx, poly_var_idx: u32, definition_id: DefinitionId) -> (&'a str, &'a str) {
 
            let definition = &ctx.heap[definition_id];
 
            let poly_var = definition.poly_vars()[poly_var_idx].value.as_str();
 
            let poly_var = definition.poly_vars()[poly_var_idx as usize].value.as_str();
 
            let func_name = definition.identifier().value.as_str();
 

	
 
            (poly_var, func_name)
 
        }
 

	
 
        // Helper function to construct initial error
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &Expression) -> ParseError {
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: u32, expr: &Expression) -> ParseError {
 
            match expr {
 
                Expression::Call(expr) => {
 
                    let (poly_var, func_name) = get_poly_var_and_definition_name(ctx, poly_var_idx, expr.definition);
 
                    return ParseError::new_error_at_span(
 
                        &ctx.module.source, expr.span, format!(
 
                            "Conflicting type for polymorphic variable '{}' of '{}'",
src/protocol/parser/pass_validation_linking.rs
 
@@ -63,14 +63,15 @@ pub(crate) struct PassValidationLinking {
 
    // used as an expression parent)
 
    expr_parent: ExpressionParent,
 
    // Set by parent to indicate that child expression must be assignable. The
 
    // child will throw an error if it is not assignable. The stored span is
 
    // used for the error's position
 
    must_be_assignable: Option<InputSpan>,
 
    // Keeping track of relative position in block in the breadth-first pass.
 
    relative_pos_in_block: u32,
 
    // Keeping track of relative positions and unique IDs.
 
    relative_pos_in_block: u32, // of statements: to determine when variables are visible
 
    next_expr_index: i32, // to arrive at a unique ID for all expressions within a definition
 
    // Various temporary buffers for traversal. Essentially working around
 
    // Rust's borrowing rules since it cannot understand we're modifying AST
 
    // members but not the AST container.
 
    variable_buffer: ScopedBuffer<VariableId>,
 
    definition_buffer: ScopedBuffer<DefinitionId>,
 
    statement_buffer: ScopedBuffer<StatementId>,
 
@@ -84,12 +85,13 @@ impl PassValidationLinking {
 
            in_while: None,
 
            cur_scope: Scope::Definition(DefinitionId::new_invalid()),
 
            expr_parent: ExpressionParent::None,
 
            def_type: DefinitionType::Function(FunctionDefinitionId::new_invalid()),
 
            must_be_assignable: None,
 
            relative_pos_in_block: 0,
 
            next_expr_index: 0,
 
            variable_buffer: ScopedBuffer::new_reserved(128),
 
            definition_buffer: ScopedBuffer::new_reserved(128),
 
            statement_buffer: ScopedBuffer::new_reserved(STMT_BUFFER_INIT_CAPACITY),
 
            expression_buffer: ScopedBuffer::new_reserved(EXPR_BUFFER_INIT_CAPACITY),
 
        }
 
    }
 
@@ -98,12 +100,13 @@ impl PassValidationLinking {
 
        self.in_sync = None;
 
        self.in_while = None;
 
        self.cur_scope = Scope::Definition(DefinitionId::new_invalid());
 
        self.expr_parent = ExpressionParent::None;
 
        self.def_type = DefinitionType::Function(FunctionDefinitionId::new_invalid());
 
        self.relative_pos_in_block = 0;
 
        self.next_expr_index = 0
 
    }
 
}
 

	
 
impl Visitor2 for PassValidationLinking {
 
    fn visit_module(&mut self, ctx: &mut Ctx) -> VisitorResult {
 
        debug_assert_eq!(ctx.module.phase, ModuleCompilationPhase::TypesAddedToTable);
 
@@ -143,12 +146,14 @@ impl Visitor2 for PassValidationLinking {
 
            variable.unique_id_in_scope = variable_idx as i32;
 
        }
 
        section.forget();
 

	
 
        // Visit statements in component body
 
        self.visit_block_stmt(ctx, body_id)?;
 

	
 
        ctx.heap[id].num_expressions_in_body = self.next_expr_index;
 
        Ok(())
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionDefinitionId) -> VisitorResult {
 
        self.reset_state();
 

	
 
@@ -167,12 +172,14 @@ impl Visitor2 for PassValidationLinking {
 
            variable.unique_id_in_scope = variable_idx as i32;
 
        }
 
        section.forget();
 

	
 
        // Visit statements in function body
 
        self.visit_block_stmt(ctx, body_id)?;
 

	
 
        ctx.heap[id].num_expressions_in_body = self.next_expr_index;
 
        Ok(())
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Statement visitors
 
    //--------------------------------------------------------------------------
 
@@ -376,12 +383,14 @@ impl Visitor2 for PassValidationLinking {
 
        let assignment_expr = &mut ctx.heap[id];
 

	
 
        let left_expr_id = assignment_expr.left;
 
        let right_expr_id = assignment_expr.right;
 
        let old_expr_parent = self.expr_parent;
 
        assignment_expr.parent = old_expr_parent;
 
        assignment_expr.unique_id_in_definition = self.next_expr_index;
 
        self.next_expr_index += 1;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.must_be_assignable = Some(assignment_expr.span);
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.must_be_assignable = None;
 
@@ -403,12 +412,14 @@ impl Visitor2 for PassValidationLinking {
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        conditional_expr.parent = old_expr_parent;
 
        conditional_expr.unique_id_in_definition = self.next_expr_index;
 
        self.next_expr_index += 1;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, true_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 2);
 
@@ -430,12 +441,14 @@ impl Visitor2 for PassValidationLinking {
 

	
 
        let left_expr_id = binary_expr.left;
 
        let right_expr_id = binary_expr.right;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        binary_expr.parent = old_expr_parent;
 
        binary_expr.unique_id_in_definition = self.next_expr_index;
 
        self.next_expr_index += 1;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, right_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 
@@ -452,12 +465,14 @@ impl Visitor2 for PassValidationLinking {
 
                &ctx.module.source, span, "cannot assign to the result from a unary expression"
 
            ))
 
        }
 

	
 
        let old_expr_parent = self.expr_parent;
 
        unary_expr.parent = old_expr_parent;
 
        unary_expr.unique_id_in_definition = self.next_expr_index;
 
        self.next_expr_index += 1;
 

	
 
        self.expr_parent = ExpressionParent::Expression(id.upcast(), 0);
 
        self.visit_expr(ctx, expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
@@ -469,12 +484,14 @@ impl Visitor2 for PassValidationLinking {
 

	
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        indexing_expr.parent = old_expr_parent;
 
        indexing_expr.unique_id_in_definition = self.next_expr_index;
 
        self.next_expr_index += 1;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, index_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 
@@ -489,12 +506,14 @@ impl Visitor2 for PassValidationLinking {
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        slicing_expr.parent = old_expr_parent;
 
        slicing_expr.unique_id_in_definition = self.next_expr_index;
 
        self.next_expr_index += 1;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, from_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 2);
 
@@ -507,24 +526,28 @@ impl Visitor2 for PassValidationLinking {
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitorResult {
 
        let select_expr = &mut ctx.heap[id];
 
        let expr_id = select_expr.subject;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        select_expr.parent = old_expr_parent;
 
        select_expr.unique_id_in_definition = self.next_expr_index;
 
        self.next_expr_index += 1;
 

	
 
        self.expr_parent = ExpressionParent::Expression(id.upcast(), 0);
 
        self.visit_expr(ctx, expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitorResult {
 
        let literal_expr = &mut ctx.heap[id];
 
        let old_expr_parent = self.expr_parent;
 
        literal_expr.parent = old_expr_parent;
 
        literal_expr.unique_id_in_definition = self.next_expr_index;
 
        self.next_expr_index += 1;
 

	
 
        if let Some(span) = self.must_be_assignable {
 
            return Err(ParseError::new_error_str_at_span(
 
                &ctx.module.source, span, "cannot assign to a literal expression"
 
            ))
 
        }
 
@@ -819,12 +842,14 @@ impl Visitor2 for PassValidationLinking {
 
        // Recurse into all of the arguments and set the expression's parent
 
        let upcast_id = id.upcast();
 

	
 
        let section = self.expression_buffer.start_section_initialized(&call_expr.arguments);
 
        let old_expr_parent = self.expr_parent;
 
        call_expr.parent = old_expr_parent;
 
        call_expr.unique_id_in_definition = self.next_expr_index;
 
        self.next_expr_index += 1;
 

	
 
        for arg_expr_idx in 0..section.len() {
 
            let arg_expr_id = section[arg_expr_idx];
 
            self.expr_parent = ExpressionParent::Expression(upcast_id, arg_expr_idx as u32);
 
            self.visit_expr(ctx, arg_expr_id)?;
 
        }
 
@@ -838,12 +863,14 @@ impl Visitor2 for PassValidationLinking {
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitorResult {
 
        let var_expr = &ctx.heap[id];
 
        let variable_id = self.find_variable(ctx, self.relative_pos_in_block, &var_expr.identifier)?;
 
        let var_expr = &mut ctx.heap[id];
 
        var_expr.declaration = Some(variable_id);
 
        var_expr.parent = self.expr_parent;
 
        var_expr.unique_id_in_definition = self.next_expr_index;
 
        self.next_expr_index += 1;
 

	
 
        Ok(())
 
    }
 
}
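
On the producer side, every visit_*_expr above stamps the expression with the running next_expr_index before recursing into its children, and the definition visitor stores the final counter as num_expressions_in_body. A stripped-down sketch of that pre-order numbering over a toy expression tree (hypothetical types):

struct Expr {
    unique_id_in_definition: i32,
    children: Vec<Expr>,
}

struct Linker {
    next_expr_index: i32,
}

impl Linker {
    // Pre-order numbering: assign an id, then recurse, mirroring the
    // parent-before-children order of the visitors above.
    fn visit(&mut self, expr: &mut Expr) {
        expr.unique_id_in_definition = self.next_expr_index;
        self.next_expr_index += 1;
        for child in &mut expr.children {
            self.visit(child);
        }
    }
}

fn main() {
    let mut root = Expr {
        unique_id_in_definition: -1,
        children: vec![
            Expr { unique_id_in_definition: -1, children: vec![] },
            Expr { unique_id_in_definition: -1, children: vec![] },
        ],
    };
    let mut linker = Linker { next_expr_index: 0 };
    linker.visit(&mut root);
    // The final counter doubles as num_expressions_in_body for the typing pass.
    assert_eq!(linker.next_expr_index, 3);
    assert_eq!(root.children[1].unique_id_in_definition, 2);
}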
 

	
 
impl PassValidationLinking {
src/protocol/parser/type_table.rs
 
@@ -158,25 +158,51 @@ pub struct StructField {
 
    pub(crate) identifier: Identifier,
 
    pub(crate) parser_type: ParserType,
 
}
 

	
 
pub struct FunctionType {
 
    pub return_types: Vec<ParserType>,
 
    pub arguments: Vec<FunctionArgument>
 
    pub arguments: Vec<FunctionArgument>,
 
    pub monomorphs: Vec<ProcedureMonomorph>,
 
}
 

	
 
pub struct ComponentType {
 
    pub variant: ComponentVariant,
 
    pub arguments: Vec<FunctionArgument>
 
    pub arguments: Vec<FunctionArgument>,
 
    pub monomorphs: Vec<ProcedureMonomorph>,
 
}
 

	
 
pub struct FunctionArgument {
 
    identifier: Identifier,
 
    parser_type: ParserType,
 
}
 

	
 
pub struct ProcedureMonomorph {
 
    // Expression data for one particular monomorph
 
    expr_data: Vec<MonomorphExpression>,
 
}
 

	
 
/// Represents the data associated with a single expression after type inference
 
/// for a monomorph (or just the normal expression types, if dealing with a
 
/// non-polymorphic function/component).
 
pub struct MonomorphExpression {
 
    // The output type of the expression. Note that for a function it is not the
 
    // function's signature but its return type
 
    pub(crate) expr_type: ConcreteType,
 
    // Has multiple meanings: the field index for select expressions, or the
 
    // monomorph index for polymorphic function calls and literals. Negative
 
    // values are never valid; they exist only to catch programming errors.
 
    pub(crate) field_or_monomorph_idx: i32,
 
}
 

	
 
impl Default for MonomorphExpression {
 
    fn default() -> Self {
 
        Self{ expr_type: ConcreteType::default(), field_or_monomorph_idx: -1 }
 
    }
 
}
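
The new monomorphs vectors on FunctionType and ComponentType hold one MonomorphExpression table per instantiation. A hedged sketch of how such a table might be allocated and filled, with stand-in types; sizing it to the number of expressions in the body is an assumption, not something this hunk shows:

#[derive(Clone)]
struct ExprEntry {
    expr_type: String,           // stand-in for ConcreteType
    field_or_monomorph_idx: i32, // -1 = unset, only to catch programming errors
}

struct Monomorph {
    expr_data: Vec<ExprEntry>,
}

struct Procedure {
    monomorphs: Vec<Monomorph>,
}

impl Procedure {
    // Assumed sizing: one entry per expression in the body; return the new
    // monomorph's index so call sites can refer to it later.
    fn add_monomorph(&mut self, num_expressions_in_body: usize) -> usize {
        self.monomorphs.push(Monomorph {
            expr_data: vec![
                ExprEntry { expr_type: String::new(), field_or_monomorph_idx: -1 };
                num_expressions_in_body
            ],
        });
        self.monomorphs.len() - 1
    }
}

fn main() {
    let mut procedure = Procedure { monomorphs: Vec::new() };
    let mono_idx = procedure.add_monomorph(2);
    procedure.monomorphs[mono_idx].expr_data[0].expr_type = String::from("u32");
    assert_eq!(procedure.monomorphs[mono_idx].expr_data[0].expr_type, "u32");
    assert_eq!(procedure.monomorphs[mono_idx].expr_data[1].field_or_monomorph_idx, -1);
}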
 

	
 
//------------------------------------------------------------------------------
 
// Type table
 
//------------------------------------------------------------------------------
 

	
 
// TODO: @cleanup Do I really need this, doesn't make the code that much cleaner
 
struct TypeIterator {
src/protocol/tests/eval_silly.rs
 
@@ -99,13 +99,13 @@ fn test_slicing_magic() {
 

	
 
        func foo() -> u32 {
 
            // left array will be [0, 1, 2, ...] and right array will be [2, 3, 4, ...]
 
            auto created = create_holder(0, 5, 2, 8);
 

	
 
            // in a convoluted fashion select the value 3 from the lhs and the value 3 from the rhs
 
            auto result = slicing_magic(create_holder(0, 5, 2, 8), 3, 2, 1, 2);
 
            auto result = slicing_magic(created, 3, 2, 1, 2);
 

	
 
            // and return 3 + 3
 
            return result[0] + result[1];
 
        }
 
    ").for_function("foo", |f| {
 
        f.call_ok(Some(Value::UInt32(6)));