Changeset - 6eafa2cfcfd0
[Not reviewed]
MH - 4 years ago 2021-05-28 14:46:37
contact@maxhenger.nl
Fix ParserType spans, fix some bugs
8 files changed with 243 insertions and 74 deletions:
0 comments (0 inline, 0 general)
src/protocol/ast.rs
 
@@ -366,60 +366,68 @@ pub enum ParserTypeVariant {
 
    SInt8, SInt16, SInt32, SInt64,
 
    Character, String,
 
    // Literals (need to get concrete builtin type during typechecking)
 
    IntegerLiteral,
 
    // Marker for inference
 
    Inferred,
 
    // Builtins expecting one subsequent type
 
    Array,
 
    Input,
 
    Output,
 
    // User-defined types
 
    PolymorphicArgument(DefinitionId, u32), // u32 = index into polymorphic variables
 
    Definition(DefinitionId, u32), // u32 = number of following subtypes
 
    Definition(DefinitionId, u32), // u32 = number of subsequent types in the type tree.
 
}
 

	
 
impl ParserTypeVariant {
 
    pub(crate) fn num_embedded(&self) -> usize {
 
        use ParserTypeVariant::*;
 

	
 
        match self {
 
            Void | IntegerLike |
 
            Message | Bool |
 
            UInt8 | UInt16 | UInt32 | UInt64 |
 
            SInt8 | SInt16 | SInt32 | SInt64 |
 
            Character | String | IntegerLiteral |
 
            Inferred | PolymorphicArgument(_, _) =>
 
                0,
 
            ArrayLike | InputOrOutput | Array | Input | Output =>
 
                1,
 
            Definition(_, num) => *num as usize,
 
        }
 
    }
 
}
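
// For illustration (sketch, names assumed): leaf variants such as `UInt8` or
// `Inferred` report 0 embedded elements, the single-argument builtins report 1
// (e.g. `Array.num_embedded() == 1`), and `Definition(id, 2)` reports the 2
// subsequent types stored in the type tree for that user-defined type.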
 

	
 
/// ParserTypeElement is an element of the type tree. An element may be
 
/// implicit, meaning that the user didn't specify the type, but it was set by
 
/// the compiler.
 
#[derive(Debug, Clone)]
 
pub struct ParserTypeElement {
 
    // TODO: @Fix span
 
    pub full_span: InputSpan, // full span of type, including any polymorphic arguments
 
    pub element_span: InputSpan, // span of this element, not including the child types
 
    pub variant: ParserTypeVariant,
 
}
 

	
 
/// ParserType is a specification of a type during the parsing phase and initial
 
/// linker/validator phase of the compilation process. These types may be
 
/// (partially) inferred or represent literals (e.g. an integer whose bytesize is
 
/// not yet determined).
 
///
 
/// Its contents are the depth-first serialization of the type tree. Each node
 
/// is a type that may accept polymorphic arguments. The polymorphic arguments
 
/// are then the children of the node.
 
#[derive(Debug, Clone)]
 
pub struct ParserType {
 
    pub elements: Vec<ParserTypeElement>
 
    pub elements: Vec<ParserTypeElement>,
 
    pub full_span: InputSpan,
 
}
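
// A minimal sketch (example type and `pair_id` assumed, not taken from the
// sources above): a written type such as `in<Pair<u8, bool[]>>` serializes
// depth-first into `elements` as
//
//   elements[0]  Input                     (1 embedded type)
//   elements[1]  Definition(pair_id, 2)    (2 embedded types)
//   elements[2]  UInt8                     (first polymorphic argument)
//   elements[3]  Array                     (1 embedded type)
//   elements[4]  Bool                      (wrapped by the array)
//
// where `full_span` covers the entire written type and each element's
// `element_span` covers only that element's own tokens.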
 

	
 
impl ParserType {
 
    pub(crate) fn iter_embedded(&self, parent_idx: usize) -> ParserTypeIter {
 
        ParserTypeIter::new(&self.elements, parent_idx)
 
    }
 
}
 

	
 
/// Iterator over the embedded elements of a specific element.
 
pub struct ParserTypeIter<'a> {
 
    pub elements: &'a [ParserTypeElement],
 
    pub cur_embedded_idx: usize,
 
@@ -459,26 +467,26 @@ impl<'a> Iterator for ParserTypeIter<'a> {
 

	
 
            self.cur_embedded_idx += 1;
 
            if depth == 0 {
 
                break;
 
            }
 
        }
 

	
 
        debug_assert!(depth == 0, "illegally constructed ParserType: {:?}", self.elements);
 
        return Some(&self.elements[start_element..self.cur_embedded_idx]);
 
    }
 
}
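
// A usage sketch (hypothetical, building on the `in<Pair<u8, bool[]>>` example
// above): `iter_embedded(0)` walks the direct children of element 0, yielding
// one depth-first slice per child tree.
//
//   for child in parser_type.iter_embedded(0) {
//       // single iteration: child == [Definition(pair_id, 2), UInt8, Array, Bool]
//   }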
 

	
 
/// ConcreteType is the representation of a type after resolving symbolic types
 
/// and performing type inference
 
/// ConcreteType is the representation of a type after the type inferencer and
 
/// checker have finished. These are fully typed.
 
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
 
pub enum ConcreteTypePart {
 
    // Special types (cannot be explicitly constructed by the programmer)
 
    Void,
 
    // Builtin types without nested types
 
    Message,
 
    Bool,
 
    UInt8, UInt16, UInt32, UInt64,
 
    SInt8, SInt16, SInt32, SInt64,
 
    Character, String,
 
    // Builtin types with one nested type
 
    Array,
src/protocol/parser/mod.rs
 
@@ -86,27 +86,27 @@ impl Parser {
 
            pass_tokenizer: PassTokenizer::new(),
 
            pass_symbols: PassSymbols::new(),
 
            pass_import: PassImport::new(),
 
            pass_definitions: PassDefinitions::new(),
 
            pass_validation: PassValidationLinking::new(),
 
            pass_typing: PassTyping::new(),
 
            write_ast_to: None,
 
        };
 

	
 
        parser.symbol_table.insert_scope(None, SymbolScope::Global);
 

	
 
        fn quick_type(variants: &[ParserTypeVariant]) -> ParserType {
 
            let mut t = ParserType{ elements: Vec::with_capacity(variants.len()) };
 
            let mut t = ParserType{ elements: Vec::with_capacity(variants.len()), full_span: InputSpan::new() };
 
            for variant in variants {
 
                t.elements.push(ParserTypeElement{ full_span: InputSpan::new(), variant: variant.clone() });
 
                t.elements.push(ParserTypeElement{ element_span: InputSpan::new(), variant: variant.clone() });
 
            }
 
            t
 
        }
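
        // Sketch of what `quick_type` builds for the `get` builtin below
        // (illustrative; builtins have no source location, so every span is the
        // empty `InputSpan::new()`):
        //
        //   quick_type(&[PTV::Input, PTV::PolymorphicArgument(id.upcast(), 0)])
        //     => elements[0] = Input                           (1 embedded type)
        //        elements[1] = PolymorphicArgument(get_id, 0)  (the `T` of `get<T>`)
        //
        // which is the depth-first form of the parameter type `in<T>`.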
 

	
 
        use ParserTypeVariant as PTV;
 
        insert_builtin_function(&mut parser, "get", &["T"], |id| (
 
            vec![
 
                ("input", quick_type(&[PTV::Input, PTV::PolymorphicArgument(id.upcast(), 0)]))
 
            ],
 
            quick_type(&[PTV::PolymorphicArgument(id.upcast(), 0)])
 
        ));
 
        insert_builtin_function(&mut parser, "put", &["T"], |id| (
src/protocol/parser/pass_definitions.rs
 
@@ -115,25 +115,25 @@ impl PassDefinitions {
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        // Parse struct definition
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut fields_section = self.struct_fields.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let poly_vars = ctx.heap[definition_id].poly_vars(); // TODO: @Cleanup, this is really ugly. But rust...
 
                let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
                let start_pos = iter.last_valid_pos();
 
                let parser_type = consume_parser_type(
 
                    source, iter, &ctx.symbols, &ctx.heap, poly_vars, module_scope,
 
                    definition_id, false, 0
 
                )?;
 
                let field = consume_ident_interned(source, iter, ctx)?;
 
                Ok(StructFieldDefinition{
 
                    span: InputSpan::from_positions(start_pos, field.span.end),
 
                    field, parser_type
 
                })
 
            },
 
@@ -204,25 +204,25 @@ impl PassDefinitions {
 
        let mut variants_section = self.union_variants.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let identifier = consume_ident_interned(source, iter, ctx)?;
 
                let mut close_pos = identifier.span.end;
 

	
 
                let mut types_section = self.parser_types.start_section();
 

	
 
                let has_embedded = maybe_consume_comma_separated(
 
                    TokenKind::OpenParen, TokenKind::CloseParen, source, iter, ctx,
 
                    |source, iter, ctx| {
 
                        let poly_vars = ctx.heap[definition_id].poly_vars(); // TODO: @Cleanup, this is really ugly. But rust...
 
                        let poly_vars = ctx.heap[definition_id].poly_vars();
 
                        consume_parser_type(
 
                            source, iter, &ctx.symbols, &ctx.heap, poly_vars,
 
                            module_scope, definition_id, false, 0
 
                        )
 
                    },
 
                    &mut types_section, "an embedded type", Some(&mut close_pos)
 
                )?;
 
                let value = if has_embedded {
 
                    UnionVariantValue::Embedded(types_section.into_vec())
 
                } else {
 
                    types_section.forget();
 
                    UnionVariantValue::None
 
@@ -264,25 +264,25 @@ impl PassDefinitions {
 
        consume_parameter_list(
 
            &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
 
        )?;
 
        let parameters = parameter_section.into_vec();
 

	
 
        // Consume return types
 
        consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
        let mut return_types = self.parser_types.start_section();
 
        let mut open_curly_pos = iter.last_valid_pos(); // bogus value
 
        consume_comma_separated_until(
 
            TokenKind::OpenCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let poly_vars = ctx.heap[definition_id].poly_vars(); // TODO: @Cleanup, this is really ugly. But rust...
 
                let poly_vars = ctx.heap[definition_id].poly_vars();
 
                consume_parser_type(source, iter, &ctx.symbols, &ctx.heap, poly_vars, module_scope, definition_id, false, 0)
 
            },
 
            &mut return_types, "a return type", Some(&mut open_curly_pos)
 
        )?;
 
        let return_types = return_types.into_vec();
 

	
 
        // TODO: @ReturnValues
 
        match return_types.len() {
 
            0 => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "expected a return type")),
 
            1 => {},
 
            _ => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "multiple return types are not (yet) allowed")),
 
        }
 
@@ -345,25 +345,25 @@ impl PassDefinitions {
 
        } else {
 
            // Not a block statement, so wrap it in one
 
            let mut statements = self.statements.start_section();
 
            let wrap_begin_pos = iter.last_valid_pos();
 
            self.consume_statement(module, iter, ctx, &mut statements)?;
 
            let wrap_end_pos = iter.last_valid_pos();
 

	
 
            let statements = statements.into_vec();
 

	
 
            let id = ctx.heap.alloc_block_statement(|this| BlockStatement{
 
                this,
 
                is_implicit: true,
 
                span: InputSpan::from_positions(wrap_begin_pos, wrap_end_pos), // TODO: @Span
 
                span: InputSpan::from_positions(wrap_begin_pos, wrap_end_pos),
 
                statements,
 
                end_block: EndBlockStatementId::new_invalid(),
 
                scope_node: ScopeNode::new_invalid(),
 
                first_unique_id_in_scope: -1,
 
                next_unique_id_in_scope: -1,
 
                relative_pos_in_parent: 0,
 
                locals: Vec::new(),
 
                labels: Vec::new(),
 
                next: StatementId::new_invalid(),
 
            });
 

	
 
            let end_block = ctx.heap.alloc_end_block_statement(|this| EndBlockStatement{
 
@@ -613,25 +613,25 @@ impl PassDefinitions {
 
        }))
 
    }
 

	
 
    fn consume_return_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ReturnStatementId, ParseError> {
 
        let return_span = consume_exact_ident(&module.source, iter, KW_STMT_RETURN)?;
 
        let mut scoped_section = self.expressions.start_section();
 

	
 
        consume_comma_separated_until(
 
            TokenKind::SemiColon, &module.source, iter, ctx,
 
            |_source, iter, ctx| self.consume_expression(module, iter, ctx),
 
            &mut scoped_section, "a return expression", None
 
            &mut scoped_section, "an expression", None
 
        )?;
 
        let expressions = scoped_section.into_vec();
 

	
 
        if expressions.is_empty() {
 
            return Err(ParseError::new_error_str_at_span(&module.source, return_span, "expected at least one return value"));
 
        } else if expressions.len() > 1 {
 
            return Err(ParseError::new_error_str_at_span(&module.source, return_span, "multiple return values are not (yet) supported"))
 
        }
 

	
 
        Ok(ctx.heap.alloc_return_statement(|this| ReturnStatement{
 
            this,
 
            span: return_span,
 
@@ -649,25 +649,24 @@ impl PassDefinitions {
 
            this,
 
            span: goto_span,
 
            label,
 
            target: None
 
        }))
 
    }
 

	
 
    fn consume_new_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<NewStatementId, ParseError> {
 
        let new_span = consume_exact_ident(&module.source, iter, KW_STMT_NEW)?;
 

	
 
        // TODO: @Cleanup, should just call something like consume_component_expression-ish
 
        let start_pos = iter.last_valid_pos();
 
        let expression_id = self.consume_primary_expression(module, iter, ctx)?;
 
        let expression = &ctx.heap[expression_id];
 
        let mut valid = false;
 

	
 
        let mut call_id = CallExpressionId::new_invalid();
 
        if let Expression::Call(expression) = expression {
 
            // Allow both components and functions, as it makes more sense to
 
            // check their correct use in the validation and linking pass
 
            if expression.method == Method::UserComponent || expression.method == Method::UserFunction {
 
                call_id = expression.this;
 
                valid = true;
 
@@ -686,66 +685,78 @@ impl PassDefinitions {
 
            this,
 
            span: new_span,
 
            expression: call_id,
 
            next: StatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_channel_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ChannelStatementId, ParseError> {
 
        // Consume channel specification
 
        let channel_span = consume_exact_ident(&module.source, iter, KW_STMT_CHANNEL)?;
 
        let inner_port_type = if Some(TokenKind::OpenAngle) == iter.next() {
 
        let (inner_port_type, end_pos) = if Some(TokenKind::OpenAngle) == iter.next() {
 
            // Retrieve the type of the channel, we're cheating a bit here by
 
            // consuming the first '<' and setting the initial angle depth to 1
 
            // such that our final '>' will be consumed as well.
 
            iter.consume();
 
            let definition_id = self.cur_definition;
 
            let poly_vars = ctx.heap[definition_id].poly_vars();
 
            consume_parser_type(
 
            let parser_type = consume_parser_type(
 
                &module.source, iter, &ctx.symbols, &ctx.heap,
 
                poly_vars, SymbolScope::Module(module.root_id), definition_id,
 
                true, 1
 
            )?
 
            )?;
 

	
 
            (parser_type.elements, parser_type.full_span.end)
 
        } else {
 
            // Assume inferred
 
            ParserType{ elements: vec![ParserTypeElement{
 
                full_span: channel_span, // TODO: @Span fix
 
                variant: ParserTypeVariant::Inferred
 
            }]}
 
            (
 
                vec![ParserTypeElement{
 
                    element_span: channel_span,
 
                    variant: ParserTypeVariant::Inferred
 
                }],
 
                channel_span.end
 
            )
 
        };
 

	
 
        let from_identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
        let to_identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        // Construct ports
 
        let port_type_len = inner_port_type.elements.len() + 1;
 
        let mut from_port_type = ParserType{ elements: Vec::with_capacity(port_type_len) };
 
        from_port_type.elements.push(ParserTypeElement{ full_span: channel_span, variant: ParserTypeVariant::Output });
 
        from_port_type.elements.extend_from_slice(&inner_port_type.elements);
 
        let port_type_span = InputSpan::from_positions(channel_span.begin, end_pos);
 
        let port_type_len = inner_port_type.len() + 1;
 
        let mut from_port_type = ParserType{ elements: Vec::with_capacity(port_type_len), full_span: port_type_span };
 
        from_port_type.elements.push(ParserTypeElement{
 
            element_span: channel_span,
 
            variant: ParserTypeVariant::Output,
 
        });
 
        from_port_type.elements.extend_from_slice(&inner_port_type);
 
        let from = ctx.heap.alloc_variable(|this| Variable{
 
            this,
 
            kind: VariableKind::Local,
 
            identifier: from_identifier,
 
            parser_type: from_port_type,
 
            relative_pos_in_block: 0,
 
            unique_id_in_scope: -1,
 
        });
 

	
 
        let mut to_port_type = ParserType{ elements: Vec::with_capacity(port_type_len) };
 
        to_port_type.elements.push(ParserTypeElement{ full_span: channel_span, variant: ParserTypeVariant::Input });
 
        to_port_type.elements.extend_from_slice(&inner_port_type.elements);
 
        let mut to_port_type = ParserType{ elements: Vec::with_capacity(port_type_len), full_span: port_type_span };
 
        to_port_type.elements.push(ParserTypeElement{
 
            element_span: channel_span,
 
            variant: ParserTypeVariant::Input
 
        });
 
        to_port_type.elements.extend_from_slice(&inner_port_type);
 
        let to = ctx.heap.alloc_variable(|this|Variable{
 
            this,
 
            kind: VariableKind::Local,
 
            identifier: to_identifier,
 
            parser_type: to_port_type,
 
            relative_pos_in_block: 0,
 
            unique_id_in_scope: -1,
 
        });
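
        // Illustrative sketch (assumed statement, not from the sources): for
        // `channel<u8> tx -> rx;` the two port types constructed above are
        //
        //   from (tx): elements [Output, UInt8]
        //   to   (rx): elements [Input,  UInt8]
        //
        // and both share `port_type_span`, which runs from the `channel`
        // keyword up to the closing '>' of the explicit type (or only covers
        // the `channel` keyword itself when the type is inferred).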
 

	
 
        // Construct the channel
 
        Ok(ctx.heap.alloc_channel_statement(|this| ChannelStatement{
 
            this,
 
@@ -806,25 +817,25 @@ impl PassDefinitions {
 
        let definition_id = self.cur_definition;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
        let parser_type = consume_parser_type(
 
            &module.source, iter, &ctx.symbols, &ctx.heap, poly_vars,
 
            SymbolScope::Definition(definition_id), definition_id, true, 0
 
        );
 

	
 
        if let Ok(parser_type) = parser_type {
 
            if Some(TokenKind::Ident) == iter.next() {
 
                // Assume this is a proper memory statement
 
                let identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
                let memory_span = InputSpan::from_positions(parser_type.elements[0].full_span.begin, identifier.span.end);
 
                let memory_span = InputSpan::from_positions(parser_type.full_span.begin, identifier.span.end);
 
                let assign_span = consume_token(&module.source, iter, TokenKind::Equal)?;
 

	
 
                let initial_expr_begin_pos = iter.last_valid_pos();
 
                let initial_expr_id = self.consume_expression(module, iter, ctx)?;
 
                let initial_expr_end_pos = iter.last_valid_pos();
 
                consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
                // Allocate the memory statement with the variable
 
                let local_id = ctx.heap.alloc_variable(|this| Variable{
 
                    this,
 
                    kind: VariableKind::Local,
 
                    identifier: identifier.clone(),
 
@@ -1392,25 +1403,25 @@ impl PassDefinitions {
 
                                        variant_idx: 0,
 
                                    }),
 
                                    parent: ExpressionParent::None,
 
                                    unique_id_in_definition: -1,
 
                                }).upcast()
 
                            },
 
                            Definition::Component(_) => {
 
                                // Component instantiation
 
                                let arguments = self.consume_expression_list(module, iter, ctx, None)?;
 

	
 
                                ctx.heap.alloc_call_expression(|this| CallExpression{
 
                                    this,
 
                                    span: parser_type.elements[0].full_span, // TODO: @Span fix
 
                                    span: parser_type.full_span,
 
                                    parser_type,
 
                                    method: Method::UserComponent,
 
                                    arguments,
 
                                    definition: target_definition_id,
 
                                    parent: ExpressionParent::None,
 
                                    unique_id_in_definition: -1,
 
                                }).upcast()
 
                            },
 
                            Definition::Function(function_definition) => {
 
                                // Check whether it is a builtin function
 
                                let method = if function_definition.builtin {
 
                                    match function_definition.identifier.value.as_str() {
 
@@ -1422,39 +1433,38 @@ impl PassDefinitions {
 
                                        "assert" => Method::Assert,
 
                                        _ => unreachable!(),
 
                                    }
 
                                } else {
 
                                    Method::UserFunction
 
                                };
 

	
 
                                // Function call: consume the arguments
 
                                let arguments = self.consume_expression_list(module, iter, ctx, None)?;
 

	
 
                                ctx.heap.alloc_call_expression(|this| CallExpression{
 
                                    this,
 
                                    span: parser_type.elements[0].full_span, // TODO: @Span fix
 
                                    span: parser_type.full_span,
 
                                    parser_type,
 
                                    method,
 
                                    arguments,
 
                                    definition: target_definition_id,
 
                                    parent: ExpressionParent::None,
 
                                    unique_id_in_definition: -1,
 
                                }).upcast()
 
                            }
 
                        }
 
                    },
 
                    _ => {
 
                        return Err(ParseError::new_error_str_at_span(
 
                            &module.source, parser_type.elements[0].full_span,
 
                            "unexpected type in expression, note that casting expressions are not yet implemented"
 
                            &module.source, parser_type.full_span, "unexpected type in expression"
 
                        ))
 
                    }
 
                }
 
            } else {
 
                // Check for builtin keywords or builtin functions
 
                if ident_text == KW_LIT_NULL || ident_text == KW_LIT_TRUE || ident_text == KW_LIT_FALSE {
 
                    iter.consume();
 

	
 
                    // Parse builtin literal
 
                    let value = match ident_text {
 
                        KW_LIT_NULL => Literal::Null,
 
                        KW_LIT_TRUE => Literal::True,
 
@@ -1491,28 +1501,31 @@ impl PassDefinitions {
 
                    iter.consume();
 
                    let to_type = if Some(TokenKind::OpenAngle) == iter.next() {
 
                        iter.consume();
 
                        let definition_id = self.cur_definition;
 
                        let poly_vars = ctx.heap[definition_id].poly_vars();
 
                        consume_parser_type(
 
                            &module.source, iter, &ctx.symbols, &ctx.heap,
 
                            poly_vars, SymbolScope::Module(module.root_id), definition_id,
 
                            true, 1
 
                        )?
 
                    } else {
 
                        // Automatic casting with inferred target type
 
                        ParserType{ elements: vec![ParserTypeElement{
 
                            full_span: ident_span, // TODO: @Span fix
 
                            variant: ParserTypeVariant::Inferred,
 
                        }]}
 
                        ParserType{
 
                            elements: vec![ParserTypeElement{
 
                                element_span: ident_span,
 
                                variant: ParserTypeVariant::Inferred,
 
                            }],
 
                            full_span: ident_span
 
                        }
 
                    };
 

	
 
                    consume_token(&module.source, iter, TokenKind::OpenParen)?;
 
                    let subject = self.consume_expression(module, iter, ctx)?;
 
                    consume_token(&module.source, iter, TokenKind::CloseParen)?;
 

	
 
                    ctx.heap.alloc_cast_expression(|this| CastExpression{
 
                        this,
 
                        span: ident_span,
 
                        to_type,
 
                        subject,
 
                        parent: ExpressionParent::None,
 
@@ -1524,25 +1537,25 @@ impl PassDefinitions {
 
                    // then if a programmer mistyped a struct/function name the
 
                    // error messages will be rather cryptic. For polymorphic
 
                    // arguments we can't really do anything at all (because it
 
                    // uses the '<' token). In the other cases we try to provide
 
                    // a better error message.
 
                    iter.consume();
 
                    let next = iter.next();
 
                    if Some(TokenKind::ColonColon) == next {
 
                        return Err(ParseError::new_error_str_at_span(&module.source, ident_span, "unknown identifier"));
 
                    } else if Some(TokenKind::OpenParen) == next {
 
                        return Err(ParseError::new_error_str_at_span(
 
                            &module.source, ident_span,
 
                            "unknown identifier, did you mistype a union variant's or a function's name?"
 
                            "unknown identifier, did you mistype a union variant's, component's, or function's name?"
 
                        ));
 
                    } else if Some(TokenKind::OpenCurly) == next {
 
                        return Err(ParseError::new_error_str_at_span(
 
                            &module.source, ident_span,
 
                            "unknown identifier, did you mistype a struct type's name?"
 
                        ))
 
                    }
 

	
 
                    let ident_text = ctx.pool.intern(ident_text);
 
                    let identifier = Identifier { span: ident_span, value: ident_text };
 

	
 
                    ctx.heap.alloc_variable_expression(|this| VariableExpression {
 
@@ -1605,107 +1618,148 @@ impl PassDefinitions {
 
        )?;
 
        Ok(section.into_vec())
 
    }
 
}
 

	
 
/// Consumes a type. A type always starts with an identifier which may indicate
 
/// a builtin type or a user-defined type. The fact that it may contain
 
/// polymorphic arguments makes it a tree-like structure. Because we cannot rely
 
/// on knowing the exact number of polymorphic arguments we do not check for
 
/// these.
 
///
 
/// Note that the first depth index is used as a hack.
 
// TODO: @Optimize, @Span fix, @Cleanup
 
// TODO: @Optimize, @Cleanup
 
fn consume_parser_type(
 
    source: &InputSource, iter: &mut TokenIter, symbols: &SymbolTable, heap: &Heap, poly_vars: &[Identifier],
 
    cur_scope: SymbolScope, wrapping_definition: DefinitionId, allow_inference: bool, first_angle_depth: i32,
 
) -> Result<ParserType, ParseError> {
 
    struct Entry{
 
        element: ParserTypeElement,
 
        depth: i32,
 
    }
 

	
 
    // After parsing the array modifier "[]", we need to insert an array type
 
    // before the most recently parsed type.
 
    fn insert_array_before(elements: &mut Vec<Entry>, depth: i32, span: InputSpan) {
 
        let index = elements.iter().rposition(|e| e.depth == depth).unwrap();
 
        let num_embedded = elements[index].element.variant.num_embedded();
 
        elements.insert(index, Entry{
 
            element: ParserTypeElement{ full_span: span, variant: ParserTypeVariant::Array },
 
            element: ParserTypeElement{ element_span: span, variant: ParserTypeVariant::Array },
 
            depth,
 
        });
 

	
 
        // Now the original element, and all of its children, should have their
 
        // depth incremented by 1
 
        elements[index + 1].depth += 1;
 
        if num_embedded != 0 {
 
            for idx in index + 2..elements.len() {
 
                let element = &mut elements[idx];
 
                if element.depth >= depth + 1 {
 
                    element.depth += 1;
 
                } else {
 
                    break;
 
                }
 
            }
 
        }
 
    }
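
    // A worked sketch of the insertion (values assumed): while parsing
    // `Foo<u32[]>`, right before the "[]" is handled the entries are
    //
    //   [ Entry{ Definition(foo_id, 1), depth: 0 }, Entry{ UInt32, depth: 1 } ]
    //
    // `insert_array_before(&mut elements, 1, span)` inserts the Array in front
    // of the most recent depth-1 entry and pushes that entry (plus any children
    // it might have) one level deeper:
    //
    //   [ Entry{ Definition(foo_id, 1), depth: 0 },
    //     Entry{ Array, depth: 1 },
    //     Entry{ UInt32, depth: 2 } ]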
 

	
 
    // Most common case we just have one type, perhaps with some array
 
    // annotations. This is both the hot-path, and simplifies the state machine
 
    // that follows and is responsible for parsing more complicated types.
 
    let element = consume_parser_type_ident(source, iter, symbols, heap, poly_vars, cur_scope, wrapping_definition, allow_inference)?;
 
    let element = consume_parser_type_ident(
 
        source, iter, symbols, heap, poly_vars, cur_scope,
 
        wrapping_definition, allow_inference
 
    )?;
 

	
 
    if iter.next() != Some(TokenKind::OpenAngle) {
 
        let num_embedded = element.variant.num_embedded();
 
        let mut num_array = 0;
 
        let first_pos = element.element_span.begin;
 
        let mut last_pos = element.element_span.end;
 
        let mut elements = Vec::with_capacity(num_embedded + 2); // type itself + embedded + 1 (maybe) array type
 

	
 
        // Consume any potential array elements
 
        while iter.next() == Some(TokenKind::OpenSquare) {
 
            let mut array_span = iter.next_span();
 
            iter.consume();
 

	
 
            let end_span = iter.next_span();
 
            array_span.end = end_span.end;
 
            consume_token(source, iter, TokenKind::CloseSquare)?;
 
            num_array += 1;
 
        }
 

	
 
        let array_span = element.full_span;
 
        let mut elements = Vec::with_capacity(num_array + num_embedded + 1);
 
        for _ in 0..num_array {
 
            elements.push(ParserTypeElement{ full_span: array_span, variant: ParserTypeVariant::Array });
 
            last_pos = end_span.end;
 
            elements.push(ParserTypeElement{ element_span: array_span, variant: ParserTypeVariant::Array });
 
        }
 

	
 
        // Push the element itself
 
        let element_span = element.element_span;
 
        elements.push(element);
 

	
 
        // Check if polymorphic arguments are expected
 
        if num_embedded != 0 {
 
            if !allow_inference {
 
                return Err(ParseError::new_error_str_at_span(source, array_span, "type inference is not allowed here"));
 
                return Err(ParseError::new_error_str_at_span(source, element_span, "type inference is not allowed here"));
 
            }
 

	
 
            for _ in 0..num_embedded {
 
                elements.push(ParserTypeElement { full_span: array_span, variant: ParserTypeVariant::Inferred });
 
                elements.push(ParserTypeElement { element_span, variant: ParserTypeVariant::Inferred });
 
            }
 
        }
 

	
 
        // When we have applied the initial-open-angle hack (e.g. consuming an
 
        // explicit type on a channel), then we consume the closing angles as
 
        // well.
 
        for _ in 0..first_angle_depth {
 
            let (_, angle_end_pos) = iter.next_positions();
 
            last_pos = angle_end_pos;
 
            consume_token(source, iter, TokenKind::CloseAngle)?;
 
        }
 

	
 
        return Ok(ParserType{ elements });
 
        return Ok(ParserType{
 
            elements,
 
            full_span: InputSpan::from_positions(first_pos, last_pos)
 
        });
 
    };
 

	
 
    // We have a polymorphic specification. So we start by pushing the item onto
 
    // our stack, then start adding entries together with the angle-brace depth
 
    // at which they're found.
 
    let mut elements = Vec::new();
 
    let first_pos = element.element_span.begin;
 
    let mut last_pos = element.element_span.end;
 
    elements.push(Entry{ element, depth: 0 });
 

	
 
    // Start out with the first '<' consumed.
 
    iter.consume();
 
    enum State { Ident, Open, Close, Comma }
 
    let mut state = State::Open;
 
    let mut angle_depth = first_angle_depth + 1;
 

	
 
    loop {
 
        let next = iter.next();
 

	
 
        match state {
 
            State::Ident => {
 
                // Just parsed an identifier, may expect comma, angled braces,
 
                // or the tokens indicating an array
 
                if Some(TokenKind::OpenAngle) == next {
 
                    angle_depth += 1;
 
                    state = State::Open;
 
                } else if Some(TokenKind::CloseAngle) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    angle_depth -= 1;
 
                    state = State::Close;
 
                } else if Some(TokenKind::ShiftRight) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    angle_depth -= 2;
 
                    state = State::Close;
 
                } else if Some(TokenKind::Comma) == next {
 
                    state = State::Comma;
 
                } else if Some(TokenKind::OpenSquare) == next {
 
                    let (start_pos, _) = iter.next_positions();
 
                    iter.consume(); // consume opening square
 
                    if iter.next() != Some(TokenKind::CloseSquare) {
 
                        return Err(ParseError::new_error_str_at_pos(
 
                            source, iter.last_valid_pos(),
 
                            "unexpected token: expected ']'"
 
                        ));
 
@@ -1725,27 +1779,31 @@ fn consume_parser_type(
 
            State::Open => {
 
                // Just parsed an opening angle bracket, expecting an identifier
 
                let element = consume_parser_type_ident(source, iter, symbols, heap, poly_vars, cur_scope, wrapping_definition, allow_inference)?;
 
                elements.push(Entry{ element, depth: angle_depth });
 
                state = State::Ident;
 
            },
 
            State::Close => {
 
                // Just parsed 1 or 2 closing angle brackets, expecting comma,
 
                // more closing brackets or the tokens indicating an array
 
                if Some(TokenKind::Comma) == next {
 
                    state = State::Comma;
 
                } else if Some(TokenKind::CloseAngle) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    angle_depth -= 1;
 
                    state = State::Close;
 
                } else if Some(TokenKind::ShiftRight) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    angle_depth -= 2;
 
                    state = State::Close;
 
                } else if Some(TokenKind::OpenSquare) == next {
 
                    let (start_pos, _) = iter.next_positions();
 
                    iter.consume();
 
                    if iter.next() != Some(TokenKind::CloseSquare) {
 
                        return Err(ParseError::new_error_str_at_pos(
 
                            source, iter.last_valid_pos(),
 
                            "unexpected token: expected ']'"
 
                        ));
 
                    }
 
                    let (_, end_pos) = iter.next_positions();
 
@@ -1759,139 +1817,146 @@ fn consume_parser_type(
 
                }
 

	
 
                iter.consume();
 
            },
 
            State::Comma => {
 
                // Just parsed a comma, expecting an identifier or more closing
 
                // braces
 
                if Some(TokenKind::Ident) == next {
 
                    let element = consume_parser_type_ident(source, iter, symbols, heap, poly_vars, cur_scope, wrapping_definition, allow_inference)?;
 
                    elements.push(Entry{ element, depth: angle_depth });
 
                    state = State::Ident;
 
                } else if Some(TokenKind::CloseAngle) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    iter.consume();
 
                    angle_depth -= 1;
 
                    state = State::Close;
 
                } else if Some(TokenKind::ShiftRight) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    iter.consume();
 
                    angle_depth -= 2;
 
                    state = State::Close;
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        source, iter.last_valid_pos(),
 
                        "unexpected token: expected '>' or a type name"
 
                    ));
 
                }
 
            }
 
        }
 

	
 
        if angle_depth < 0 {
 
            return Err(ParseError::new_error_str_at_pos(source, iter.last_valid_pos(), "unmatched '>'"));
 
        } else if angle_depth == 0 {
 
            break;
 
        }
 
    }
 

	
 
    // If here then we found the correct number of angle braces. But we still
 
    // need to make sure that each encountered type has the correct number of
 
    // embedded types.
 
    let mut idx = 0;
 
    while idx < elements.len() {
 
    for idx in 0..elements.len() {
 
        let cur_element = &elements[idx];
 

	
 
        let expected_subtypes = cur_element.element.variant.num_embedded();
 
        let mut encountered_subtypes = 0;
 
        for peek_idx in idx + 1..elements.len() {
 
            let peek_element = &elements[peek_idx];
 
            if peek_element.depth == cur_element.depth + 1 {
 
                encountered_subtypes += 1;
 
            } else if peek_element.depth <= cur_element.depth {
 
                break;
 
            }
 
        }
 

	
 
        if expected_subtypes != encountered_subtypes {
 
            if encountered_subtypes == 0 {
 
                // Case where we have elided the embedded types, all of them
 
                // should be inferred.
 
                if !allow_inference {
 
                    return Err(ParseError::new_error_str_at_span(
 
                        source, cur_element.element.full_span,
 
                        source, cur_element.element.element_span,
 
                        "type inference is not allowed here"
 
                    ));
 
                }
 

	
 
                // Insert the missing types
 
                let inserted_span = cur_element.element.full_span;
 
                // Insert the missing types (in reverse order, but they're all
 
                // of the "inferred" type anyway).
 
                let inserted_span = cur_element.element.element_span;
 
                let inserted_depth = cur_element.depth + 1;
 
                elements.reserve(expected_subtypes);
 
                for _ in 0..expected_subtypes {
 
                    elements.insert(idx + 1, Entry{
 
                        element: ParserTypeElement{ full_span: inserted_span, variant: ParserTypeVariant::Inferred },
 
                        element: ParserTypeElement{ element_span: inserted_span, variant: ParserTypeVariant::Inferred },
 
                        depth: inserted_depth,
 
                    });
 
                }
 
            } else {
 
                // Mismatch in number of embedded types, produce a neat error
 
                // message.
 
                let type_name = String::from_utf8_lossy(source.section_at_span(cur_element.element.full_span));
 
                let type_name = String::from_utf8_lossy(source.section_at_span(cur_element.element.element_span));
 
                fn polymorphic_name_text(num: usize) -> &'static str {
 
                    if num == 1 { "polymorphic argument" } else { "polymorphic arguments" }
 
                }
 
                fn were_or_was(num: usize) -> &'static str {
 
                    if num == 1 { "was" } else { "were" }
 
                }
 

	
 
                if expected_subtypes == 0 {
 
                    return Err(ParseError::new_error_at_span(
 
                        source, cur_element.element.full_span,
 
                        source, cur_element.element.element_span,
 
                        format!(
 
                            "the type '{}' is not polymorphic, yet {} {} {} provided",
 
                            type_name, encountered_subtypes, polymorphic_name_text(encountered_subtypes),
 
                            were_or_was(encountered_subtypes)
 
                        )
 
                    ));
 
                }
 

	
 
                let maybe_infer_text = if allow_inference {
 
                    " (or none, to perform implicit type inference)"
 
                } else {
 
                    ""
 
                };
 

	
 
                return Err(ParseError::new_error_at_span(
 
                    source, cur_element.element.full_span,
 
                    source, cur_element.element.element_span,
 
                    format!(
 
                        "expected {} {}{} for the type '{}', but {} {} provided",
 
                        expected_subtypes, polymorphic_name_text(expected_subtypes),
 
                        maybe_infer_text, type_name, encountered_subtypes,
 
                        were_or_was(encountered_subtypes)
 
                    )
 
                ));
 
            }
 
        }
 

	
 
        idx += 1;
 
    }
 

	
 
    let mut constructed_elements = Vec::with_capacity(elements.len());
 
    for element in elements.into_iter() {
 
        constructed_elements.push(element.element);
 
    }
 

	
 
    Ok(ParserType{ elements: constructed_elements })
 
    Ok(ParserType{
 
        elements: constructed_elements,
 
        full_span: InputSpan::from_positions(first_pos, last_pos)
 
    })
 
}
 

	
 
/// Consumes an identifier for which we assume that it resolves to some kind of
 
/// type. Once we actually arrive at a type we will stop parsing. Hence there
 
/// may be trailing '::' tokens in the iterator.
 
/// may be trailing '::' tokens in the iterator, or the subsequent specification
 
/// of polymorphic arguments.
 
fn consume_parser_type_ident(
 
    source: &InputSource, iter: &mut TokenIter, symbols: &SymbolTable, heap: &Heap, poly_vars: &[Identifier],
 
    mut scope: SymbolScope, wrapping_definition: DefinitionId, allow_inference: bool,
 
) -> Result<ParserTypeElement, ParseError> {
 
    use ParserTypeVariant as PTV;
 
    let (mut type_text, mut type_span) = consume_any_ident(source, iter)?;
 

	
 
    let variant = match type_text {
 
        KW_TYPE_MESSAGE => PTV::Message,
 
        KW_TYPE_BOOL => PTV::Bool,
 
        KW_TYPE_UINT8 => PTV::UInt8,
 
        KW_TYPE_UINT16 => PTV::UInt16,
 
@@ -1976,48 +2041,48 @@ fn consume_parser_type_ident(
 
                            type_kind = Some(PTV::Definition(symbol_definition.definition_id, num_poly_vars as u32));
 
                            break;
 
                        }
 
                    }
 
                }
 
            }
 

	
 
            debug_assert!(type_kind.is_some());
 
            type_kind.unwrap()
 
        },
 
    };
 

	
 
    Ok(ParserTypeElement{ full_span: type_span, variant })
 
    Ok(ParserTypeElement{ element_span: type_span, variant })
 
}
 

	
 
/// Consumes polymorphic variables and throws them on the floor.
 
fn consume_polymorphic_vars_spilled(source: &InputSource, iter: &mut TokenIter, _ctx: &mut PassCtx) -> Result<(), ParseError> {
 
    maybe_consume_comma_separated_spilled(
 
        TokenKind::OpenAngle, TokenKind::CloseAngle, source, iter, _ctx,
 
        |source, iter, _ctx| {
 
            consume_ident(source, iter)?;
 
            Ok(())
 
        }, "a polymorphic variable"
 
    )?;
 
    Ok(())
 
}
 

	
 
/// Consumes the parameter list to functions/components
 
fn consume_parameter_list(
 
    source: &InputSource, iter: &mut TokenIter, ctx: &mut PassCtx,
 
    target: &mut ScopedSection<VariableId>, scope: SymbolScope, definition_id: DefinitionId
 
) -> Result<(), ParseError> {
 
    consume_comma_separated(
 
        TokenKind::OpenParen, TokenKind::CloseParen, source, iter, ctx,
 
        |source, iter, ctx| {
 
            let poly_vars = ctx.heap[definition_id].poly_vars(); // TODO: @Cleanup, this is really ugly. But rust...
 
            let poly_vars = ctx.heap[definition_id].poly_vars(); // Rust being rust, multiple lookups
 
            let parser_type = consume_parser_type(
 
                source, iter, &ctx.symbols, &ctx.heap, poly_vars, scope,
 
                definition_id, false, 0
 
            )?;
 
            let identifier = consume_ident_interned(source, iter, ctx)?;
 
            let parameter_id = ctx.heap.alloc_variable(|this| Variable{
 
                this,
 
                kind: VariableKind::Parameter,
 
                parser_type,
 
                identifier,
 
                relative_pos_in_block: 0,
 
                unique_id_in_scope: -1,
src/protocol/parser/pass_typing.rs
 
@@ -3553,24 +3553,40 @@ impl PassTyping {
 
                    }
 

	
 
                    if !InferenceType::check_subtrees(section_a, 0, section_b, 0) {
 
                        // Not compatible
 
                        return Some((marker_a, section_a, section_b))
 
                    }
 
                }
 
            }
 

	
 
            None
 
        }
 

	
 
        // Helper function to check for polymorph mismatch between an inference
 
        // type and the polymorphic variables in the poly_data struct.
 
        fn has_explicit_poly_mismatch<'a>(
 
            poly_vars: &'a [InferenceType], arg: &'a InferenceType
 
        ) -> Option<(u32, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            for (marker, section) in arg.marker_iter() {
 
                debug_assert!((marker as usize) < poly_vars.len());
 
                let poly_section = &poly_vars[marker as usize].parts;
 
                if !InferenceType::check_subtrees(poly_section, 0, section, 0) {
 
                    return Some((marker, poly_section, section))
 
                }
 
            }
 

	
 
            None
 
        }
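
        // Illustrative sketch, mirroring the new tests in parser_inference.rs:
        // for `foo<u64>(5, 6)` whose result is returned as a `u32`,
        // `poly_vars[0]` holds the explicitly written `u64` while the marker
        // section for `T` in the return type was inferred as `u32`.
        // `check_subtrees` then fails, `(0, [u64 section], [u32 section])` is
        // returned, and the code further below turns it into the
        // "polymorphic variable has type ... while the return type inferred it
        // to ..." error.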
 

	
 
        // Helper function to retrieve polyvar name and definition name
 
        fn get_poly_var_and_definition_name<'a>(ctx: &'a Ctx, poly_var_idx: u32, definition_id: DefinitionId) -> (&'a str, &'a str) {
 
            let definition = &ctx.heap[definition_id];
 
            let poly_var = definition.poly_vars()[poly_var_idx as usize].value.as_str();
 
            let func_name = definition.identifier().value.as_str();
 

	
 
            (poly_var, func_name)
 
        }
 

	
 
        // Helper function to construct initial error
 
        fn construct_main_error(ctx: &Ctx, poly_data: &ExtraData, poly_var_idx: u32, expr: &Expression) -> ParseError {
 
            match expr {
 
@@ -3698,24 +3714,52 @@ impl PassTyping {
 
                        )
 
                    )
 
                    .with_info_at_span(
 
                        &ctx.module.source, expr.span(), format!(
 
                            "While the {} inferred it to '{}'",
 
                            expr_return_name,
 
                            InferenceType::partial_display_name(&ctx.heap, section_ret)
 
                        )
 
                    );
 
            }
 
        }
 

	
 
        // Now check against the explicitly specified polymorphic variables (if
 
        // any).
 
        for (arg_idx, arg) in poly_data.embedded.iter().enumerate() {
 
            if let Some((poly_idx, poly_section, arg_section)) = has_explicit_poly_mismatch(&poly_data.poly_vars, arg) {
 
                let arg = &ctx.heap[expr_args[arg_idx]];
 
                return construct_main_error(ctx, poly_data, poly_idx, expr)
 
                    .with_info_at_span(
 
                        &ctx.module.source, arg.span(), format!(
 
                            "The polymorphic variable has type '{}' (which might have been partially inferred) while the argument inferred it to '{}'",
 
                            InferenceType::partial_display_name(&ctx.heap, poly_section),
 
                            InferenceType::partial_display_name(&ctx.heap, arg_section)
 
                        )
 
                    );
 
            }
 
        }
 

	
 
        if let Some((poly_idx, poly_section, ret_section)) = has_explicit_poly_mismatch(&poly_data.poly_vars, &poly_data.returned) {
 
            return construct_main_error(ctx, poly_data, poly_idx, expr)
 
                .with_info_at_span(
 
                    &ctx.module.source, expr.span(), format!(
 
                        "The polymorphic variable has type '{}' (which might have been partially inferred) while the {} inferred it to '{}'",
 
                        InferenceType::partial_display_name(&ctx.heap, poly_section),
 
                        expr_return_name,
 
                        InferenceType::partial_display_name(&ctx.heap, ret_section)
 
                    )
 
                )
 
        }
 

	
 
        unreachable!("construct_poly_arg_error without actual error found?")
 
    }
 
}
 

	
 
#[cfg(test)]
 
mod tests {
 
    use super::*;
 
    use crate::protocol::arena::Id;
 
    use InferenceTypePart as ITP;
 
    use InferenceType as IT;
 

	
 
    #[test]
src/protocol/parser/pass_validation_linking.rs
 
@@ -817,26 +817,27 @@ impl Visitor for PassValidationLinking {
 
                            let field_ident = &struct_definition.fields[def_field_idx].identifier;
 
                            not_specified.push_str(field_ident.value.as_str());
 
                            num_not_specified += 1;
 
                        }
 
                    }
 

	
 
                    debug_assert!(num_not_specified > 0);
 
                    let msg = if num_not_specified == 1 {
 
                        format!("not all fields are specified, '{}' is missing", not_specified)
 
                    } else {
 
                        format!("not all fields are specified, [{}] are missing", not_specified)
 
                    };
 

	
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module.source, literal.parser_type.elements[0].full_span, msg
 
                        &ctx.module.source, literal.parser_type.full_span, msg
 
                    ));
 
                }
 

	
 
                // Need to traverse fields expressions in struct and evaluate
 
                // the poly args
 
                let mut expr_section = self.expression_buffer.start_section();
 
                for field in &literal.fields {
 
                    expr_section.push(field.value);
 
                }
 

	
 
                for expr_idx in 0..expr_section.len() {
 
                    let expr_id = expr_section[expr_idx];
 
@@ -850,62 +851,62 @@ impl Visitor for PassValidationLinking {
 
                // Make sure the variant exists
 
                let type_definition = ctx.types.get_base_definition(&literal.definition).unwrap();
 
                let enum_definition = type_definition.definition.as_enum();
 

	
 
                let variant_idx = enum_definition.variants.iter().position(|v| {
 
                    v.identifier == literal.variant
 
                });
 

	
 
                if variant_idx.is_none() {
 
                    let literal = ctx.heap[id].value.as_enum();
 
                    let ast_definition = ctx.heap[literal.definition].as_enum();
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module.source, literal.parser_type.elements[0].full_span, format!(
 
                        &ctx.module.source, literal.parser_type.full_span, format!(
 
                            "the variant '{}' does not exist on the enum '{}'",
 
                            literal.variant.value.as_str(), ast_definition.identifier.value.as_str()
 
                        )
 
                    ));
 
                }
 

	
 
                literal.variant_idx = variant_idx.unwrap();
 
            },
 
            Literal::Union(literal) => {
 
                // Make sure the variant exists
 
                let type_definition = ctx.types.get_base_definition(&literal.definition).unwrap();
 
                let union_definition = type_definition.definition.as_union();
 

	
 
                let variant_idx = union_definition.variants.iter().position(|v| {
 
                    v.identifier == literal.variant
 
                });
 
                if variant_idx.is_none() {
 
                    let literal = ctx.heap[id].value.as_union();
 
                    let ast_definition = ctx.heap[literal.definition].as_union();
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module.source, literal.parser_type.elements[0].full_span, format!(
 
                        &ctx.module.source, literal.parser_type.full_span, format!(
 
                            "the variant '{}' does not exist on the union '{}'",
 
                            literal.variant.value.as_str(), ast_definition.identifier.value.as_str()
 
                        )
 
                    ));
 
                }
 

	
 
                literal.variant_idx = variant_idx.unwrap();
 

	
 
                // Make sure the number of specified values matches the expected
 
                // number of embedded values in the union variant.
 
                let union_variant = &union_definition.variants[literal.variant_idx];
 
                if union_variant.embedded.len() != literal.values.len() {
 
                    let literal = ctx.heap[id].value.as_union();
 
                    let ast_definition = ctx.heap[literal.definition].as_union();
 
                    return Err(ParseError::new_error_at_span(
 
                        &ctx.module.source, literal.parser_type.elements[0].full_span, format!(
 
                        &ctx.module.source, literal.parser_type.full_span, format!(
 
                            "The variant '{}' of union '{}' expects {} embedded values, but {} were specified",
 
                            literal.variant.value.as_str(), ast_definition.identifier.value.as_str(),
 
                            union_variant.embedded.len(), literal.values.len()
 
                        ),
 
                    ))
 
                }
 

	
 
                // Traverse embedded values of union (if any) and evaluate the
 
                // polymorphic arguments
 
                let upcast_id = id.upcast();
 
                let mut expr_section = self.expression_buffer.start_section();
 
                for value in &literal.values {
 
@@ -1158,27 +1159,31 @@ impl Visitor for PassValidationLinking {
 
                        )
 
                    ));
 
                }
 

	
 
                // By now we know that this is a valid binding expression. Given
 
                // that a binding expression must be nested under an if/while
 
                // statement, we now add the variable to the (implicit) block
 
                // statement following the if/while statement.
 
                let bound_identifier = var_expr.identifier.clone();
 
                let bound_variable_id = ctx.heap.alloc_variable(|this| Variable{
 
                    this,
 
                    kind: VariableKind::Binding,
 
                    parser_type: ParserType{ elements: vec![
 
                        ParserTypeElement{ full_span: bound_identifier.span, variant: ParserTypeVariant::Inferred }
 
                    ]},
 
                    parser_type: ParserType{
 
                        elements: vec![ParserTypeElement{
 
                            element_span: bound_identifier.span,
 
                            variant: ParserTypeVariant::Inferred
 
                        }],
 
                        full_span: bound_identifier.span
 
                    },
 
                    identifier: bound_identifier,
 
                    relative_pos_in_block: 0,
 
                    unique_id_in_scope: -1,
 
                });
 

	
 
                let body_stmt_id = match &ctx.heap[self.in_test_expr] {
 
                    Statement::If(stmt) => stmt.true_body,
 
                    Statement::While(stmt) => stmt.body,
 
                    _ => unreachable!(),
 
                };
 
                let body_scope = Scope::Regular(body_stmt_id);
 
                self.checked_at_single_scope_add_local(ctx, body_scope, 0, bound_variable_id)?;
src/protocol/parser/type_table.rs
 
@@ -857,25 +857,25 @@ impl TypeTable {
 
                    // As we're parsing the type definitions where these kinds
 
                    // of types are impossible/disallowed to express:
 
                    unreachable!("illegal ParserTypeVariant within type definition");
 
                },
 
                PTV::PolymorphicArgument(_, _) => {
 
                    set_resolve_result(ResolveResult::PolymoprhicArgument);
 
                },
 
                PTV::Definition(embedded_id, _) => {
 
                    let definition = &ctx.heap[embedded_id];
 
                    if !(definition.is_struct() || definition.is_enum() || definition.is_union()) {
 
                        let module_source = &modules[root_id.index as usize].source;
 
                        return Err(ParseError::new_error_str_at_span(
 
                            module_source, element.full_span, "expected a datatype (struct, enum or union)"
 
                            module_source, element.element_span, "expected a datatype (struct, enum or union)"
 
                        ))
 
                    }
 

	
 
                    if self.lookup.contains_key(&embedded_id) {
 
                        set_resolve_result(ResolveResult::Resolved(definition.defined_in(), embedded_id))
 
                    } else {
 
                        return Ok(ResolveResult::Unresolved(definition.defined_in(), embedded_id))
 
                    }
 
                }
 
            }
 
        }
 

	
src/protocol/tests/eval_binding.rs
 
@@ -199,23 +199,23 @@ fn test_binding_fizz_buzz() {
 
                    valid = false;
 
                }
 

	
 
                idx += 1;
 
            }
 

	
 
            return valid;
 
        }
 

	
 
        func fizz_buzz() -> bool {
 
            // auto fizz_more_slow = construct_fizz_buzz_very_slow(100);
 
            // return test_fizz_buzz(fizz_more_slow);
 
            // auto fizz_less_slow = construct_fizz_buzz_slightly_less_slow(100);
 
            // auto fizz_less_slow = construct_fizz_buzz_slightly_less_slow(10000000);
 
            // return test_fizz_buzz(fizz_less_slow);
 

	
 
            auto fizz_more_slow2 = construct_fizz_buzz_very_slow(100);
 
            auto fizz_less_slow2 = construct_fizz_buzz_slightly_less_slow(100);
 
            return test_fizz_buzz(fizz_more_slow2) && test_fizz_buzz(fizz_less_slow2);
 
        }
 
    ").for_function("fizz_buzz", |f| { f
 
        .call_ok(Some(Value::Bool(true)));
 
    });
 
}
 
\ No newline at end of file
src/protocol/tests/parser_inference.rs
 
@@ -419,13 +419,60 @@ fn test_failed_polymorph_inference() {
 
        func construct<T1, T2>(T1 l, T2 r) -> Node<T1, T2> { return Node{ l: l, r: r }; }
 
        func fix_poly<T>(Node<T, T> a) -> s32 { return 0; }
 
        func test() -> s32 {
 
            s8 assigned = 0;
 
            s64 another = 1;
 
            auto thing = construct(assigned, construct(another, 1));
 
            fix_poly(thing.r);
 
            thing.r.r = assigned;
 
            return 0;
 
        }
 
        ",
 
    );
 
}
 

	
 

	
 
#[test]
 
fn test_explicit_polymorph_argument() {
 
    // Failed because the array was put at the same type depth as u32, so the
 
    // call was interpreted as having two polymorphic arguments
 
    Tester::new_single_source_expect_ok("explicit with array", "
 
    func foo<T>(T a, T b) -> T {
 
        return a @ b;
 
    }
 
    func test() -> u32 {
 
        return foo<u32[]>({1}, {2})[1];
 
    }").for_function("test", |f| { f
 
        .call_ok(Some(Value::UInt32(2)));
 
    });
 

	
 
    // Failed because the type inferencer did not construct polymorph errors by
 
    // checking argument/return types against the explicitly specified
 
    // polymorphic arguments
 
    Tester::new_single_source_expect_err("explicit polymorph mismatch", "
 
    func foo<T>(T a, T b) -> T { return a + b; }
 
    struct Bar<A, B>{A a, B b}
 
    func test() -> u32 {
 
        return foo<Bar<u32, u64>[]>(5, 6);
 
    }").error(|e| { e
 
        .assert_num(2)
 
        .assert_occurs_at(0, "foo<Bar")
 
        .assert_msg_has(0, "'T' of 'foo'")
 
        .assert_occurs_at(1, "5, ")
 
        .assert_msg_has(1, "has type 'Bar<u32, u64>[]")
 
        .assert_msg_has(1, "inferred it to 'integerlike'");
 
    });
 

	
 
    // Similar to above, now for return type
 
    Tester::new_single_source_expect_err("explicit polymorph mismatch", "
 
    func foo<T>(T a, T b) -> T { return a + b; }
 
    func test() -> u32 {
 
        return foo<u64>(5, 6);
 
    }").error(|e| { e
 
        .assert_num(2)
 
        .assert_occurs_at(0, "foo<u64")
 
        .assert_msg_has(0, "'T' of 'foo'")
 
        .assert_occurs_at(1, "foo<u64")
 
        .assert_msg_has(1, "has type 'u64'")
 
        .assert_msg_has(1, "return type inferred it to 'u32'");
 
    });
 
}
 
\ No newline at end of file