Changeset - 6c13a3dd4ca2
Author: mh <contact@maxhenger.nl>
Date: 2021-12-11 01:19:21
Remove old type parsing code
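In short: call sites that used the free function consume_parser_type now go through a reusable ParserTypeParser instance stored on PassDefinitions, and the old i32 "initial angle depth" argument becomes an Option<InputPosition>. A representative before/after pair, excerpted from the struct-field parsing in the diff below (not a standalone program):

    // Before: free function, taking the initial angle depth as an i32 "hack".
    let parser_type = consume_parser_type(
        source, iter, &ctx.symbols, &ctx.heap, poly_vars, module_scope,
        definition_id, false, 0
    )?;

    // After: shared parser state on PassDefinitions; the trailing Option<InputPosition>
    // replaces the angle-depth integer (Some(pos) when a '<' was already consumed).
    let parser_type = self.type_parser.consume_parser_type(
        iter, &ctx.heap, source, &ctx.symbols, poly_vars, definition_id,
        module_scope, false, None
    )?;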
2 files changed with 58 insertions and 487 deletions:
src/protocol/parser/pass_definitions.rs
 
@@ -111,51 +111,51 @@ impl PassDefinitions {
 
    }
 

	
 
    fn visit_struct_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_STRUCT)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        // Parse struct definition
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut fields_section = self.struct_fields.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
                let start_pos = iter.last_valid_pos();
 
-                let parser_type = consume_parser_type(
-                    source, iter, &ctx.symbols, &ctx.heap, poly_vars, module_scope,
-                    definition_id, false, 0
+                let parser_type = self.type_parser.consume_parser_type(
+                    iter, &ctx.heap, source, &ctx.symbols, poly_vars, definition_id,
+                    module_scope, false, None
 
                )?;
 
                let field = consume_ident_interned(source, iter, ctx)?;
 
                Ok(StructFieldDefinition{
 
                    span: InputSpan::from_positions(start_pos, field.span.end),
 
                    field, parser_type
 
                })
 
            },
 
            &mut fields_section, "a struct field", "a list of struct fields", None
 
        )?;
 

	
 
        // Transfer to preallocated definition
 
        let struct_def = ctx.heap[definition_id].as_struct_mut();
 
        struct_def.fields = fields_section.into_vec();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_enum_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        consume_exact_ident(&module.source, iter, KW_ENUM)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
@@ -198,152 +198,155 @@ impl PassDefinitions {
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        // Parse union definition
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        let mut variants_section = self.union_variants.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let identifier = consume_ident_interned(source, iter, ctx)?;
 
                let mut close_pos = identifier.span.end;
 

	
 
                let mut types_section = self.parser_types.start_section();
 

	
 
                let has_embedded = maybe_consume_comma_separated(
 
                    TokenKind::OpenParen, TokenKind::CloseParen, source, iter, ctx,
 
                    |source, iter, ctx| {
 
                        let poly_vars = ctx.heap[definition_id].poly_vars();
 
-                        consume_parser_type(
-                            source, iter, &ctx.symbols, &ctx.heap, poly_vars,
-                            module_scope, definition_id, false, 0
+                        self.type_parser.consume_parser_type(
+                            iter, &ctx.heap, source, &ctx.symbols, poly_vars, definition_id,
+                            module_scope, false, None
 
                        )
 
                    },
 
                    &mut types_section, "an embedded type", Some(&mut close_pos)
 
                )?;
 
                let value = if has_embedded {
 
                    types_section.into_vec()
 
                } else {
 
                    types_section.forget();
 
                    Vec::new()
 
                };
 

	
 
                Ok(UnionVariantDefinition{
 
                    span: InputSpan::from_positions(identifier.span.begin, close_pos),
 
                    identifier,
 
                    value
 
                })
 
            },
 
            &mut variants_section, "a union variant", "a list of union variants", None
 
        )?;
 

	
 
        // Transfer to AST
 
        let union_def = ctx.heap[definition_id].as_union_mut();
 
        union_def.variants = variants_section.into_vec();
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_function_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        // Retrieve function name
 
        consume_exact_ident(&module.source, iter, KW_FUNCTION)?;
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated DefinitionId
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        // Parse function's argument list
 
        let mut parameter_section = self.variables.start_section();
 
        consume_parameter_list(
 
-            &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
+            &mut self.type_parser, &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
 
        )?;
 
        let parameters = parameter_section.into_vec();
 

	
 
        // Consume return types
 
        consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
        let mut return_types = self.parser_types.start_section();
 
        let mut open_curly_pos = iter.last_valid_pos(); // bogus value
 
        consume_comma_separated_until(
 
            TokenKind::OpenCurly, &module.source, iter, ctx,
 
            |source, iter, ctx| {
 
                let poly_vars = ctx.heap[definition_id].poly_vars();
 
-                consume_parser_type(source, iter, &ctx.symbols, &ctx.heap, poly_vars, module_scope, definition_id, false, 0)
+                self.type_parser.consume_parser_type(
+                    iter, &ctx.heap, source, &ctx.symbols, poly_vars, definition_id,
+                    module_scope, false, None
+                )
 
            },
 
            &mut return_types, "a return type", Some(&mut open_curly_pos)
 
        )?;
 
        let return_types = return_types.into_vec();
 

	
 
        match return_types.len() {
 
            0 => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "expected a return type")),
 
            1 => {},
 
            _ => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "multiple return types are not (yet) allowed")),
 
        }
 

	
 
        // Consume block
 
        let body = self.consume_block_statement_without_leading_curly(module, iter, ctx, open_curly_pos)?;
 

	
 
        // Assign everything in the preallocated AST node
 
        let function = ctx.heap[definition_id].as_function_mut();
 
        function.return_types = return_types;
 
        function.parameters = parameters;
 
        function.body = body;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_component_definition(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<(), ParseError> {
 
        // Consume component variant and name
 
        let (_variant_text, _) = consume_any_ident(&module.source, iter)?;
 
        debug_assert!(_variant_text == KW_PRIMITIVE || _variant_text == KW_COMPOSITE);
 
        let (ident_text, _) = consume_ident(&module.source, iter)?;
 

	
 
        // Retrieve preallocated definition
 
        let module_scope = SymbolScope::Module(module.root_id);
 
        let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text)
 
            .unwrap().variant.as_definition().definition_id;
 
        self.cur_definition = definition_id;
 

	
 
        consume_polymorphic_vars_spilled(&module.source, iter, ctx)?;
 

	
 
        // Parse component's argument list
 
        let mut parameter_section = self.variables.start_section();
 
        consume_parameter_list(
 
-            &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
+            &mut self.type_parser, &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
 
        )?;
 
        let parameters = parameter_section.into_vec();
 

	
 
        // Consume block
 
        let body = self.consume_block_statement(module, iter, ctx)?;
 

	
 
        // Assign everything in the AST node
 
        let component = ctx.heap[definition_id].as_component_mut();
 
        component.parameters = parameters;
 
        component.body = body;
 

	
 
        Ok(())
 
    }
 

	
 
    /// Consumes a block statement. If the resulting statement is not a block
 
    /// (e.g. for a shorthand "if (expr) single_statement") then it will be
 
    /// wrapped in one
 
    fn consume_block_or_wrapped_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<BlockStatementId, ParseError> {
 
        if Some(TokenKind::OpenCurly) == iter.next() {
 
            // This is a block statement
 
            self.consume_block_statement(module, iter, ctx)
 
        } else {
 
@@ -719,55 +722,56 @@ impl PassDefinitions {
 
            return Err(ParseError::new_error_str_at_span(
 
                &module.source, InputSpan::from_positions(start_pos, iter.last_valid_pos()), "expected a call expression"
 
            ));
 
        }
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        debug_assert!(!call_id.is_invalid());
 
        Ok(ctx.heap.alloc_new_statement(|this| NewStatement{
 
            this,
 
            span: new_span,
 
            expression: call_id,
 
            next: StatementId::new_invalid(),
 
        }))
 
    }
 

	
 
    fn consume_channel_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<ChannelStatementId, ParseError> {
 
        // Consume channel specification
 
        let channel_span = consume_exact_ident(&module.source, iter, KW_STMT_CHANNEL)?;
 
        let (inner_port_type, end_pos) = if Some(TokenKind::OpenAngle) == iter.next() {
 
            // Retrieve the type of the channel, we're cheating a bit here by
 
            // consuming the first '<' and setting the initial angle depth to 1
 
            // such that our final '>' will be consumed as well.
 
            let angle_start_pos = iter.next_start_position();
 
            iter.consume();
 
            let definition_id = self.cur_definition;
 
            let poly_vars = ctx.heap[definition_id].poly_vars();
 
-            let parser_type = consume_parser_type(
-                &module.source, iter, &ctx.symbols, &ctx.heap,
-                poly_vars, SymbolScope::Module(module.root_id), definition_id,
-                true, 1
+            let parser_type = self.type_parser.consume_parser_type(
+                iter, &ctx.heap, &module.source, &ctx.symbols, poly_vars,
+                definition_id, SymbolScope::Module(module.root_id),
+                true, Some(angle_start_pos)
 
            )?;
 

	
 
            (parser_type.elements, parser_type.full_span.end)
 
        } else {
 
            // Assume inferred
 
            (
 
                vec![ParserTypeElement{
 
                    element_span: channel_span,
 
                    variant: ParserTypeVariant::Inferred
 
                }],
 
                channel_span.end
 
            )
 
        };
 

	
 
        let from_identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::ArrowRight)?;
 
        let to_identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
        consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
        // Construct ports
 
        let port_type_span = InputSpan::from_positions(channel_span.begin, end_pos);
 
        let port_type_len = inner_port_type.len() + 1;
 
        let mut from_port_type = ParserType{ elements: Vec::with_capacity(port_type_len), full_span: port_type_span };
 
        from_port_type.elements.push(ParserTypeElement{
 
@@ -838,51 +842,51 @@ impl PassDefinitions {
 
            section.push(stmt_id.upcast());
 
        } else {
 
            // Produce the labeled statement using the first statement, and push
 
            // the remaining ones at the end.
 
            let inner_statements = inner_section.into_vec();
 
            section.push(stmt_id.upcast());
 
            for idx in 1..inner_statements.len() {
 
                section.push(inner_statements[idx])
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn maybe_consume_memory_statement(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx
 
    ) -> Result<Option<(MemoryStatementId, ExpressionStatementId)>, ParseError> {
 
        // This is a bit ugly. It would be nicer if we could somehow
 
        // consume the expression with a type hint if we do get a valid
 
        // type, but we don't get an identifier following it
 
        let iter_state = iter.save();
 
        let definition_id = self.cur_definition;
 
        let poly_vars = ctx.heap[definition_id].poly_vars();
 

	
 
-        let parser_type = consume_parser_type(
-            &module.source, iter, &ctx.symbols, &ctx.heap, poly_vars,
-            SymbolScope::Definition(definition_id), definition_id, true, 0
+        let parser_type = self.type_parser.consume_parser_type(
+            iter, &ctx.heap, &module.source, &ctx.symbols, poly_vars,
+            definition_id, SymbolScope::Definition(definition_id), true, None
 
        );
 

	
 
        if let Ok(parser_type) = parser_type {
 
            if Some(TokenKind::Ident) == iter.next() {
 
                // Assume this is a proper memory statement
 
                let identifier = consume_ident_interned(&module.source, iter, ctx)?;
 
                let memory_span = InputSpan::from_positions(parser_type.full_span.begin, identifier.span.end);
 
                let assign_span = consume_token(&module.source, iter, TokenKind::Equal)?;
 

	
 
                let initial_expr_begin_pos = iter.last_valid_pos();
 
                let initial_expr_id = self.consume_expression(module, iter, ctx)?;
 
                let initial_expr_end_pos = iter.last_valid_pos();
 
                consume_token(&module.source, iter, TokenKind::SemiColon)?;
 

	
 
                // Allocate the memory statement with the variable
 
                let local_id = ctx.heap.alloc_variable(|this| Variable{
 
                    this,
 
                    kind: VariableKind::Local,
 
                    identifier: identifier.clone(),
 
                    parser_type,
 
                    relative_pos_in_block: 0,
 
                    unique_id_in_scope: -1,
 
                });
 
                let memory_stmt_id = ctx.heap.alloc_memory_statement(|this| MemoryStatement{
 
@@ -1372,51 +1376,51 @@ impl PassDefinitions {
 
            let (character, span) = consume_character_literal(&module.source, iter)?;
 

	
 
            ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                this, span,
 
                value: Literal::Character(character),
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
            }).upcast()
 
        } else if next == Some(TokenKind::Ident) {
 
            // May be a variable, a type instantiation or a function call. If we
 
            // have a single identifier that we cannot find in the type table
 
            // then we're going to assume that we're dealing with a variable.
 
            let ident_span = iter.next_span();
 
            let ident_text = module.source.section_at_span(ident_span);
 
            let symbol = ctx.symbols.get_symbol_by_name(SymbolScope::Module(module.root_id), ident_text);
 

	
 
            if symbol.is_some() {
 
                // The first bit looked like a symbol, so we're going to follow
 
                // that all the way through, assume we arrive at some kind of
 
                // function call or type instantiation
 
                use ParserTypeVariant as PTV;
 

	
 
                let symbol_scope = SymbolScope::Definition(self.cur_definition);
 
                let poly_vars = ctx.heap[self.cur_definition].poly_vars();
 
-                let parser_type = consume_parser_type(
-                    &module.source, iter, &ctx.symbols, &ctx.heap, poly_vars, symbol_scope,
-                    self.cur_definition, true, 0
+                let parser_type = self.type_parser.consume_parser_type(
+                    iter, &ctx.heap, &module.source, &ctx.symbols, poly_vars, self.cur_definition,
+                    symbol_scope, true, None
 
                )?;
 
                debug_assert!(!parser_type.elements.is_empty());
 
                match parser_type.elements[0].variant {
 
                    PTV::Definition(target_definition_id, _) => {
 
                        let definition = &ctx.heap[target_definition_id];
 
                        match definition {
 
                            Definition::Struct(_) => {
 
                                // Struct literal
 
                                let mut last_token = iter.last_valid_pos();
 
                                let mut struct_fields = Vec::new();
 
                                consume_comma_separated(
 
                                    TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
 
                                    |source, iter, ctx| {
 
                                        let identifier = consume_ident_interned(source, iter, ctx)?;
 
                                        consume_token(source, iter, TokenKind::Colon)?;
 
                                        let value = self.consume_expression(module, iter, ctx)?;
 
                                        Ok(LiteralStructField{ identifier, value, field_idx: 0 })
 
                                    },
 
                                    &mut struct_fields, "a struct field", "a list of struct fields", Some(&mut last_token)
 
                                )?;
 

	
 
                                ctx.heap.alloc_literal_expression(|this| LiteralExpression{
 
                                    this,
 
                                    span: InputSpan::from_positions(ident_span.begin, last_token),
 
@@ -1550,55 +1554,56 @@ impl PassDefinitions {
 
                        unique_id_in_definition: -1,
 
                    }).upcast()
 
                } else if ident_text == KW_LET {
 
                    // Binding expression
 
                    let operator_span = iter.next_span();
 
                    iter.consume();
 

	
 
                    let bound_to = self.consume_prefix_expression(module, iter, ctx)?;
 
                    consume_token(&module.source, iter, TokenKind::Equal)?;
 
                    let bound_from = self.consume_prefix_expression(module, iter, ctx)?;
 

	
 
                    let full_span = InputSpan::from_positions(
 
                        operator_span.begin, ctx.heap[bound_from].full_span().end,
 
                    );
 

	
 
                    ctx.heap.alloc_binding_expression(|this| BindingExpression{
 
                        this, operator_span, full_span, bound_to, bound_from,
 
                        parent: ExpressionParent::None,
 
                        unique_id_in_definition: -1,
 
                    }).upcast()
 
                } else if ident_text == KW_CAST {
 
                    // Casting expression
 
                    iter.consume();
 
                    let to_type = if Some(TokenKind::OpenAngle) == iter.next() {
 
                        let angle_start_pos = iter.next_start_position();
 
                        iter.consume();
 
                        let definition_id = self.cur_definition;
 
                        let poly_vars = ctx.heap[definition_id].poly_vars();
 
-                        consume_parser_type(
-                            &module.source, iter, &ctx.symbols, &ctx.heap,
-                            poly_vars, SymbolScope::Module(module.root_id), definition_id,
-                            true, 1
+                        self.type_parser.consume_parser_type(
+                            iter, &ctx.heap, &module.source, &ctx.symbols,
+                            poly_vars, definition_id, SymbolScope::Module(module.root_id),
+                            true, Some(angle_start_pos)
 
                        )?
 
                    } else {
 
                        // Automatic casting with inferred target type
 
                        ParserType{
 
                            elements: vec![ParserTypeElement{
 
                                element_span: ident_span,
 
                                variant: ParserTypeVariant::Inferred,
 
                            }],
 
                            full_span: ident_span
 
                        }
 
                    };
 

	
 
                    consume_token(&module.source, iter, TokenKind::OpenParen)?;
 
                    let subject = self.consume_expression(module, iter, ctx)?;
 
                    let mut full_span = iter.next_span();
 
                    full_span.begin = to_type.full_span.begin;
 
                    consume_token(&module.source, iter, TokenKind::CloseParen)?;
 

	
 
                    ctx.heap.alloc_cast_expression(|this| CastExpression{
 
                        this,
 
                        cast_span: to_type.full_span,
 
                        full_span, to_type, subject,
 
                        parent: ExpressionParent::None,
 
                        unique_id_in_definition: -1,
 
@@ -1676,513 +1681,64 @@ impl PassDefinitions {
 
            result = ctx.heap.alloc_binary_expression(|this| BinaryExpression{
 
                this, operator_span, full_span, left, operation, right,
 
                parent: ExpressionParent::None,
 
                unique_id_in_definition: -1,
 
            }).upcast();
 
        }
 

	
 
        Ok(result)
 
    }
 

	
 
    #[inline]
 
    fn consume_expression_list(
 
        &mut self, module: &Module, iter: &mut TokenIter, ctx: &mut PassCtx, end_pos: Option<&mut InputPosition>
 
    ) -> Result<Vec<ExpressionId>, ParseError> {
 
        let mut section = self.expressions.start_section();
 
        consume_comma_separated(
 
            TokenKind::OpenParen, TokenKind::CloseParen, &module.source, iter, ctx,
 
            |_source, iter, ctx| self.consume_expression(module, iter, ctx),
 
            &mut section, "an expression", "a list of expressions", end_pos
 
        )?;
 
        Ok(section.into_vec())
 
    }
 
}
 

	
 
/// Consumes a type. A type always starts with an identifier which may indicate
 
/// a builtin type or a user-defined type. The fact that it may contain
 
/// polymorphic arguments makes it a tree-like structure. Because we cannot rely
 
/// on knowing the exact number of polymorphic arguments we do not check for
 
/// these.
 
///
 
/// Note that the first depth index is used as a hack.
 
// TODO: @Optimize, @Cleanup
 
fn consume_parser_type(
 
    source: &InputSource, iter: &mut TokenIter, symbols: &SymbolTable, heap: &Heap, poly_vars: &[Identifier],
 
    cur_scope: SymbolScope, wrapping_definition: DefinitionId, allow_inference: bool, first_angle_depth: i32,
 
) -> Result<ParserType, ParseError> {
 
    struct Entry{
 
        element: ParserTypeElement,
 
        depth: i32,
 
    }
 

	
 
    // After parsing the array modifier "[]", we need to insert an array type
 
    // before the most recently parsed type.
 
    fn insert_array_before(elements: &mut Vec<Entry>, depth: i32, span: InputSpan) {
 
        let index = elements.iter().rposition(|e| e.depth == depth).unwrap();
 
        let num_embedded = elements[index].element.variant.num_embedded();
 
        elements.insert(index, Entry{
 
            element: ParserTypeElement{ element_span: span, variant: ParserTypeVariant::Array },
 
            depth,
 
        });
 

	
 
        // Now the original element, and all of its children, should have their
 
        // depth incremented by 1
 
        elements[index + 1].depth += 1;
 
        if num_embedded != 0 {
 
            for idx in index + 2..elements.len() {
 
                let element = &mut elements[idx];
 
                if element.depth >= depth + 1 {
 
                    element.depth += 1;
 
                } else {
 
                    break;
 
                }
 
            }
 
        }
 
    }
 

	
 
    // Most common case we just have one type, perhaps with some array
 
    // annotations. This is both the hot-path, and simplifies the state machine
 
    // that follows and is responsible for parsing more complicated types.
 
    let element = consume_parser_type_ident(
 
        source, iter, symbols, heap, poly_vars, cur_scope,
 
        wrapping_definition, allow_inference
 
    )?;
 

	
 
    if iter.next() != Some(TokenKind::OpenAngle) {
 
        let num_embedded = element.variant.num_embedded();
 
        let first_pos = element.element_span.begin;
 
        let mut last_pos = element.element_span.end;
 
        let mut elements = Vec::with_capacity(num_embedded + 2); // type itself + embedded + 1 (maybe) array type
 

	
 
        // Consume any potential array elements
 
        while iter.next() == Some(TokenKind::OpenSquare) {
 
            let mut array_span = iter.next_span();
 
            iter.consume();
 

	
 
            let end_span = iter.next_span();
 
            array_span.end = end_span.end;
 
            consume_token(source, iter, TokenKind::CloseSquare)?;
 

	
 
            last_pos = end_span.end;
 
            elements.push(ParserTypeElement{ element_span: array_span, variant: ParserTypeVariant::Array });
 
        }
 

	
 
        // Push the element itself
 
        let element_span = element.element_span;
 
        elements.push(element);
 

	
 
        // Check if polymorphic arguments are expected
 
        if num_embedded != 0 {
 
            if !allow_inference {
 
                return Err(ParseError::new_error_str_at_span(source, element_span, "type inference is not allowed here"));
 
            }
 

	
 
            for _ in 0..num_embedded {
 
                elements.push(ParserTypeElement { element_span, variant: ParserTypeVariant::Inferred });
 
            }
 
        }
 

	
 
        // When we have applied the initial-open-angle hack (e.g. consuming an
 
        // explicit type on a channel), then we consume the closing angles as
 
        // well.
 
        for _ in 0..first_angle_depth {
 
            let (_, angle_end_pos) = iter.next_positions();
 
            last_pos = angle_end_pos;
 
            consume_token(source, iter, TokenKind::CloseAngle)?;
 
        }
 

	
 
        return Ok(ParserType{
 
            elements,
 
            full_span: InputSpan::from_positions(first_pos, last_pos)
 
        });
 
    };
 

	
 
    // We have a polymorphic specification. So we start by pushing the item onto
 
    // our stack, then start adding entries together with the angle-brace depth
 
    // at which they're found.
 
    let mut elements = Vec::new();
 
    let first_pos = element.element_span.begin;
 
    let mut last_pos = element.element_span.end;
 
    elements.push(Entry{ element, depth: 0 });
 

	
 
    // Start out with the first '<' consumed.
 
    iter.consume();
 
    enum State { Ident, Open, Close, Comma }
 
    let mut state = State::Open;
 
    let mut angle_depth = first_angle_depth + 1;
 

	
 
    loop {
 
        let next = iter.next();
 

	
 
        match state {
 
            State::Ident => {
 
                // Just parsed an identifier, may expect comma, angled braces,
 
                // or the tokens indicating an array
 
                if Some(TokenKind::OpenAngle) == next {
 
                    angle_depth += 1;
 
                    state = State::Open;
 
                } else if Some(TokenKind::CloseAngle) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    angle_depth -= 1;
 
                    state = State::Close;
 
                } else if Some(TokenKind::ShiftRight) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    angle_depth -= 2;
 
                    state = State::Close;
 
                } else if Some(TokenKind::Comma) == next {
 
                    state = State::Comma;
 
                } else if Some(TokenKind::OpenSquare) == next {
 
                    let (start_pos, _) = iter.next_positions();
 
                    iter.consume(); // consume opening square
 
                    if iter.next() != Some(TokenKind::CloseSquare) {
 
                        return Err(ParseError::new_error_str_at_pos(
 
                            source, iter.last_valid_pos(),
 
                            "unexpected token: expected ']'"
 
                        ));
 
                    }
 
                    let (_, end_pos) = iter.next_positions();
 
                    let array_span = InputSpan::from_positions(start_pos, end_pos);
 
                    insert_array_before(&mut elements, angle_depth, array_span);
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        source, iter.last_valid_pos(),
 
                        "unexpected token: expected '<', '>', ',' or '['")
 
                    );
 
                }
 

	
 
                iter.consume();
 
            },
 
            State::Open => {
 
                // Just parsed an opening angle bracket, expecting an identifier
 
                let element = consume_parser_type_ident(source, iter, symbols, heap, poly_vars, cur_scope, wrapping_definition, allow_inference)?;
 
                elements.push(Entry{ element, depth: angle_depth });
 
                state = State::Ident;
 
            },
 
            State::Close => {
 
                // Just parsed 1 or 2 closing angle brackets, expecting comma,
 
                // more closing brackets or the tokens indicating an array
 
                if Some(TokenKind::Comma) == next {
 
                    state = State::Comma;
 
                } else if Some(TokenKind::CloseAngle) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    angle_depth -= 1;
 
                    state = State::Close;
 
                } else if Some(TokenKind::ShiftRight) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    angle_depth -= 2;
 
                    state = State::Close;
 
                } else if Some(TokenKind::OpenSquare) == next {
 
                    let (start_pos, _) = iter.next_positions();
 
                    iter.consume();
 
                    if iter.next() != Some(TokenKind::CloseSquare) {
 
                        return Err(ParseError::new_error_str_at_pos(
 
                            source, iter.last_valid_pos(),
 
                            "unexpected token: expected ']'"
 
                        ));
 
                    }
 
                    let (_, end_pos) = iter.next_positions();
 
                    let array_span = InputSpan::from_positions(start_pos, end_pos);
 
                    insert_array_before(&mut elements, angle_depth, array_span);
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        source, iter.last_valid_pos(),
 
                        "unexpected token: expected ',', '>', or '['")
 
                    );
 
                }
 

	
 
                iter.consume();
 
            },
 
            State::Comma => {
 
                // Just parsed a comma, expecting an identifier or more closing
 
                // braces
 
                if Some(TokenKind::Ident) == next {
 
                    let element = consume_parser_type_ident(source, iter, symbols, heap, poly_vars, cur_scope, wrapping_definition, allow_inference)?;
 
                    elements.push(Entry{ element, depth: angle_depth });
 
                    state = State::Ident;
 
                } else if Some(TokenKind::CloseAngle) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    iter.consume();
 
                    angle_depth -= 1;
 
                    state = State::Close;
 
                } else if Some(TokenKind::ShiftRight) == next {
 
                    let (_, end_angle_pos) = iter.next_positions();
 
                    last_pos = end_angle_pos;
 
                    iter.consume();
 
                    angle_depth -= 2;
 
                    state = State::Close;
 
                } else {
 
                    return Err(ParseError::new_error_str_at_pos(
 
                        source, iter.last_valid_pos(),
 
                        "unexpected token: expected '>' or a type name"
 
                    ));
 
                }
 
            }
 
        }
 

	
 
        if angle_depth < 0 {
 
            return Err(ParseError::new_error_str_at_pos(source, iter.last_valid_pos(), "unmatched '>'"));
 
        } else if angle_depth == 0 {
 
            break;
 
        }
 
    }
 

	
 
    // If here then we have found the correct number of angle braces.
 

	
 
    // Check for trailing array identifiers
 
    while Some(TokenKind::OpenSquare) == iter.next() {
 
        let (array_start, _) = iter.next_positions();
 
        iter.consume();
 
        if Some(TokenKind::CloseSquare) != iter.next() {
 
            return Err(ParseError::new_error_str_at_pos(
 
                source, iter.last_valid_pos(),
 
                "unexpected token: expected ']'"
 
            ));
 
        }
 
        let (_, array_end) = iter.next_positions();
 
        iter.consume();
 
        insert_array_before(&mut elements, 0, InputSpan::from_positions(array_start, array_end))
 
    }
 

	
 
    // If here then we found the correct number of angle braces. But we still
 
    // need to make sure that each encountered type has the correct number of
 
    // embedded types.
 
    for idx in 0..elements.len() {
 
        let cur_element = &elements[idx];
 

	
 
        let expected_subtypes = cur_element.element.variant.num_embedded();
 
        let mut encountered_subtypes = 0;
 
        for peek_idx in idx + 1..elements.len() {
 
            let peek_element = &elements[peek_idx];
 
            if peek_element.depth == cur_element.depth + 1 {
 
                encountered_subtypes += 1;
 
            } else if peek_element.depth <= cur_element.depth {
 
                break;
 
            }
 
        }
 

	
 
        if expected_subtypes != encountered_subtypes {
 
            if encountered_subtypes == 0 {
 
                // Case where we have elided the embedded types, all of them
 
                // should be inferred.
 
                if !allow_inference {
 
                    return Err(ParseError::new_error_str_at_span(
 
                        source, cur_element.element.element_span,
 
                        "type inference is not allowed here"
 
                    ));
 
                }
 

	
 
                // Insert the missing types (in reverse order, but they're all
 
                // of the "inferred" type anyway).
 
                let inserted_span = cur_element.element.element_span;
 
                let inserted_depth = cur_element.depth + 1;
 
                elements.reserve(expected_subtypes);
 
                for _ in 0..expected_subtypes {
 
                    elements.insert(idx + 1, Entry{
 
                        element: ParserTypeElement{ element_span: inserted_span, variant: ParserTypeVariant::Inferred },
 
                        depth: inserted_depth,
 
                    });
 
                }
 
            } else {
 
                // Mismatch in number of embedded types, produce a neat error
 
                // message.
 
                let type_name = String::from_utf8_lossy(source.section_at_span(cur_element.element.element_span));
 
                fn polymorphic_name_text(num: usize) -> &'static str {
 
                    if num == 1 { "polymorphic argument" } else { "polymorphic arguments" }
 
                }
 
                fn were_or_was(num: usize) -> &'static str {
 
                    if num == 1 { "was" } else { "were" }
 
                }
 

	
 
                if expected_subtypes == 0 {
 
                    return Err(ParseError::new_error_at_span(
 
                        source, cur_element.element.element_span,
 
                        format!(
 
                            "the type '{}' is not polymorphic, yet {} {} {} provided",
 
                            type_name, encountered_subtypes, polymorphic_name_text(encountered_subtypes),
 
                            were_or_was(encountered_subtypes)
 
                        )
 
                    ));
 
                }
 

	
 
                let maybe_infer_text = if allow_inference {
 
                    " (or none, to perform implicit type inference)"
 
                } else {
 
                    ""
 
                };
 

	
 
                return Err(ParseError::new_error_at_span(
 
                    source, cur_element.element.element_span,
 
                    format!(
 
                        "expected {} {}{} for the type '{}', but {} {} provided",
 
                        expected_subtypes, polymorphic_name_text(expected_subtypes),
 
                        maybe_infer_text, type_name, encountered_subtypes,
 
                        were_or_was(encountered_subtypes)
 
                    )
 
                ));
 
            }
 
        }
 
    }
 

	
 
    let mut constructed_elements = Vec::with_capacity(elements.len());
 
    for element in elements.into_iter() {
 
        constructed_elements.push(element.element);
 
    }
 

	
 
    Ok(ParserType{
 
        elements: constructed_elements,
 
        full_span: InputSpan::from_positions(first_pos, last_pos)
 
    })
 
}
 

	
 
/// Consumes an identifier for which we assume that it resolves to some kind of
 
/// type. Once we actually arrive at a type we will stop parsing. Hence there
 
/// may be trailing '::' tokens in the iterator, or the subsequent specification
 
/// of polymorphic arguments.
 
fn consume_parser_type_ident(
 
    source: &InputSource, iter: &mut TokenIter, symbols: &SymbolTable, heap: &Heap, poly_vars: &[Identifier],
 
    mut scope: SymbolScope, wrapping_definition: DefinitionId, allow_inference: bool,
 
) -> Result<ParserTypeElement, ParseError> {
 
    use ParserTypeVariant as PTV;
 
    let (mut type_text, mut type_span) = consume_any_ident(source, iter)?;
 

	
 
    let variant = match type_text {
 
        KW_TYPE_MESSAGE => PTV::Message,
 
        KW_TYPE_BOOL => PTV::Bool,
 
        KW_TYPE_UINT8 => PTV::UInt8,
 
        KW_TYPE_UINT16 => PTV::UInt16,
 
        KW_TYPE_UINT32 => PTV::UInt32,
 
        KW_TYPE_UINT64 => PTV::UInt64,
 
        KW_TYPE_SINT8 => PTV::SInt8,
 
        KW_TYPE_SINT16 => PTV::SInt16,
 
        KW_TYPE_SINT32 => PTV::SInt32,
 
        KW_TYPE_SINT64 => PTV::SInt64,
 
        KW_TYPE_IN_PORT => PTV::Input,
 
        KW_TYPE_OUT_PORT => PTV::Output,
 
        KW_TYPE_CHAR => PTV::Character,
 
        KW_TYPE_STRING => PTV::String,
 
        KW_TYPE_INFERRED => {
 
            if !allow_inference {
 
                return Err(ParseError::new_error_str_at_span(source, type_span, "type inference is not allowed here"));
 
            }
 

	
 
            PTV::Inferred
 
        },
 
        _ => {
 
            // Must be some kind of symbolic type
 
            let mut type_kind = None;
 
            for (poly_idx, poly_var) in poly_vars.iter().enumerate() {
 
                if poly_var.value.as_bytes() == type_text {
 
                    type_kind = Some(PTV::PolymorphicArgument(wrapping_definition, poly_idx as u32));
 
                }
 
            }
 

	
 
            if type_kind.is_none() {
 
                // Check symbol table for definition. To be fair, the language
 
                // only allows a single namespace for now. That said:
 
                let last_symbol = symbols.get_symbol_by_name(scope, type_text);
 
                if last_symbol.is_none() {
 
                    return Err(ParseError::new_error_str_at_span(source, type_span, "unknown type"));
 
                }
 
                let mut last_symbol = last_symbol.unwrap();
 

	
 
                loop {
 
                    match &last_symbol.variant {
 
                        SymbolVariant::Module(symbol_module) => {
 
                            // Expecting more identifiers
 
                            if Some(TokenKind::ColonColon) != iter.next() {
 
                                return Err(ParseError::new_error_str_at_span(source, type_span, "expected a type but got a module"));
 
                            }
 

	
 
                            consume_token(source, iter, TokenKind::ColonColon)?;
 

	
 
                            // Consume next part of type and prepare for next
 
                            // lookup loop
 
                            let (next_text, next_span) = consume_any_ident(source, iter)?;
 
                            let old_text = type_text;
 
                            type_text = next_text;
 
                            type_span.end = next_span.end;
 
                            scope = SymbolScope::Module(symbol_module.root_id);
 

	
 
                            let new_symbol = symbols.get_symbol_by_name_defined_in_scope(scope, type_text);
 
                            if new_symbol.is_none() {
 
                                // If the type is imported in the module then notify the programmer
 
                                // that imports do not leak outside of a module
 
                                let type_name = String::from_utf8_lossy(type_text);
 
                                let module_name = String::from_utf8_lossy(old_text);
 
                                let suffix = if symbols.get_symbol_by_name(scope, type_text).is_some() {
 
                                    format!(
 
                                        ". The module '{}' does import '{}', but these imports are not visible to other modules",
 
                                        &module_name, &type_name
 
                                    )
 
                                } else {
 
                                    String::new()
 
                                };
 

	
 
                                return Err(ParseError::new_error_at_span(
 
                                    source, next_span,
 
                                    format!("unknown type '{}' in module '{}'{}", type_name, module_name, suffix)
 
                                ));
 
                            }
 

	
 
                            last_symbol = new_symbol.unwrap();
 
                        },
 
                        SymbolVariant::Definition(symbol_definition) => {
 
                            let num_poly_vars = heap[symbol_definition.definition_id].poly_vars().len();
 
                            type_kind = Some(PTV::Definition(symbol_definition.definition_id, num_poly_vars as u32));
 
                            break;
 
                        }
 
                    }
 
                }
 
            }
 

	
 
            debug_assert!(type_kind.is_some());
 
            type_kind.unwrap()
 
        },
 
    };
 

	
 
    Ok(ParserTypeElement{ element_span: type_span, variant })
 
}
 

	
 
/// Consumes polymorphic variables and throws them on the floor.
 
fn consume_polymorphic_vars_spilled(source: &InputSource, iter: &mut TokenIter, _ctx: &mut PassCtx) -> Result<(), ParseError> {
 
    maybe_consume_comma_separated_spilled(
 
        TokenKind::OpenAngle, TokenKind::CloseAngle, source, iter, _ctx,
 
        |source, iter, _ctx| {
 
            consume_ident(source, iter)?;
 
            Ok(())
 
        }, "a polymorphic variable"
 
    )?;
 
    Ok(())
 
}
 

	
 
/// Consumes the parameter list to functions/components
 
fn consume_parameter_list(
 
-    source: &InputSource, iter: &mut TokenIter, ctx: &mut PassCtx,
-    target: &mut ScopedSection<VariableId>, scope: SymbolScope, definition_id: DefinitionId
+    parser: &mut ParserTypeParser, source: &InputSource, iter: &mut TokenIter,
+    ctx: &mut PassCtx, target: &mut ScopedSection<VariableId>,
+    scope: SymbolScope, definition_id: DefinitionId
 
) -> Result<(), ParseError> {
 
    consume_comma_separated(
 
        TokenKind::OpenParen, TokenKind::CloseParen, source, iter, ctx,
 
        |source, iter, ctx| {
 
            let poly_vars = ctx.heap[definition_id].poly_vars(); // Rust being rust, multiple lookups
 
-            let parser_type = consume_parser_type(
-                source, iter, &ctx.symbols, &ctx.heap, poly_vars, scope,
-                definition_id, false, 0
+            let parser_type = parser.consume_parser_type(
+                iter, &ctx.heap, source, &ctx.symbols, poly_vars, definition_id,
+                scope, false, None
 
            )?;
 
            let identifier = consume_ident_interned(source, iter, ctx)?;
 
            let parameter_id = ctx.heap.alloc_variable(|this| Variable{
 
                this,
 
                kind: VariableKind::Parameter,
 
                parser_type,
 
                identifier,
 
                relative_pos_in_block: 0,
 
                unique_id_in_scope: -1,
 
            });
 
            Ok(parameter_id)
 
        },
 
        target, "a parameter", "a parameter list", None
 
    )
 
}
 
\ No newline at end of file
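Both the removed free function above and the ParserTypeParser in the next file build the same flat representation: a pre-order list of ParserTypeElement values, tracked during parsing as Entry { element, depth } pairs. A rough sketch of that layout, with Foo and Bar standing in for hypothetical user-defined types (spans and absolute depth numbers omitted):

    // Foo<Bar>[]  ->  [ Array, Definition(Foo), Definition(Bar) ]
    //   the Array entry precedes the type it wraps, and Bar, Foo's single
    //   polymorphic argument, sits one depth level below Foo.
    //
    // (Foo, Bar)   ->  [ Tuple(2), Definition(Foo), Definition(Bar) ]
    //   the Tuple's member count is filled in afterwards from the number of
    //   entries one depth level below it (the Tuple(encountered_embedded)
    //   fix-up in the second file).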
src/protocol/parser/pass_definitions_types.rs
 
use crate::protocol::parser::*;
 
use crate::protocol::parser::token_parsing::*;
 

	
 
#[derive(Debug)]
 
struct Entry {
 
    element: ParserTypeElement,
 
    depth: i32,
 
}
 

	
 
-#[derive(Copy, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
 
enum DepthKind {
 
    Tuple, // because we had a `(` token
 
    PolyArgs, // because we had a `<` token
 
}
 

	
 
#[derive(Debug)]
 
struct DepthElement {
 
    kind: DepthKind,
 
    entry_index: u32, // in `entries` array of parser
 
    pos: InputPosition,
 
}
 

	
 
/// Current parsing state, for documentation's sake: types may be named, or may
 
/// be tuples. Named types may have polymorphic arguments (if the type
 
/// declaration allows) and a type may be turned into an array of that type by
 
/// postfixing a "[]".
 
#[derive(Debug)]
 
enum ParseState {
 
    TypeMaybePolyArgs,  // just parsed a type, might have poly arguments
 
    TypeNeverPolyArgs,  // just parsed a type that cannot have poly arguments
 
    PolyArgStart,       // just opened a polymorphic argument list
 
    TupleStart,         // just opened a tuple list
 
    ParsedComma,        // just had a comma
 
}
 

	
 
/// Parses tokens into `ParserType` instances (yes, the name of the struct is
 
/// silly). Essentially a little state machine with its own temporary storage.
 
#[derive(Debug)]
 
pub(crate) struct ParserTypeParser {
 
    entries: Vec<Entry>,
 
    depths: Vec<DepthElement>,
 
    parse_state: ParseState,
 
    first_pos: InputPosition,
 
    last_pos: InputPosition,
 
}
 

	
 
impl ParserTypeParser {
 
    pub(crate) fn new() -> Self {
 
        return Self{
 
            entries: Vec::with_capacity(16),
 
            depths: Vec::with_capacity(16),
 
            parse_state: ParseState::TypeMaybePolyArgs,
 
            first_pos: InputPosition{ line: 0, offset: 0 },
 
            last_pos: InputPosition{ line: 0, offset: 0 }
 
        }
 
    }
 

	
 
    pub(crate) fn consume_parser_type(
 
        &mut self, iter: &mut TokenIter, heap: &Heap, source: &InputSource,
 
        symbols: &SymbolTable, poly_vars: &[Identifier],
 
        wrapping_definition: DefinitionId, cur_scope: SymbolScope,
 
        allow_inference: bool, inside_angular_bracket: Option<InputPosition>,
 
    ) -> Result<ParserType, ParseError> {
 
        // Prepare
 
        self.entries.clear();
 
        self.depths.clear();
 

	
 
        // Setup processing
 
        if let Some(bracket_pos) = inside_angular_bracket {
 
            self.push_depth(DepthKind::PolyArgs, u32::MAX, bracket_pos);
 
        }
 

	
 
        let initial_state = match iter.next() {
 
            Some(TokenKind::Ident) => {
 
                let element = Self::consume_parser_type_element(
 
                    iter, source, heap, symbols, wrapping_definition, poly_vars, cur_scope, allow_inference
 
                )?;
 
                self.first_pos = element.element_span.begin;
 
                self.last_pos = element.element_span.end;
 

	
 
                self.entries.push(Entry{
 
                    element,
 
                    depth: self.cur_depth(),
 
                });
 

	
 
-                ParseState::TypeMaybePolyArgs
+                // Due to the nature of the subsequent type parsing algorithm,
+                // we check the opening polymorphic argument list paren here.
+                if let Some(TokenKind::OpenAngle) = iter.next() {
+                    self.consume_open_angle(iter);
+                    ParseState::PolyArgStart
+                } else {
+                    ParseState::TypeMaybePolyArgs
+                }
 
            },
 
            Some(TokenKind::OpenParen) => {
 
                let tuple_start_pos = iter.next_start_position();
 
                self.first_pos = tuple_start_pos; // last pos will be set later, this is a tuple
 

	
 
                let tuple_entry_index = self.entries.len() as u32;
 
                self.push_depth(DepthKind::Tuple, tuple_entry_index, tuple_start_pos);
 
                self.entries.push(Entry{
 
                    element: ParserTypeElement{
 
                        element_span: InputSpan::from_positions(tuple_start_pos, tuple_start_pos),
 
                        variant: ParserTypeVariant::Tuple(0),
 
                    },
 
                    depth: self.cur_depth(),
 
                });
 
                iter.consume();
 

	
 
                ParseState::PolyArgStart
 
            },
 
            _ => return Err(ParseError::new_error_str_at_pos(source, iter.last_valid_pos(), "expected a type")),
 
        };
 

	
 
        self.parse_state = initial_state;
 

	
 
        // Depth stack and entries are initialized, continue until depth stack
 
@@ -199,65 +210,68 @@ impl ParserTypeParser {
 
            let cur_element = &self.entries[el_index];
 

	
 
            // Peek ahead to see how many embedded types we have
 
            let mut encountered_embedded = 0;
 
            for peek_index in el_index + 1..num_entries {
 
                let peek_element = &self.entries[peek_index];
 
                if peek_element.depth == cur_element.depth + 1 {
 
                    encountered_embedded += 1;
 
                } else if peek_element.depth <= cur_element.depth {
 
                    break;
 
                }
 
            }
 

	
 
            // If we're dealing with a tuple then we don't need to determine if
 
            // the number of embedded types is correct, we simply need to set it
 
            // to whatever what was encountered.
 
            if let ParserTypeVariant::Tuple(_) = cur_element.element.variant {
 
                self.entries[el_index].element.variant = ParserTypeVariant::Tuple(encountered_embedded);
 
            } else {
 
                let expected_embedded = cur_element.element.variant.num_embedded() as u32;
 
                if expected_embedded != encountered_embedded {
 
                    if encountered_embedded == 0 {
 
                        // Every polymorphic argument should be inferred
 
                        if !allow_inference {
 
                            println!("DEBUG: Ended up with {:?}", self.entries);
 
                            return Err(ParseError::new_error_str_at_span(
 
                                source, cur_element.element.element_span,
 
                                "type inference is not allowed here"
 
                            ));
 
                        }
 

	
 
                        // Insert missing types
 
                        let inserted_span = cur_element.element.element_span;
 
                        let inserted_depth = cur_element.depth + 1;
 
                        self.entries.reserve(expected_embedded as usize);
 
-                        self.entries.insert(el_index + 1, Entry{
-                            element: ParserTypeElement{
-                                element_span: inserted_span,
-                                variant: ParserTypeVariant::Inferred,
-                            },
-                            depth: inserted_depth,
-                        });
+                        for _ in 0..expected_embedded {
+                            self.entries.insert(el_index + 1, Entry {
+                                element: ParserTypeElement {
+                                    element_span: inserted_span,
+                                    variant: ParserTypeVariant::Inferred,
+                                },
+                                depth: inserted_depth,
+                            });
+                        }
 
                    } else {
 
                        // Mismatch in number of embedded types
 
                        return Err(Self::construct_poly_arg_mismatch_error(
 
                            source, cur_element.element.element_span, allow_inference,
 
                            expected_embedded, encountered_embedded
 
                        ));
 
                    }
 
                }
 
            }
 
        }
 

	
 
        // Convert the results from parsing into the `ParserType`
 
        let mut elements = Vec::with_capacity(self.entries.len());
 
        debug_assert!(!self.entries.is_empty());
 

	
 
        for entry in self.entries.drain(..) {
 
            elements.push(entry.element)
 
        }
 

	
 
        return Ok(ParserType{
 
            elements,
 
            full_span: InputSpan::from_positions(self.first_pos, self.last_pos),
 
        });
 
    }
 
@@ -349,48 +363,49 @@ impl ParserTypeParser {
 
        return Ok(())
 
    }
 

	
 
    #[inline]
 
    fn consume_comma(&mut self, iter: &mut TokenIter) {
 
        iter.consume();
 
        self.parse_state = ParseState::ParsedComma;
 
    }
 

	
 
    #[inline]
 
    fn consume_square_parens(&mut self, source: &InputSource, iter: &mut TokenIter) -> Result<(), ParseError> {
 
        // Consume the opening square paren that forms the postfixed array type
 
        let array_start_pos = iter.next_start_position();
 
        iter.consume();
 
        if iter.next() != Some(TokenKind::CloseSquare) {
 
            return Err(ParseError::new_error_str_at_pos(
 
                source, iter.last_valid_pos(),
 
                "unexpected token: expected ']'"
 
            ));
 
        }
 

	
 
        let (_, array_end_pos) = iter.next_positions();
 
        let array_span = InputSpan::from_positions(array_start_pos, array_end_pos);
 
        self.last_pos = array_end_pos;
 
        iter.consume();
 

	
 
        // In the language we put the array specification after a type, in the
 
        // type tree we need to make the array type the parent, so:
 
        let insert_depth = self.cur_depth();
 
        let insert_at = self.entries.iter().rposition(|e| e.depth == insert_depth).unwrap();
 
        let num_embedded = self.entries[insert_at].element.variant.num_embedded();
 

	
 
        self.entries.insert(insert_at, Entry{
 
            element: ParserTypeElement{
 
                element_span: array_span,
 
                variant: ParserTypeVariant::Array,
 
            },
 
            depth: insert_depth
 
        });
 

	
 
        // Need to increment the depth of the child types
 
        self.entries[insert_at + 1].depth += 1; // element we applied the array type to
 
        if num_embedded != 0 {
 
            for index in insert_at + 2..self.entries.len() {
 
                let element = &mut self.entries[index];
 
                if element.depth >= insert_depth + 1 {
 
                    element.depth += 1;
 
                } else {
 
                    break;
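For reference, a sketch of how the new entry point is typically driven from pass_definitions.rs (the PassCtx, module and iterator setup are assumed from the first file; the comments on the last two arguments follow the signature shown above):

    // Sketch only: mirrors the call pattern used throughout the first file.
    let poly_vars = ctx.heap[definition_id].poly_vars();
    let parser_type = self.type_parser.consume_parser_type(
        iter, &ctx.heap, &module.source, &ctx.symbols, poly_vars,
        definition_id, SymbolScope::Definition(definition_id),
        true,  // allow_inference: elided polymorphic arguments become Inferred
        None,  // inside_angular_bracket: Some(pos) only when a '<' was already
               // consumed, as in the channel<T> and cast<T> call sites
    )?;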