Changeset - c87205ed6292
[Not reviewed]
MH <contact@maxhenger.nl> - 2021-04-02 14:59:36
cleanup consume_type function name and rename ParseError
10 files changed with 224 insertions and 230 deletions:
0 comments (0 inline, 0 general)
src/protocol/inputsource.rs
 
@@ -84,15 +84,12 @@ impl InputSource {
 
    pub fn seek(&mut self, pos: InputPosition) {
 
        debug_assert!(pos.offset < self.input.len());
 
        self.line = pos.line;
 
        self.column = pos.column;
 
        self.offset = pos.offset;
 
    }
 
    // pub fn error<S: ToString>(&self, message: S) -> ParseError {
 
    //     self.pos().parse_error(message)
 
    // }
 
    pub fn is_eof(&self) -> bool {
 
        self.next() == None
 
    }
 

	
 
    pub fn next(&self) -> Option<u8> {
 
        if self.offset < self.input.len() {
 
@@ -164,15 +161,12 @@ impl InputPosition {
 
                break;
 
            }
 
            end += 1;
 
        }
 
        &source.input[start..end]
 
    }
 
    // fn parse_error<S: ToString>(&self, message: S) -> ParseError {
 
    //     ParseError { position: *self, message: message.to_string(), backtrace: Backtrace::new() }
 
    // }
 
    fn eval_error<S: ToString>(&self, message: S) -> EvalError {
 
        EvalError { position: *self, message: message.to_string(), backtrace: Backtrace::new() }
 
    }
 

	
 
    pub(crate) fn col(&self) -> usize { self.column }
 
}
 
@@ -275,17 +269,17 @@ impl fmt::Display for ParseErrorStatement {
 

	
 
        Ok(())
 
    }
 
}
 

	
 
#[derive(Debug)]
 
pub struct ParseError2 {
 
pub struct ParseError {
 
    pub(crate) statements: Vec<ParseErrorStatement>
 
}
 

	
 
impl fmt::Display for ParseError2 {
 
impl fmt::Display for ParseError {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        if self.statements.is_empty() {
 
            return Ok(())
 
        }
 

	
 
        self.statements[0].fmt(f)?;
 
@@ -295,13 +289,13 @@ impl fmt::Display for ParseError2 {
 
        }
 

	
 
        Ok(())
 
    }
 
}
 

	
 
impl ParseError2 {
 
impl ParseError {
 
    pub fn empty() -> Self {
 
        Self{ statements: Vec::new() }
 
    }
 

	
 
    pub fn new_error(source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source(ParseErrorType::Error, source, position, msg))}
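For orientation before the call-site changes below: ParseError (previously ParseError2) is a list of positioned error statements, new_error builds a single-statement error, with_postfixed_info (used in parser/mod.rs further down) appends follow-up statements, and the Display impl prints the statements in order. A minimal self-contained sketch of that shape, with the InputSource/InputPosition/Backtrace details collapsed to plain line/column fields:

use std::fmt;

// Simplified stand-in for ParseErrorStatement: the real one is built via
// from_source() from an InputSource and InputPosition and carries an error
// type; here it is collapsed to a position and a message.
#[derive(Debug)]
struct ErrorStatement {
    line: usize,
    column: usize,
    message: String,
}

impl fmt::Display for ErrorStatement {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "error at {}:{}: {}", self.line, self.column, self.message)
    }
}

#[derive(Debug)]
pub struct ParseError {
    statements: Vec<ErrorStatement>,
}

impl ParseError {
    pub fn new_error(line: usize, column: usize, msg: &str) -> Self {
        Self { statements: vec![ErrorStatement { line, column, message: msg.to_string() }] }
    }

    // Mirrors the chaining style of with_postfixed_info seen in parser/mod.rs.
    pub fn with_postfixed_info(mut self, line: usize, column: usize, msg: &str) -> Self {
        self.statements.push(ErrorStatement { line, column, message: msg.to_string() });
        self
    }
}

impl fmt::Display for ParseError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Like the real impl: nothing to print for an empty error, otherwise
        // print the statements one after another.
        for (idx, stmt) in self.statements.iter().enumerate() {
            if idx > 0 { writeln!(f)?; }
            stmt.fmt(f)?;
        }
        Ok(())
    }
}

fn main() {
    let err = ParseError::new_error(12, 4, "Double definition of module name in the same file")
        .with_postfixed_info(3, 1, "Previous definition was here");
    println!("{}", err);
}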
src/protocol/lexer.rs
 
@@ -113,16 +113,16 @@ pub struct Lexer<'a> {
 
}
 

	
 
impl Lexer<'_> {
 
    pub fn new(source: &mut InputSource) -> Lexer {
 
        Lexer { source, level: 0 }
 
    }
 
    fn error_at_pos(&self, msg: &str) -> ParseError2 {
 
        ParseError2::new_error(self.source, self.source.pos(), msg)
 
    fn error_at_pos(&self, msg: &str) -> ParseError {
 
        ParseError::new_error(self.source, self.source.pos(), msg)
 
    }
 
    fn consume_line(&mut self) -> Result<Vec<u8>, ParseError2> {
 
    fn consume_line(&mut self) -> Result<Vec<u8>, ParseError> {
 
        let mut result: Vec<u8> = Vec::new();
 
        let mut next = self.source.next();
 
        while next.is_some() && next != Some(b'\n') && next != Some(b'\r') {
 
            if !(is_vchar(next) || is_wsp(next)) {
 
                return Err(self.error_at_pos("Expected visible character or whitespace"));
 
            }
 
@@ -135,13 +135,13 @@ impl Lexer<'_> {
 
        }
 
        if next == Some(b'\r') && self.source.next() == Some(b'\n') {
 
            self.source.consume();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_whitespace(&mut self, expected: bool) -> Result<(), ParseError2> {
 
    fn consume_whitespace(&mut self, expected: bool) -> Result<(), ParseError> {
 
        let mut found = false;
 
        let mut next = self.source.next();
 
        while next.is_some() {
 
            if next == Some(b' ')
 
                || next == Some(b'\t')
 
                || next == Some(b'\r')
 
@@ -205,13 +205,13 @@ impl Lexer<'_> {
 

	
 
        // Word boundary
 
        let next = self.source.lookahead(keyword.len());
 
        if next.is_none() { return true; }
 
        return !is_ident_rest(next);
 
    }
 
    fn consume_keyword(&mut self, keyword: &[u8]) -> Result<(), ParseError2> {
 
    fn consume_keyword(&mut self, keyword: &[u8]) -> Result<(), ParseError> {
 
        let len = keyword.len();
 
        for i in 0..len {
 
            let expected = Some(lowercase(keyword[i]));
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected keyword '{}'", String::from_utf8_lossy(keyword))));
 
@@ -225,13 +225,13 @@ impl Lexer<'_> {
 
        }
 
        Ok(())
 
    }
 
    fn has_string(&self, string: &[u8]) -> bool {
 
        self.source.has(string)
 
    }
 
    fn consume_string(&mut self, string: &[u8]) -> Result<(), ParseError2> {
 
    fn consume_string(&mut self, string: &[u8]) -> Result<(), ParseError> {
 
        let len = string.len();
 
        for i in 0..len {
 
            let expected = Some(string[i]);
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected {}", String::from_utf8_lossy(string))));
 
@@ -244,14 +244,14 @@ impl Lexer<'_> {
 
    /// `Ok(None)` will be returned. Otherwise will consume the comma separated
 
    /// values, allowing a trailing comma. If no comma is found and the closing
 
    /// delimiter is not found, then a parse error with `expected_end_msg` is
 
    /// returned.
 
    fn consume_comma_separated<T, F>(
 
        &mut self, h: &mut Heap, open: u8, close: u8, expected_end_msg: &str, func: F
 
    ) -> Result<Option<Vec<T>>, ParseError2>
 
        where F: Fn(&mut Lexer, &mut Heap) -> Result<T, ParseError2>
 
    ) -> Result<Option<Vec<T>>, ParseError>
 
        where F: Fn(&mut Lexer, &mut Heap) -> Result<T, ParseError>
 
    {
 
        if Some(open) != self.source.next() {
 
            return Ok(None)
 
        }
 

	
 
        self.source.consume();
 
@@ -261,13 +261,13 @@ impl Lexer<'_> {
 

	
 
        loop {
 
            if Some(close) == self.source.next() {
 
                self.source.consume();
 
                break;
 
            } else if !had_comma {
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    &self.source, self.source.pos(), expected_end_msg
 
                ));
 
            }
 

	
 
            elements.push(func(self, h)?);
 
            self.consume_whitespace(false)?;
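The doc comment above describes the contract of consume_comma_separated. A stand-alone sketch of that contract over a plain byte cursor, with the whitespace handling and the Lexer/Heap plumbing of the real function omitted:

// Returns Ok(None) when the opening delimiter is absent, allows a trailing
// comma, and reports the caller-supplied message when neither a comma nor
// the closing delimiter follows an element.
fn consume_comma_separated<T>(
    input: &[u8],
    pos: &mut usize,
    open: u8,
    close: u8,
    expected_end_msg: &str,
    mut parse_element: impl FnMut(&[u8], &mut usize) -> Result<T, String>,
) -> Result<Option<Vec<T>>, String> {
    if input.get(*pos) != Some(&open) {
        return Ok(None);
    }
    *pos += 1;

    let mut elements = Vec::new();
    let mut had_comma = true;
    loop {
        if input.get(*pos) == Some(&close) {
            *pos += 1;
            break;
        } else if !had_comma {
            return Err(expected_end_msg.to_string());
        }
        elements.push(parse_element(input, pos)?);
        had_comma = input.get(*pos) == Some(&b',');
        if had_comma {
            *pos += 1;
        }
    }
    Ok(Some(elements))
}

fn main() {
    // Single-byte "identifiers" between '<' and '>'; the trailing comma is accepted.
    let source = b"<a,b,c,>";
    let mut pos = 0;
    let parsed = consume_comma_separated(source, &mut pos, b'<', b'>',
        "Expected the end of the polymorphic argument list",
        |input, pos| { let v = input[*pos] as char; *pos += 1; Ok(v) });
    assert_eq!(parsed, Ok(Some(vec!['a', 'b', 'c'])));

    // Missing both the comma and the closing delimiter: the message is reported.
    let mut pos = 0;
    assert!(consume_comma_separated(b"<ab>", &mut pos, b'<', b'>', "Expected end",
        |input, pos| { let v = input[*pos] as char; *pos += 1; Ok(v) }).is_err());
}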
 
@@ -311,13 +311,13 @@ impl Lexer<'_> {
 
            if had_comma {
 
                self.source.consume();
 
                if self.consume_whitespace(false).is_err() { return Err(()); }
 
            }
 
        }
 
    }
 
    fn consume_ident(&mut self) -> Result<Vec<u8>, ParseError2> {
 
    fn consume_ident(&mut self) -> Result<Vec<u8>, ParseError> {
 
        if !self.has_identifier() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let mut result = Vec::new();
 
        let mut next = self.source.next();
 
        result.push(next.unwrap());
 
@@ -330,13 +330,13 @@ impl Lexer<'_> {
 
        }
 
        Ok(result)
 
    }
 
    fn has_integer(&mut self) -> bool {
 
        is_integer_start(self.source.next())
 
    }
 
    fn consume_integer(&mut self) -> Result<i64, ParseError2> {
 
    fn consume_integer(&mut self) -> Result<i64, ParseError> {
 
        let position = self.source.pos();
 
        let mut data = Vec::new();
 
        let mut next = self.source.next();
 
        while is_integer_rest(next) {
 
            data.push(next.unwrap());
 
            self.source.consume();
 
@@ -363,13 +363,13 @@ impl Lexer<'_> {
 
                // Assume decimal
 
                let data = String::from_utf8_lossy(&data);
 
                i64::from_str_radix(&data, 10)
 
            };
 

	
 
            if let Err(_err) = parsed {
 
                return Err(ParseError2::new_error(&self.source, position, "Invalid integer constant"));
 
                return Err(ParseError::new_error(&self.source, position, "Invalid integer constant"));
 
            }
 

	
 
            Ok(parsed.unwrap())
 
        }
 
    }
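A small sketch of the fallback shown above: once the token has been collected, try to interpret it and map any failure to the single "Invalid integer constant" error. The hexadecimal branch is an assumption; only the decimal fallback is visible in this hunk:

fn parse_integer_token(data: &[u8]) -> Result<i64, String> {
    let text = String::from_utf8_lossy(data);
    let parsed = if let Some(hex_digits) = text.strip_prefix("0x") {
        // Assumed branch: a leading "0x" marks a hexadecimal constant.
        i64::from_str_radix(hex_digits, 16)
    } else {
        // Assume decimal, as in the hunk above.
        i64::from_str_radix(&text, 10)
    };
    parsed.map_err(|_err| "Invalid integer constant".to_string())
}

fn main() {
    assert_eq!(parse_integer_token(b"1234"), Ok(1234));
    assert_eq!(parse_integer_token(b"0x1f"), Ok(31));
    assert!(parse_integer_token(b"12q4").is_err());
}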
 

	
 
@@ -423,29 +423,29 @@ impl Lexer<'_> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return false;
 
        }
 
        let next = self.source.next();
 
        is_ident_start(next)
 
    }
 
    fn consume_identifier(&mut self) -> Result<Identifier, ParseError2> {
 
    fn consume_identifier(&mut self) -> Result<Identifier, ParseError> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let position = self.source.pos();
 
        let value = self.consume_ident()?;
 
        Ok(Identifier{ position, value })
 
    }
 
    fn consume_identifier_spilled(&mut self) -> Result<(), ParseError2> {
 
    fn consume_identifier_spilled(&mut self) -> Result<(), ParseError> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        self.consume_ident()?;
 
        Ok(())
 
    }
 

	
 
    fn consume_namespaced_identifier(&mut self, h: &mut Heap) -> Result<NamespacedIdentifier, ParseError2> {
 
    fn consume_namespaced_identifier(&mut self, h: &mut Heap) -> Result<NamespacedIdentifier, ParseError> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        // Consumes a part of the namespaced identifier, returns a boolean
 
        // indicating whether polymorphic arguments were specified.
 
@@ -453,13 +453,13 @@ impl Lexer<'_> {
 
        //  arguments, assume we have reached the end of the namespaced 
 
        //  identifier and are instead dealing with a less-than operator. Ugly?
 
        //  Yes. Needs tokenizer? Yes. 
 
        fn consume_part(
 
            l: &mut Lexer, h: &mut Heap, ident: &mut NamespacedIdentifier,
 
            backup_pos: &mut InputPosition
 
        ) -> Result<(), ParseError2> {
 
        ) -> Result<(), ParseError> {
 
            // Consume identifier
 
            if !ident.value.is_empty() {
 
                ident.value.extend(b"::");
 
            }
 
            let ident_start = ident.value.len();
 
            ident.value.extend(l.consume_ident()?);
 
@@ -511,20 +511,20 @@ impl Lexer<'_> {
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(ident)
 
    }
 

	
 
    fn consume_namespaced_identifier_spilled(&mut self) -> Result<(), ParseError2> {
 
    fn consume_namespaced_identifier_spilled(&mut self) -> Result<(), ParseError> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        debug_log!("consume_nsident2_spilled: {}", debug_line!(self.source));
 

	
 
        fn consume_part_spilled(l: &mut Lexer, backup_pos: &mut InputPosition) -> Result<(), ParseError2> {
 
        fn consume_part_spilled(l: &mut Lexer, backup_pos: &mut InputPosition) -> Result<(), ParseError> {
 
            l.consume_ident()?;
 
            *backup_pos = l.source.pos();
 
            l.consume_whitespace(false)?;
 
            match l.maybe_consume_poly_args_spilled_without_pos_recovery() {
 
                Ok(true) => { *backup_pos = l.source.pos(); },
 
                Ok(false) => {},
 
@@ -549,13 +549,13 @@ impl Lexer<'_> {
 

	
 
    // Types and type annotations
 

	
 
    /// Consumes a type definition. When called the input position should be at
 
    /// the type specification. When done the input position will be at the end
 
    /// of the type specifications (hence may be at whitespace).
 
    fn consume_type2(&mut self, h: &mut Heap, allow_inference: bool) -> Result<ParserTypeId, ParseError2> {
 
    fn consume_type(&mut self, h: &mut Heap, allow_inference: bool) -> Result<ParserTypeId, ParseError> {
 
        // Small helper function to convert in/out polymorphic arguments. Not
 
        // pretty, but return boolean is true if the error is due to inference
 
        // not being allowed
 
        let reduce_port_poly_args = |
 
            heap: &mut Heap,
 
            port_pos: &InputPosition,
 
@@ -574,13 +574,13 @@ impl Lexer<'_> {
 
                1 => Ok(args[0]),
 
                _ => Err(false)
 
            }
 
        };
 

	
 
        // Consume the type
 
        debug_log!("consume_type2: {}", debug_line!(self.source));
 
        debug_log!("consume_type: {}", debug_line!(self.source));
 
        let pos = self.source.pos();
 
        let parser_type_variant = if self.has_keyword(b"msg") {
 
            self.consume_keyword(b"msg")?;
 
            ParserTypeVariant::Message
 
        } else if self.has_keyword(b"boolean") {
 
            self.consume_keyword(b"boolean")?;
 
@@ -599,13 +599,13 @@ impl Lexer<'_> {
 
            ParserTypeVariant::Long
 
        } else if self.has_keyword(b"str") {
 
            self.consume_keyword(b"str")?;
 
            ParserTypeVariant::String
 
        } else if self.has_keyword(b"auto") {
 
            if !allow_inference {
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                        &self.source, pos,
 
                        "Type inference is not allowed here"
 
                ));
 
            }
 

	
 
            self.consume_keyword(b"auto")?;
 
@@ -619,26 +619,26 @@ impl Lexer<'_> {
 
                .map_err(|infer_error|  {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'in' only allows for 1 polymorphic argument"
 
                    };
 
                    ParseError2::new_error(&self.source, pos, msg)
 
                    ParseError::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Input(poly_arg)
 
        } else if self.has_keyword(b"out") {
 
            self.consume_keyword(b"out")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?.unwrap_or_default();
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error| {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'out' only allows for 1 polymorphic argument, but {} were specified"
 
                    };
 
                    ParseError2::new_error(&self.source, pos, msg)
 
                    ParseError::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Output(poly_arg)
 
        } else {
 
            // Must be a symbolic type
 
            let identifier = self.consume_namespaced_identifier(h)?;
 
            ParserTypeVariant::Symbolic(SymbolicParserType{identifier, variant: None, poly_args2: Vec::new()})
 
@@ -647,13 +647,13 @@ impl Lexer<'_> {
 
        // If the type was a basic type (not supporting polymorphic type
 
        // arguments), then we make sure the user did not specify any of them.
 
        let mut backup_pos = self.source.pos();
 
        if !parser_type_variant.supports_polymorphic_args() {
 
            self.consume_whitespace(false)?;
 
            if let Some(b'<') = self.source.next() {
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    &self.source, self.source.pos(),
 
                    "This type does not allow polymorphic arguments"
 
                ));
 
            }
 

	
 
            self.source.seek(backup_pos);
 
@@ -678,13 +678,13 @@ impl Lexer<'_> {
 
                });
 
                backup_pos = self.source.pos();
 

	
 
                // In case we're dealing with another array
 
                self.consume_whitespace(false)?;
 
            } else {
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    &self.source, pos,
 
                    "Expected a closing ']'"
 
                ));
 
            }
 
        }
 

	
 
@@ -754,26 +754,26 @@ impl Lexer<'_> {
 
    /// the input position will remain unmodified and an empty vector will be
 
    /// returned.
 
    ///
 
    /// Polymorphic arguments represent the specification of the parametric
 
    /// types of a polymorphic type: they specify the value of the polymorphic
 
    /// type's polymorphic variables.
 
    fn consume_polymorphic_args(&mut self, h: &mut Heap, allow_inference: bool) -> Result<Option<Vec<ParserTypeId>>, ParseError2> {
 
    fn consume_polymorphic_args(&mut self, h: &mut Heap, allow_inference: bool) -> Result<Option<Vec<ParserTypeId>>, ParseError> {
 
        self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic argument list",
 
            |lexer, heap| lexer.consume_type2(heap, allow_inference)
 
            |lexer, heap| lexer.consume_type(heap, allow_inference)
 
        )
 
    }
 

	
 
    /// Consumes polymorphic variables. These are identifiers that are used
 
    /// within polymorphic types. The input position may be at whitespace. If
 
    /// polymorphic variables are present then the whitespace, wrapping
 
    /// delimiters and the polymorphic variables are consumed. Otherwise the
 
    /// input position will stay where it is. If no polymorphic variables are
 
    /// present then an empty vector will be returned.
 
    fn consume_polymorphic_vars(&mut self, h: &mut Heap) -> Result<Vec<Identifier>, ParseError2> {
 
    fn consume_polymorphic_vars(&mut self, h: &mut Heap) -> Result<Vec<Identifier>, ParseError> {
 
        let backup_pos = self.source.pos();
 
        match self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic variable list",
 
            |lexer, _heap| lexer.consume_identifier()
 
        )? {
 
            Some(poly_vars) => Ok(poly_vars),
 
@@ -783,58 +783,58 @@ impl Lexer<'_> {
 
            }
 
        }
 
    }
 

	
 
    // Parameters
 

	
 
    fn consume_parameter(&mut self, h: &mut Heap) -> Result<ParameterId, ParseError2> {
 
        let parser_type = self.consume_type2(h, false)?;
 
    fn consume_parameter(&mut self, h: &mut Heap) -> Result<ParameterId, ParseError> {
 
        let parser_type = self.consume_type(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let position = self.source.pos();
 
        let identifier = self.consume_identifier()?;
 
        let id =
 
            h.alloc_parameter(|this| Parameter { this, position, parser_type, identifier });
 
        Ok(id)
 
    }
 
    fn consume_parameters(&mut self, h: &mut Heap) -> Result<Vec<ParameterId>, ParseError2> {
 
    fn consume_parameters(&mut self, h: &mut Heap) -> Result<Vec<ParameterId>, ParseError> {
 
        match self.consume_comma_separated(
 
            h, b'(', b')', "Expected the end of the parameter list",
 
            |lexer, heap| lexer.consume_parameter(heap)
 
        )? {
 
            Some(params) => Ok(params),
 
            None => {
 
                Err(ParseError2::new_error(
 
                Err(ParseError::new_error(
 
                    &self.source, self.source.pos(),
 
                    "Expected a parameter list"
 
                ))
 
            }
 
        }
 
    }
 

	
 
    // ====================
 
    // Expressions
 
    // ====================
 

	
 
    fn consume_paren_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_paren_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        let result = self.consume_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b")")?;
 
        Ok(result)
 
    }
 
    fn consume_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested expression"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_assignment_expression(h);
 
        self.level -= 1;
 
        result
 
    }
 
    fn consume_assignment_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_assignment_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let result = self.consume_conditional_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        if self.has_assignment_operator() {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation = self.consume_assignment_operator()?;
 
@@ -864,13 +864,13 @@ impl Lexer<'_> {
 
            || self.has_string(b"<<=")
 
            || self.has_string(b">>=")
 
            || self.has_string(b"&=")
 
            || self.has_string(b"^=")
 
            || self.has_string(b"|=")
 
    }
 
    fn consume_assignment_operator(&mut self) -> Result<AssignmentOperator, ParseError2> {
 
    fn consume_assignment_operator(&mut self) -> Result<AssignmentOperator, ParseError> {
 
        if self.has_string(b"=") {
 
            self.consume_string(b"=")?;
 
            Ok(AssignmentOperator::Set)
 
        } else if self.has_string(b"*=") {
 
            self.consume_string(b"*=")?;
 
            Ok(AssignmentOperator::Multiplied)
 
@@ -902,13 +902,13 @@ impl Lexer<'_> {
 
            self.consume_string(b"|=")?;
 
            Ok(AssignmentOperator::BitwiseOred)
 
        } else {
 
            Err(self.error_at_pos("Expected assignment operator"))
 
        }
 
    }
 
    fn consume_conditional_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_conditional_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let result = self.consume_concat_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        if self.has_string(b"?") {
 
            let position = self.source.pos();
 
            let test = result;
 
            self.consume_string(b"?")?;
 
@@ -929,13 +929,13 @@ impl Lexer<'_> {
 
            })
 
            .upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 
    fn consume_concat_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_concat_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_lor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"@") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"@")?;
 
@@ -954,13 +954,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_lor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_lor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_land_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"||") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"||")?;
 
@@ -979,13 +979,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_land_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_land_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_bor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"&&") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"&&")?;
 
@@ -1004,13 +1004,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_bor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_bor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_xor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"|") && !self.has_string(b"||") && !self.has_string(b"|=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"|")?;
 
@@ -1029,13 +1029,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_xor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_xor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_band_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"^") && !self.has_string(b"^=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"^")?;
 
@@ -1054,13 +1054,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_band_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_band_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_eq_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"&") && !self.has_string(b"&&") && !self.has_string(b"&=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"&")?;
 
@@ -1079,13 +1079,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_eq_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_eq_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_rel_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"==") || self.has_string(b"!=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
@@ -1110,13 +1110,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_rel_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_rel_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_shift_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"<=")
 
            || self.has_string(b">=")
 
            || self.has_string(b"<") && !self.has_string(b"<<=")
 
            || self.has_string(b">") && !self.has_string(b">>=")
 
@@ -1151,13 +1151,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_shift_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_shift_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_add_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"<<") && !self.has_string(b"<<=")
 
            || self.has_string(b">>") && !self.has_string(b">>=")
 
        {
 
            let position = self.source.pos();
 
@@ -1184,13 +1184,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_add_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_add_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_mul_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"+") && !self.has_string(b"+=")
 
            || self.has_string(b"-") && !self.has_string(b"-=")
 
        {
 
            let position = self.source.pos();
 
@@ -1217,13 +1217,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_mul_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_mul_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_prefix_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"*") && !self.has_string(b"*=")
 
            || self.has_string(b"/") && !self.has_string(b"/=")
 
            || self.has_string(b"%") && !self.has_string(b"%=")
 
        {
 
@@ -1254,13 +1254,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_prefix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_prefix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.has_string(b"+")
 
            || self.has_string(b"-")
 
            || self.has_string(b"~")
 
            || self.has_string(b"!")
 
        {
 
            let position = self.source.pos();
 
@@ -1306,13 +1306,13 @@ impl Lexer<'_> {
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast());
 
        }
 
        self.consume_postfix_expression(h)
 
    }
 
    fn consume_postfix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_postfix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_primary_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"++")
 
            || self.has_string(b"--")
 
            || self.has_string(b"[")
 
            || (self.has_string(b".") && !self.has_string(b".."))
 
@@ -1417,13 +1417,13 @@ impl Lexer<'_> {
 
                    })
 
                    .upcast();
 
            }
 
        }
 
        Ok(result)
 
    }
 
    fn consume_primary_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_primary_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.has_string(b"(") {
 
            return self.consume_paren_expression(h);
 
        }
 
        if self.has_string(b"{") {
 
            return Ok(self.consume_array_expression(h)?.upcast());
 
        }
 
@@ -1435,13 +1435,13 @@ impl Lexer<'_> {
 
        }
 
        if self.has_call_expression() {
 
            return Ok(self.consume_call_expression(h)?.upcast());
 
        }
 
        Ok(self.consume_variable_expression(h)?.upcast())
 
    }
 
    fn consume_array_expression(&mut self, h: &mut Heap) -> Result<ArrayExpressionId, ParseError2> {
 
    fn consume_array_expression(&mut self, h: &mut Heap) -> Result<ArrayExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        let mut elements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b"}") {
 
            while self.source.next().is_some() {
 
@@ -1469,13 +1469,13 @@ impl Lexer<'_> {
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
    }
 
    fn consume_builtin_literal_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<LiteralExpressionId, ParseError2> {
 
    ) -> Result<LiteralExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        let value;
 
        if self.has_keyword(b"null") {
 
            self.consume_keyword(b"null")?;
 
            value = Literal::Null;
 
        } else if self.has_keyword(b"true") {
 
@@ -1525,13 +1525,13 @@ impl Lexer<'_> {
 
            self.source.next() == Some(b'{');
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 

	
 
    fn consume_struct_literal_expression(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError2> {
 
    fn consume_struct_literal_expression(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError> {
 
        // Consume identifier and polymorphic arguments
 
        debug_log!("consume_struct_literal_expression: {}", debug_line!(self.source));
 
        let position = self.source.pos();
 
        let identifier = self.consume_namespaced_identifier(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
@@ -1546,13 +1546,13 @@ impl Lexer<'_> {
 
                let value = lexer.consume_expression(heap)?;
 

	
 
                Ok(LiteralStructField{ identifier, value, field_idx: 0 })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError2::new_error(
 
            None => return Err(ParseError::new_error(
 
                self.source, self.source.pos(),
 
                "A struct literal must be followed by its field values"
 
            ))
 
        };
 

	
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression{
 
@@ -1586,13 +1586,13 @@ impl Lexer<'_> {
 
            result = true;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_call_expression(&mut self, h: &mut Heap) -> Result<CallExpressionId, ParseError2> {
 
    fn consume_call_expression(&mut self, h: &mut Heap) -> Result<CallExpressionId, ParseError> {
 
        let position = self.source.pos();
 

	
 
        // Consume method identifier
 
        // TODO: @token Replace this conditional polymorphic arg parsing once we have a tokenizer.
 
        debug_log!("consume_call_expression: {}", debug_line!(self.source));
 
        let method;
 
@@ -1654,13 +1654,13 @@ impl Lexer<'_> {
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn consume_variable_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<VariableExpressionId, ParseError2> {
 
    ) -> Result<VariableExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        debug_log!("consume_variable_expression: {}", debug_line!(self.source));
 

	
 
        // TODO: @token Reimplement when tokenizer is implemented, prevent ambiguities
 
        let identifier = identifier_as_namespaced(self.consume_identifier()?);
 

	
 
@@ -1683,13 +1683,13 @@ impl Lexer<'_> {
 
    /// statement is nested too deeply.
 
    ///
 
    /// `wrap_in_block` may be set to true to ensure that the parsed statement
 
    /// will be wrapped in a block statement if it is not already a block
 
    /// statement. This is used to ensure that all `if`, `while` and `sync`
 
    /// statements have a block statement as body.
 
    fn consume_statement(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError2> {
 
    fn consume_statement(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested statement"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_statement_impl(h, wrap_in_block);
 
        self.level -= 1;
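The wrap_in_block flag documented above forces non-block bodies of if/while/sync statements into a block. A minimal sketch of that idea, assuming a simplified statement enum (the real code allocates block statements in the Heap via consume_statement_impl):

#[derive(Debug, PartialEq)]
enum Stmt {
    Skip,
    Block(Vec<Stmt>),
}

// If wrapping is requested and the statement is not already a block,
// put it inside a single-statement block so later passes can rely on
// every if/while/sync body having a block scope.
fn wrap_in_block_if_needed(stmt: Stmt, wrap_in_block: bool) -> Stmt {
    let is_block = matches!(stmt, Stmt::Block(_));
    if wrap_in_block && !is_block {
        Stmt::Block(vec![stmt])
    } else {
        stmt
    }
}

fn main() {
    assert_eq!(wrap_in_block_if_needed(Stmt::Skip, true), Stmt::Block(vec![Stmt::Skip]));
    assert_eq!(wrap_in_block_if_needed(Stmt::Skip, false), Stmt::Skip);
}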
 
@@ -1705,13 +1705,13 @@ impl Lexer<'_> {
 
            // next character is ':', second character is NOT ':'
 
            result = Some(b':') == self.source.next() && Some(b':') != self.source.lookahead(1)
 
        }
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_statement_impl(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError2> {
 
    fn consume_statement_impl(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError> {
 
        // Parse and allocate statement
 
        let mut must_wrap = true;
 
        let mut stmt_id = if self.has_string(b"{") {
 
            must_wrap = false;
 
            self.consume_block_statement(h)?
 
        } else if self.has_keyword(b"skip") {
 
@@ -1783,13 +1783,13 @@ impl Lexer<'_> {
 
            }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_block_statement(&mut self, h: &mut Heap) -> Result<StatementId, ParseError2> {
 
    fn consume_block_statement(&mut self, h: &mut Heap) -> Result<StatementId, ParseError> {
 
        let position = self.source.pos();
 
        let mut statements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        while self.has_local_statement() {
 
            let (local_id, stmt_id) = self.consume_local_statement(h)?;
 
@@ -1816,25 +1816,25 @@ impl Lexer<'_> {
 
                locals: Vec::new(),
 
                labels: Vec::new(),
 
            })
 
            .upcast())
 
        }
 
    }
 
    fn consume_local_statement(&mut self, h: &mut Heap) -> Result<(LocalStatementId, Option<ExpressionStatementId>), ParseError2> {
 
    fn consume_local_statement(&mut self, h: &mut Heap) -> Result<(LocalStatementId, Option<ExpressionStatementId>), ParseError> {
 
        if self.has_keyword(b"channel") {
 
            let local_id = self.consume_channel_statement(h)?.upcast();
 
            Ok((local_id, None))
 
        } else {
 
            let (memory_id, stmt_id) = self.consume_memory_statement(h)?;
 
            Ok((memory_id.upcast(), Some(stmt_id)))
 
        }
 
    }
 
    fn consume_channel_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ChannelStatementId, ParseError2> {
 
    ) -> Result<ChannelStatementId, ParseError> {
 
        // Consume channel statement and polymorphic argument if specified.
 
        // Needs a tiny bit of special parsing to ensure the right amount of
 
        // whitespace is present.
 
        let position = self.source.pos();
 
        self.consume_keyword(b"channel")?;
 

	
 
@@ -1843,13 +1843,13 @@ impl Lexer<'_> {
 
        let poly_args = self.consume_polymorphic_args(h, true)?.unwrap_or_default();
 
        let poly_arg_id = match poly_args.len() {
 
            0 => h.alloc_parser_type(|this| ParserType{
 
                this, pos: position.clone(), variant: ParserTypeVariant::Inferred,
 
            }),
 
            1 => poly_args[0],
 
            _ => return Err(ParseError2::new_error(
 
            _ => return Err(ParseError::new_error(
 
                &self.source, self.source.pos(),
 
                "port construction using 'channel' accepts up to 1 polymorphic argument"
 
            ))
 
        };
 
        self.consume_whitespace(false)?;
 

	
 
@@ -1891,15 +1891,15 @@ impl Lexer<'_> {
 
            from: out_port,
 
            to: in_port,
 
            relative_pos_in_block: 0,
 
            next: None,
 
        }))
 
    }
 
    fn consume_memory_statement(&mut self, h: &mut Heap) -> Result<(MemoryStatementId, ExpressionStatementId), ParseError2> {
 
    fn consume_memory_statement(&mut self, h: &mut Heap) -> Result<(MemoryStatementId, ExpressionStatementId), ParseError> {
 
        let position = self.source.pos();
 
        let parser_type = self.consume_type2(h, true)?;
 
        let parser_type = self.consume_type(h, true)?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let assignment_position = self.source.pos();
 
        self.consume_string(b"=")?;
 
        self.consume_whitespace(false)?;
 
@@ -1946,13 +1946,13 @@ impl Lexer<'_> {
 
        });
 
        Ok((memory_stmt_id, assignment_stmt_id))
 
    }
 
    fn consume_labeled_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<LabeledStatementId, ParseError2> {
 
    ) -> Result<LabeledStatementId, ParseError> {
 
        let position = self.source.pos();
 
        let label = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b":")?;
 
        self.consume_whitespace(false)?;
 
        let body = self.consume_statement(h, false)?;
 
@@ -1962,20 +1962,20 @@ impl Lexer<'_> {
 
            label,
 
            body,
 
            relative_pos_in_block: 0,
 
            in_sync: None,
 
        }))
 
    }
 
    fn consume_skip_statement(&mut self, h: &mut Heap) -> Result<SkipStatementId, ParseError2> {
 
    fn consume_skip_statement(&mut self, h: &mut Heap) -> Result<SkipStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"skip")?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }))
 
    }
 
    fn consume_if_statement(&mut self, h: &mut Heap) -> Result<IfStatementId, ParseError2> {
 
    fn consume_if_statement(&mut self, h: &mut Heap) -> Result<IfStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"if")?;
 
        self.consume_whitespace(false)?;
 
        let test = self.consume_paren_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        let true_body = self.consume_statement(h, true)?;
 
@@ -1986,13 +1986,13 @@ impl Lexer<'_> {
 
            self.consume_statement(h, true)?
 
        } else {
 
            h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }).upcast()
 
        };
 
        Ok(h.alloc_if_statement(|this| IfStatement { this, position, test, true_body, false_body, end_if: None }))
 
    }
 
    fn consume_while_statement(&mut self, h: &mut Heap) -> Result<WhileStatementId, ParseError2> {
 
    fn consume_while_statement(&mut self, h: &mut Heap) -> Result<WhileStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"while")?;
 
        self.consume_whitespace(false)?;
 
        let test = self.consume_paren_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        let body = self.consume_statement(h, true)?;
 
@@ -2002,13 +2002,13 @@ impl Lexer<'_> {
 
            test,
 
            body,
 
            end_while: None,
 
            in_sync: None,
 
        }))
 
    }
 
    fn consume_break_statement(&mut self, h: &mut Heap) -> Result<BreakStatementId, ParseError2> {
 
    fn consume_break_statement(&mut self, h: &mut Heap) -> Result<BreakStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"break")?;
 
        self.consume_whitespace(false)?;
 
        let label;
 
        if self.has_identifier() {
 
            label = Some(self.consume_identifier()?);
 
@@ -2019,13 +2019,13 @@ impl Lexer<'_> {
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_break_statement(|this| BreakStatement { this, position, label, target: None }))
 
    }
 
    fn consume_continue_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ContinueStatementId, ParseError2> {
 
    ) -> Result<ContinueStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"continue")?;
 
        self.consume_whitespace(false)?;
 
        let label;
 
        if self.has_identifier() {
 
            label = Some(self.consume_identifier()?);
 
@@ -2041,13 +2041,13 @@ impl Lexer<'_> {
 
            target: None,
 
        }))
 
    }
 
    fn consume_synchronous_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<SynchronousStatementId, ParseError2> {
 
    ) -> Result<SynchronousStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"synchronous")?;
 
        self.consume_whitespace(false)?;
 
        // TODO: What was the purpose of this? Seems superfluous and confusing?
 
        // let mut parameters = Vec::new();
 
        // if self.has_string(b"(") {
 
@@ -2062,26 +2062,26 @@ impl Lexer<'_> {
 
            position,
 
            body,
 
            end_sync: None,
 
            parent_scope: None,
 
        }))
 
    }
 
    fn consume_return_statement(&mut self, h: &mut Heap) -> Result<ReturnStatementId, ParseError2> {
 
    fn consume_return_statement(&mut self, h: &mut Heap) -> Result<ReturnStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"return")?;
 
        self.consume_whitespace(false)?;
 
        let expression = if self.has_string(b"(") {
 
            self.consume_paren_expression(h)
 
        } else {
 
            self.consume_expression(h)
 
        }?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_return_statement(|this| ReturnStatement { this, position, expression }))
 
    }
 
    fn consume_assert_statement(&mut self, h: &mut Heap) -> Result<AssertStatementId, ParseError2> {
 
    fn consume_assert_statement(&mut self, h: &mut Heap) -> Result<AssertStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"assert")?;
 
        self.consume_whitespace(false)?;
 
        let expression = if self.has_string(b"(") {
 
            self.consume_paren_expression(h)
 
        } else {
 
@@ -2093,34 +2093,34 @@ impl Lexer<'_> {
 
            this,
 
            position,
 
            expression,
 
            next: None,
 
        }))
 
    }
 
    fn consume_goto_statement(&mut self, h: &mut Heap) -> Result<GotoStatementId, ParseError2> {
 
    fn consume_goto_statement(&mut self, h: &mut Heap) -> Result<GotoStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"goto")?;
 
        self.consume_whitespace(false)?;
 
        let label = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_goto_statement(|this| GotoStatement { this, position, label, target: None }))
 
    }
 
    fn consume_new_statement(&mut self, h: &mut Heap) -> Result<NewStatementId, ParseError2> {
 
    fn consume_new_statement(&mut self, h: &mut Heap) -> Result<NewStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"new")?;
 
        self.consume_whitespace(false)?;
 
        let expression = self.consume_call_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_new_statement(|this| NewStatement { this, position, expression, next: None }))
 
    }
 
    fn consume_expression_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ExpressionStatementId, ParseError2> {
 
    ) -> Result<ExpressionStatementId, ParseError> {
 
        let position = self.source.pos();
 
        let expression = self.consume_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_expression_statement(|this| ExpressionStatement {
 
            this,
 
@@ -2137,24 +2137,24 @@ impl Lexer<'_> {
 
    fn has_symbol_definition(&self) -> bool {
 
        self.has_keyword(b"composite")
 
            || self.has_keyword(b"primitive")
 
            || self.has_type_keyword()
 
            || self.has_identifier()
 
    }
 
    fn consume_symbol_definition(&mut self, h: &mut Heap) -> Result<DefinitionId, ParseError2> {
 
    fn consume_symbol_definition(&mut self, h: &mut Heap) -> Result<DefinitionId, ParseError> {
 
        if self.has_keyword(b"struct") {
 
            Ok(self.consume_struct_definition(h)?.upcast())
 
        } else if self.has_keyword(b"enum") {
 
            Ok(self.consume_enum_definition(h)?.upcast())
 
        } else if self.has_keyword(b"composite") || self.has_keyword(b"primitive") {
 
            Ok(self.consume_component_definition(h)?.upcast())
 
        } else {
 
            Ok(self.consume_function_definition(h)?.upcast())
 
        }
 
    }
 
    fn consume_struct_definition(&mut self, h: &mut Heap) -> Result<StructId, ParseError2> {
 
    fn consume_struct_definition(&mut self, h: &mut Heap) -> Result<StructId, ParseError> {
 
        // Parse "struct" keyword, optional polyvars and its identifier
 
        let struct_pos = self.source.pos();
 
        self.consume_keyword(b"struct")?;
 
        self.consume_whitespace(true)?;
 
        let struct_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
@@ -2163,21 +2163,21 @@ impl Lexer<'_> {
 

	
 
        // Parse struct fields
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let position = lexer.source.pos();
 
                let parser_type = lexer.consume_type2(heap, false)?;
 
                let parser_type = lexer.consume_type(heap, false)?;
 
                lexer.consume_whitespace(true)?;
 
                let field = lexer.consume_identifier()?;
 

	
 
                Ok(StructFieldDefinition{ position, field, parser_type })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError2::new_error(
 
            None => return Err(ParseError::new_error(
 
                self.source, struct_pos,
 
                "An struct definition must be followed by its fields"
 
            )),
 
        };
 

	
 
        // Valid struct definition
 
@@ -2186,13 +2186,13 @@ impl Lexer<'_> {
 
            position: struct_pos,
 
            identifier: struct_ident,
 
            poly_vars,
 
            fields,
 
        }))
 
    }
 
    fn consume_enum_definition(&mut self, h: &mut Heap) -> Result<EnumId, ParseError2> {
 
    fn consume_enum_definition(&mut self, h: &mut Heap) -> Result<EnumId, ParseError> {
 
        // Parse "enum" keyword, optional polyvars and its identifier
 
        let enum_pos = self.source.pos();
 
        self.consume_keyword(b"enum")?;
 
        self.consume_whitespace(true)?;
 
        let enum_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
@@ -2226,13 +2226,13 @@ impl Lexer<'_> {
 
                        EnumVariantValue::Integer(value)
 
                    },
 
                    Some(b'(') => {
 
                        // Embedded type
 
                        lexer.source.consume();
 
                        lexer.consume_whitespace(false)?;
 
                        let embedded_type = lexer.consume_type2(heap, false)?;
 
                        let embedded_type = lexer.consume_type(heap, false)?;
 
                        lexer.consume_whitespace(false)?;
 
                        lexer.consume_string(b")")?;
 
                        EnumVariantValue::Type(embedded_type)
 
                    },
 
                    _ => {
 
                        return Err(lexer.error_at_pos("Expected ',', '=', or '('"));
 
@@ -2240,13 +2240,13 @@ impl Lexer<'_> {
 
                };
 

	
 
                Ok(EnumVariantDefinition{ position, identifier, value })
 
            }
 
        )? {
 
            Some(variants) => variants,
 
            None => return Err(ParseError2::new_error(
 
            None => return Err(ParseError::new_error(
 
                self.source, enum_pos,
 
                "An enum definition must be followed by its variants"
 
            )),
 
        };
 

	
 
        Ok(h.alloc_enum_definition(|this| EnumDefinition{
 
@@ -2254,21 +2254,21 @@ impl Lexer<'_> {
 
            position: enum_pos,
 
            identifier: enum_ident,
 
            poly_vars,
 
            variants,
 
        }))
 
    }
 
    fn consume_component_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError2> {
 
    fn consume_component_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // TODO: Cleanup
 
        if self.has_keyword(b"composite") {
 
            Ok(self.consume_composite_definition(h)?)
 
        } else {
 
            Ok(self.consume_primitive_definition(h)?)
 
        }
 
    }
 
    fn consume_composite_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError2> {
 
    fn consume_composite_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // Parse keyword, optional polyvars and the identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"composite")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
@@ -2288,13 +2288,13 @@ impl Lexer<'_> {
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body
 
        }))
 
    }
 
    fn consume_primitive_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError2> {
 
    fn consume_primitive_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // Consume keyword, optional polyvars and identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"primitive")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
@@ -2314,16 +2314,16 @@ impl Lexer<'_> {
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body
 
        }))
 
    }
 
    fn consume_function_definition(&mut self, h: &mut Heap) -> Result<FunctionId, ParseError2> {
 
    fn consume_function_definition(&mut self, h: &mut Heap) -> Result<FunctionId, ParseError> {
 
        // Consume return type, optional polyvars and identifier
 
        let position = self.source.pos();
 
        let return_type = self.consume_type2(h, false)?;
 
        let return_type = self.consume_type(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
@@ -2347,13 +2347,13 @@ impl Lexer<'_> {
 
        if let Some(c) = self.source.next() {
 
            c == b'#'
 
        } else {
 
            false
 
        }
 
    }
 
    fn consume_pragma(&mut self, h: &mut Heap) -> Result<PragmaId, ParseError2> {
 
    fn consume_pragma(&mut self, h: &mut Heap) -> Result<PragmaId, ParseError> {
 
        let position = self.source.pos();
 
        let next = self.source.next();
 
        if next != Some(b'#') {
 
            return Err(self.error_at_pos("Expected pragma"));
 
        }
 
        self.source.consume();
 
@@ -2394,13 +2394,13 @@ impl Lexer<'_> {
 
        }
 
    }
 

	
 
    fn has_import(&self) -> bool {
 
        self.has_keyword(b"import")
 
    }
 
    fn consume_import(&mut self, h: &mut Heap) -> Result<ImportId, ParseError2> {
 
    fn consume_import(&mut self, h: &mut Heap) -> Result<ImportId, ParseError> {
 
        // Parse the word "import" and the name of the module
 
        let position = self.source.pos();
 
        self.consume_keyword(b"import")?;
 
        self.consume_whitespace(true)?;
 
        let mut value = Vec::new();
 
        let mut last_ident_pos = self.source.pos();
 
@@ -2534,13 +2534,13 @@ impl Lexer<'_> {
 
        };
 

	
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(import)
 
    }
 
    pub fn consume_protocol_description(&mut self, h: &mut Heap) -> Result<RootId, ParseError2> {
 
    pub fn consume_protocol_description(&mut self, h: &mut Heap) -> Result<RootId, ParseError> {
 
        let position = self.source.pos();
 
        let mut pragmas = Vec::new();
 
        let mut imports = Vec::new();
 
        let mut definitions = Vec::new();
 
        self.consume_whitespace(false)?;
 
        while self.has_pragma() {
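
Each of the definition parsers above opens the same way: consume the leading keyword, then whitespace (the true/false argument presumably marking whether whitespace is required), then the identifier and any optional polymorphic variables. A standalone sketch of that shape over a simplified byte-slice lexer, illustrative only and not the crate's Lexer/InputSource:

    // Simplified stand-in for the real lexer; methods mirror the
    // consume_keyword / consume_whitespace / consume_identifier sequence.
    struct MiniLexer<'a> {
        input: &'a [u8],
        offset: usize,
    }

    impl<'a> MiniLexer<'a> {
        fn consume_keyword(&mut self, keyword: &[u8]) -> Result<(), String> {
            if self.input[self.offset..].starts_with(keyword) {
                self.offset += keyword.len();
                Ok(())
            } else {
                Err(format!("expected keyword '{}'", String::from_utf8_lossy(keyword)))
            }
        }

        fn consume_whitespace(&mut self, required: bool) -> Result<(), String> {
            let start = self.offset;
            while self.offset < self.input.len() && self.input[self.offset].is_ascii_whitespace() {
                self.offset += 1;
            }
            if required && self.offset == start {
                return Err("expected whitespace".to_string());
            }
            Ok(())
        }

        fn consume_identifier(&mut self) -> Result<&'a [u8], String> {
            let start = self.offset;
            while self.offset < self.input.len()
                && (self.input[self.offset].is_ascii_alphanumeric() || self.input[self.offset] == b'_')
            {
                self.offset += 1;
            }
            if self.offset == start {
                return Err("expected identifier".to_string());
            }
            Ok(&self.input[start..self.offset])
        }

        // Mirrors the "keyword, required whitespace, identifier" opening used by
        // the composite/primitive definition parsers above.
        fn consume_definition_header(&mut self, keyword: &[u8]) -> Result<&'a [u8], String> {
            self.consume_keyword(keyword)?;
            self.consume_whitespace(true)?;
            let identifier = self.consume_identifier()?;
            self.consume_whitespace(false)?;
            Ok(identifier)
        }
    }

    fn main() {
        let mut lexer = MiniLexer { input: b"composite forwarder(", offset: 0 };
        let ident = lexer.consume_definition_header(b"composite").unwrap();
        assert_eq!(ident, b"forwarder");
    }
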
src/protocol/parser/mod.rs
 
@@ -44,13 +44,13 @@ impl Parser {
 
            module_lookup: HashMap::new(),
 
            symbol_table: SymbolTable::new(),
 
            type_table: TypeTable::new(),
 
        }
 
    }
 

	
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<RootId, ParseError2> {
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<RootId, ParseError> {
 
        // Lex the input source
 
        let mut lex = Lexer::new(&mut source);
 
        let pd = lex.consume_protocol_description(&mut self.heap)?;
 

	
 
        // Seek the module name and version
 
        let root = &self.heap[pd];
 
@@ -61,24 +61,24 @@ impl Parser {
 

	
 
        for pragma in &root.pragmas {
 
            match &self.heap[*pragma] {
 
                Pragma::Module(module) => {
 
                    if !module_name.is_empty() {
 
                        return Err(
 
                            ParseError2::new_error(&source, module.position, "Double definition of module name in the same file")
 
                            ParseError::new_error(&source, module.position, "Double definition of module name in the same file")
 
                                .with_postfixed_info(&source, module_name_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_name_pos = module.position.clone();
 
                    module_name = module.value.clone();
 
                },
 
                Pragma::Version(version) => {
 
                    if module_version.is_some() {
 
                        return Err(
 
                            ParseError2::new_error(&source, version.position, "Double definition of module version")
 
                            ParseError::new_error(&source, version.position, "Double definition of module version")
 
                                .with_postfixed_info(&source, module_version_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_version_pos = version.position.clone();
 
                    module_version = Some(version.version);
 
@@ -105,13 +105,13 @@ impl Parser {
 
                format!("a nameless module")
 
            } else {
 
                format!("module '{}'", String::from_utf8_lossy(&module_name))
 
            };
 

	
 
            return Err(
 
                ParseError2::new_error(&source, module_name_pos, &format!("Double definition of {} across files", module_name_msg))
 
                ParseError::new_error(&source, module_name_pos, &format!("Double definition of {} across files", module_name_msg))
 
                    .with_postfixed_info(&prev_module.source, prev_module_pos, "Other definition was here")
 
            );
 
        }
 

	
 
        self.modules.push(LexedModule{
 
            source,
 
@@ -120,13 +120,13 @@ impl Parser {
 
            root_id: pd
 
        });
 
        self.module_lookup.insert(module_name, cur_module_idx);
 
        Ok(pd)
 
    }
 

	
 
    fn resolve_symbols_and_types(&mut self) -> Result<(), ParseError2> {
 
    fn resolve_symbols_and_types(&mut self) -> Result<(), ParseError> {
 
        // Construct the symbol table to resolve any imports and/or definitions,
 
        // then use the symbol table to actually annotate all of the imports.
 
        // If the type table is constructed correctly then all imports MUST be
 
        // resolvable.
 
        self.symbol_table.build(&self.heap, &self.modules)?;
 

	
 
@@ -184,13 +184,13 @@ impl Parser {
 
        let mut type_ctx = TypeCtx::new(&self.symbol_table, &mut self.heap, &self.modules);
 
        self.type_table.build_base_types(&mut type_ctx)?;
 

	
 
        Ok(())
 
    }
 

	
 
    pub fn parse(&mut self) -> Result<(), ParseError2> {
 
    pub fn parse(&mut self) -> Result<(), ParseError> {
 
        self.resolve_symbols_and_types()?;
 

	
 
        // Validate and link all modules
 
        let mut visit = ValidityAndLinkerVisitor::new();
 
        for module in &self.modules {
 
            let mut ctx = visitor::Ctx{
 
@@ -227,13 +227,13 @@ impl Parser {
 

	
 
        // Perform remaining steps
 
        // TODO: Phase out at some point
 
        for module in &self.modules {
 
            let root_id = module.root_id;
 
            if let Err((position, message)) = Self::parse_inner(&mut self.heap, root_id) {
 
                return Err(ParseError2::new_error(&self.modules[0].source, position, &message))
 
                return Err(ParseError::new_error(&self.modules[0].source, position, &message))
 
            }
 
        }
 

	
 
        // let mut writer = ASTWriter::new();
 
        // let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap();
 
        // writer.write_ast(&mut file, &self.heap);
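
The diagnostics above are all built the same way: ParseError::new_error creates the primary statement at a source position, and with_postfixed_info chains extra context such as "Previous definition was here". A reduced standalone mock of that builder shape, with simplified (line, column) positions and plain strings instead of the real source-tracking types:

    use std::fmt;

    // Illustrative mock only: the real error carries a source reference and an
    // input position; here a statement is just a severity label, a (line, column)
    // pair and a message.
    struct MockError {
        statements: Vec<(String, (u32, u32), String)>,
    }

    impl MockError {
        fn new_error(position: (u32, u32), msg: &str) -> Self {
            Self { statements: vec![("error".to_string(), position, msg.to_string())] }
        }

        // Appends an informational statement, mirroring with_postfixed_info.
        fn with_postfixed_info(mut self, position: (u32, u32), msg: &str) -> Self {
            self.statements.push(("info".to_string(), position, msg.to_string()));
            self
        }
    }

    impl fmt::Display for MockError {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            for (kind, (line, col), msg) in &self.statements {
                writeln!(f, "{} at {}:{}: {}", kind, line, col, msg)?;
            }
            Ok(())
        }
    }

    fn main() {
        let error = MockError::new_error((12, 1), "Double definition of module name in the same file")
            .with_postfixed_info((3, 1), "Previous definition was here");
        println!("{}", error);
    }
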
src/protocol/parser/symbol_table.rs
 
@@ -86,13 +86,13 @@ pub(crate) struct SymbolTable {
 

	
 
impl SymbolTable {
 
    pub(crate) fn new() -> Self {
 
        Self{ module_lookup: HashMap::new(), symbol_lookup: HashMap::new() }
 
    }
 

	
 
    pub(crate) fn build(&mut self, heap: &Heap, modules: &[LexedModule]) -> Result<(), ParseError2> {
 
    pub(crate) fn build(&mut self, heap: &Heap, modules: &[LexedModule]) -> Result<(), ParseError> {
 
        // Sanity checks
 
        debug_assert!(self.module_lookup.is_empty());
 
        debug_assert!(self.symbol_lookup.is_empty());
 
        if cfg!(debug_assertions) {
 
            for (index, module) in modules.iter().enumerate() {
 
                debug_assert_eq!(
 
@@ -126,13 +126,13 @@ impl SymbolTable {
 
                            match self.module_lookup.get(&import.module_name) {
 
                                Some(target_module_id) => {
 
                                    lookup_reserve_size += heap[*target_module_id].definitions.len()
 
                                },
 
                                None => {
 
                                    return Err(
 
                                        ParseError2::new_error(&module.source, import.position, "Cannot resolve module")
 
                                        ParseError::new_error(&module.source, import.position, "Cannot resolve module")
 
                                    );
 
                                }
 
                            }
 
                        } else {
 
                            lookup_reserve_size += import.symbols.len();
 
                        }
 
@@ -155,13 +155,13 @@ impl SymbolTable {
 
                let identifier = definition.identifier();
 
                if let Err(previous_position) = self.add_definition_symbol(
 
                    identifier.position, SymbolKey::from_identifier(module.root_id, &identifier),
 
                    module.root_id, *definition_id
 
                ) {
 
                    return Err(
 
                        ParseError2::new_error(&module.source, definition.position(), "Symbol is multiply defined")
 
                        ParseError::new_error(&module.source, definition.position(), "Symbol is multiply defined")
 
                            .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                    )
 
                }
 
            }
 
        }
 

	
 
@@ -173,39 +173,39 @@ impl SymbolTable {
 
                let import = &heap[*import_id];
 
                match import {
 
                    Import::Module(import) => {
 
                        // Find the module using its name
 
                        let target_root_id = self.resolve_module(&import.module_name);
 
                        if target_root_id.is_none() {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Could not resolve module"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Could not resolve module"));
 
                        }
 
                        let target_root_id = target_root_id.unwrap();
 
                        if target_root_id == module.root_id {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Illegal import of self"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Illegal import of self"));
 
                        }
 

	
 
                        // Add the target module under its alias
 
                        if let Err(previous_position) = self.add_namespace_symbol(
 
                            import.position, SymbolKey::from_identifier(module.root_id, &import.alias),
 
                            target_root_id
 
                        ) {
 
                            return Err(
 
                                ParseError2::new_error(&module.source, import.position, "Symbol is multiply defined")
 
                                ParseError::new_error(&module.source, import.position, "Symbol is multiply defined")
 
                                    .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                            );
 
                        }
 
                    },
 
                    Import::Symbols(import) => {
 
                        // Find the target module using its name
 
                        let target_root_id = self.resolve_module(&import.module_name);
 
                        if target_root_id.is_none() {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Could not resolve module of symbol imports"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Could not resolve module of symbol imports"));
 
                        }
 
                        let target_root_id = target_root_id.unwrap();
 
                        if target_root_id == module.root_id {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Illegal import of self"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Illegal import of self"));
 
                        }
 

	
 
                        // Determine which symbols to import
 
                        if import.symbols.is_empty() {
 
                            // Import of all symbols, not using any aliases
 
                            for definition_id in &heap[target_root_id].definitions {
 
@@ -213,13 +213,13 @@ impl SymbolTable {
 
                                let identifier = definition.identifier();
 
                                if let Err(previous_position) = self.add_definition_symbol(
 
                                    import.position, SymbolKey::from_identifier(module.root_id, identifier),
 
                                    target_root_id, *definition_id
 
                                ) {
 
                                    return Err(
 
                                        ParseError2::new_error(
 
                                        ParseError::new_error(
 
                                            &module.source, import.position,
 
                                            &format!("Imported symbol '{}' is already defined", String::from_utf8_lossy(&identifier.value))
 
                                        )
 
                                        .with_postfixed_info(
 
                                            &modules[target_root_id.index as usize].source,
 
                                            definition.position(),
 
@@ -264,23 +264,23 @@ impl SymbolTable {
 
                                    },
 
                                    None => None
 
                                };
 

	
 
                                if symbol_definition_id.is_none() {
 
                                    return Err(
 
                                        ParseError2::new_error(&module.source, symbol.position, "Could not resolve symbol")
 
                                        ParseError::new_error(&module.source, symbol.position, "Could not resolve symbol")
 
                                    )
 
                                }
 
                                let symbol_definition_id = symbol_definition_id.unwrap();
 

	
 
                                if let Err(previous_position) = self.add_definition_symbol(
 
                                    symbol.position, SymbolKey::from_identifier(module.root_id, &symbol.alias),
 
                                    target_root_id, symbol_definition_id
 
                                ) {
 
                                    return Err(
 
                                        ParseError2::new_error(&module.source, symbol.position, "Symbol is multiply defined")
 
                                        ParseError::new_error(&module.source, symbol.position, "Symbol is multiply defined")
 
                                            .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                                    )
 
                                }
 
                            }
 
                        }
 
                    }
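
Every add_definition_symbol/add_namespace_symbol call above relies on the same contract: try to insert the symbol, and on a collision get the previous position back so the error can point at both definitions. A minimal standalone sketch of that contract, with a plain HashMap standing in for the real SymbolTable:

    use std::collections::HashMap;

    type Position = (u32, u32); // (line, column); simplified stand-in

    // Illustrative table; the real one keys on the module's RootId plus the
    // identifier bytes rather than a (usize, String) pair.
    struct MiniSymbolTable {
        lookup: HashMap<(usize, String), Position>,
    }

    impl MiniSymbolTable {
        fn new() -> Self {
            Self { lookup: HashMap::new() }
        }

        // Returns Err(previous_position) when the symbol already exists, so the
        // caller can attach a "Previous definition was here" statement.
        fn add_definition_symbol(
            &mut self, module: usize, name: &str, position: Position,
        ) -> Result<(), Position> {
            match self.lookup.get(&(module, name.to_string())) {
                Some(previous) => Err(*previous),
                None => {
                    self.lookup.insert((module, name.to_string()), position);
                    Ok(())
                }
            }
        }
    }

    fn main() {
        let mut table = MiniSymbolTable::new();
        assert!(table.add_definition_symbol(0, "Message", (4, 1)).is_ok());
        match table.add_definition_symbol(0, "Message", (9, 1)) {
            Err(previous) => println!("Symbol is multiply defined, previous at {:?}", previous),
            Ok(()) => unreachable!(),
        }
    }
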
src/protocol/parser/type_resolver.rs
 
@@ -1288,13 +1288,13 @@ macro_rules! debug_assert_ptrs_distinct {
 
        debug_assert_ptrs_distinct!($ptr1, $ptr2);
 
        debug_assert_ptrs_distinct!($ptr2, $($tail),+);
 
    };
 
}
 

	
 
impl TypeResolvingVisitor {
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError2> {
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError> {
 
        // Keep inferring until we can no longer make any progress
 
        while let Some(next_expr_id) = self.expr_queued.iter().next() {
 
            let next_expr_id = *next_expr_id;
 
            self.expr_queued.remove(&next_expr_id);
 
            self.progress_expr(ctx, next_expr_id)?;
 
        }
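
resolve_types drains expr_queued until no expression makes further progress, re-queueing only the expressions whose types may have changed. A self-contained sketch of that worklist-to-fixpoint shape, using toy integer nodes instead of expression ids:

    use std::collections::{HashMap, HashSet};

    // Standalone sketch of the worklist pattern: pop an entry, try to make
    // progress on it, and only re-queue dependents when something actually
    // changed, so the loop stops at a fixpoint.
    fn main() {
        // Toy "inference" state: a node resolves once its dependency resolves.
        let dependency_of: HashMap<u32, u32> = HashMap::from([(2, 1), (3, 2), (4, 3)]);
        let mut value: HashMap<u32, Option<i64>> =
            HashMap::from([(1, Some(10)), (2, None), (3, None), (4, None)]);

        let mut queued: HashSet<u32> = value.keys().copied().collect();
        while let Some(&next) = queued.iter().next() {
            queued.remove(&next);

            // "Progress" = copy the dependency's value if it is known.
            let dependency_value = dependency_of.get(&next).and_then(|dep| value[dep]);
            let mut progressed = false;
            if let Some(resolved) = dependency_value {
                if value[&next].is_none() {
                    value.insert(next, Some(resolved));
                    progressed = true;
                }
            }

            // Re-queue nodes that depend on `next` so they get another chance.
            if progressed {
                for (&node, &dep) in &dependency_of {
                    if dep == next {
                        queued.insert(node);
                    }
                }
            }
        }

        assert_eq!(value[&4], Some(10)); // everything reachable from node 1 resolved
    }
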
 
@@ -1315,13 +1315,13 @@ impl TypeResolvingVisitor {
 
            if !expr_type.is_done {
 
                // Auto-infer numberlike/integerlike types to a regular int
 
                if expr_type.parts.len() == 1 && expr_type.parts[0] == InferenceTypePart::IntegerLike {
 
                    expr_type.parts[0] = InferenceTypePart::Int;
 
                } else {
 
                    let expr = &ctx.heap[*expr_id];
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Could not fully infer the type of this expression (got '{}')",
 
                            expr_type.display_name(&ctx.heap)
 
                        )
 
                    ))
 
@@ -1363,13 +1363,13 @@ impl TypeResolvingVisitor {
 
                let mut monomorph_types = Vec::with_capacity(extra_data.poly_vars.len());
 
                for (poly_idx, poly_type) in extra_data.poly_vars.iter().enumerate() {
 
                    if !poly_type.is_done {
 
                        // TODO: Single clean function for function signatures and polyvars.
 
                        // TODO: Better error message
 
                        let expr = &ctx.heap[*expr_id];
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, expr.position(),
 
                            &format!(
 
                                "Could not fully infer the type of polymorphic variable {} of this expression (got '{}')",
 
                                poly_idx, poly_type.display_name(&ctx.heap)
 
                            )
 
                        ))
 
@@ -1419,13 +1419,13 @@ impl TypeResolvingVisitor {
 
            } // else: was just a helper structure...
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError2> {
 
    fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError> {
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let id = expr.this;
 
                self.progress_assignment_expr(ctx, id)
 
            },
 
            Expression::Conditional(expr) => {
 
@@ -1468,13 +1468,13 @@ impl TypeResolvingVisitor {
 
                let id = expr.this;
 
                self.progress_variable_expr(ctx, id)
 
            }
 
        }
 
    }
 

	
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> Result<(), ParseError2> {
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> Result<(), ParseError> {
 
        use AssignmentOperator as AO;
 

	
 
        // TODO: Assignable check
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.left;
 
@@ -1510,13 +1510,13 @@ impl TypeResolvingVisitor {
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> Result<(), ParseError2> {
 
    fn progress_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> Result<(), ParseError> {
 
        // Note: test expression type is already enforced
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.true_expression;
 
        let arg2_expr_id = expr.false_expression;
 

	
 
@@ -1539,13 +1539,13 @@ impl TypeResolvingVisitor {
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> Result<(), ParseError2> {
 
    fn progress_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> Result<(), ParseError> {
 
        // Note: our expression type might be fixed by our parent, but we still
 
        // need to make sure it matches the type associated with our operation.
 
        use BinaryOperator as BO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
@@ -1623,13 +1623,13 @@ impl TypeResolvingVisitor {
 
        if progress_arg1 { self.queue_expr(arg1_id); }
 
        if progress_arg2 { self.queue_expr(arg2_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> Result<(), ParseError2> {
 
    fn progress_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> Result<(), ParseError> {
 
        use UnaryOperation as UO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg_id = expr.expression;
 

	
 
@@ -1670,13 +1670,13 @@ impl TypeResolvingVisitor {
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg { self.queue_expr(arg_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> Result<(), ParseError2> {
 
    fn progress_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let subject_id = expr.subject;
 
        let index_id = expr.index;
 

	
 
        debug_log!("Indexing expr: {}", upcast_id.index);
 
@@ -1702,13 +1702,13 @@ impl TypeResolvingVisitor {
 
        if progress_subject_base || progress_subject { self.queue_expr(subject_id); }
 
        if progress_index { self.queue_expr(index_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> Result<(), ParseError2> {
 
    fn progress_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let subject_id = expr.subject;
 
        let from_id = expr.from_index;
 
        let to_id = expr.to_index;
 

	
 
@@ -1740,13 +1740,13 @@ impl TypeResolvingVisitor {
 
        if progress_idx_base || progress_from { self.queue_expr(from_id); }
 
        if progress_idx_base || progress_to { self.queue_expr(to_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> Result<(), ParseError2> {
 
    fn progress_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        
 
        debug_log!("Select expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Subject type: {}", self.expr_types.get(&ctx.heap[id].subject).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 
@@ -1798,13 +1798,13 @@ impl TypeResolvingVisitor {
 
                        Ok(Some(type_def)) => {
 
                            // Subject type is known, check if it is a 
 
                            // struct and the field exists on the struct
 
                            let struct_def = if let DefinedTypeVariant::Struct(struct_def) = &type_def.definition {
 
                                struct_def
 
                            } else {
 
                                return Err(ParseError2::new_error(
 
                                return Err(ParseError::new_error(
 
                                    &ctx.module.source, field.identifier.position,
 
                                    &format!(
 
                                        "Can only apply field access to structs, got a subject of type '{}'",
 
                                        subject_type.display_name(&ctx.heap)
 
                                    )
 
                                ));
 
@@ -1819,13 +1819,13 @@ impl TypeResolvingVisitor {
 
                                }
 
                            }
 

	
 
                            if field.definition.is_none() {
 
                                let field_position = field.identifier.position;
 
                                let ast_struct_def = ctx.heap[type_def.ast_definition].as_struct();
 
                                return Err(ParseError2::new_error(
 
                                return Err(ParseError::new_error(
 
                                    &ctx.module.source, field_position,
 
                                    &format!(
 
                                        "This field does not exist on the struct '{}'",
 
                                        &String::from_utf8_lossy(&ast_struct_def.identifier.value)
 
                                    )
 
                                ))
 
@@ -1838,13 +1838,13 @@ impl TypeResolvingVisitor {
 
                        Ok(None) => {
 
                            // Type of subject is not yet known, so we 
 
                            // cannot make any progress yet
 
                            return Ok(())
 
                        },
 
                        Err(()) => {
 
                            return Err(ParseError2::new_error(
 
                            return Err(ParseError::new_error(
 
                                &ctx.module.source, field.identifier.position,
 
                                &format!(
 
                                    "Can only apply field access to structs, got a subject of type '{}'",
 
                                    subject_type.display_name(&ctx.heap)
 
                                )
 
                            ));
 
@@ -1914,13 +1914,13 @@ impl TypeResolvingVisitor {
 
        debug_log!("   - Subject type [{}]: {}", progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> Result<(), ParseError2> {
 
    fn progress_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let expr_elements = expr.elements.clone(); // TODO: @performance
 

	
 
        debug_log!("Array expr ({} elements): {}", expr_elements.len(), upcast_id.index);
 
        debug_log!(" * Before:");
 
@@ -1955,13 +1955,13 @@ impl TypeResolvingVisitor {
 

	
 
        if expr_progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> Result<(), ParseError2> {
 
    fn progress_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 

	
 
        debug_log!("Literal expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 
@@ -2079,13 +2079,13 @@ impl TypeResolvingVisitor {
 
        Ok(())
 
    }
 

	
 
    // TODO: @cleanup, see how this can be cleaned up once I implement
 
    //  polymorphic struct/enum/union literals. These likely follow the same
 
    //  pattern as here.
 
    fn progress_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> Result<(), ParseError2> {
 
    fn progress_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 

	
 
        debug_log!("Call expr '{}': {}", match &expr.method {
 
            Method::Create => String::from("create"),
 
@@ -2192,13 +2192,13 @@ impl TypeResolvingVisitor {
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> Result<(), ParseError2> {
 
    fn progress_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let var_expr = &ctx.heap[id];
 
        let var_id = var_expr.declaration.unwrap();
 

	
 
        debug_log!("Variable expr '{}': {}", &String::from_utf8_lossy(&ctx.heap[var_id].identifier().value), upcast_id.index);
 
        debug_log!(" * Before:");
 
@@ -2211,13 +2211,13 @@ impl TypeResolvingVisitor {
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            &mut var_data.var_type as *mut _, 0, expr_type, 0
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            let var_decl = &ctx.heap[var_id];
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, var_decl.position(),
 
                &format!(
 
                    "Conflicting types for this variable, previously assigned the type '{}'",
 
                    var_data.var_type.display_name(&ctx.heap)
 
                )
 
            ).with_postfixed_info(
 
@@ -2265,13 +2265,13 @@ impl TypeResolvingVisitor {
 
                    SingleInferenceResult::Incompatible => {
 
                        let var_data = self.var_types.get(&var_id).unwrap();
 
                        let link_data = self.var_types.get(&linked_id).unwrap();
 
                        let var_decl = &ctx.heap[var_id];
 
                        let link_decl = &ctx.heap[linked_id];
 

	
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, var_decl.position(),
 
                            &format!(
 
                                "Conflicting types for this variable, assigned the type '{}'",
 
                                var_data.var_type.display_name(&ctx.heap)
 
                            )
 
                        ).with_postfixed_info(
 
@@ -2308,13 +2308,13 @@ impl TypeResolvingVisitor {
 
    /// Applies a forced type constraint: the type associated with the supplied
 
    /// expression will be molded into the provided "template". The template may
 
    /// be fully specified (e.g. a bool) or contain "inference" variables (e.g.
 
    /// an array of T)
 
    fn apply_forced_constraint(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> Result<bool, ParseError2> {
 
    ) -> Result<bool, ParseError> {
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id);
 
        let expr_type = self.expr_types.get_mut(&expr_id).unwrap();
 
        match InferenceType::infer_subtree_for_single_type(expr_type, 0, template, 0) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(
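
apply_forced_constraint molds an expression type against a fixed template and distinguishes three outcomes: progress was made, nothing changed, or the types conflict. A toy standalone version over a much simpler type representation (a single Unknown placeholder instead of InferenceTypePart lists) makes that trichotomy concrete:

    // Simplified stand-ins for the real inference machinery: a type is either
    // still unknown or a concrete name.
    #[derive(Clone, Debug, PartialEq)]
    enum ToyType {
        Unknown,
        Known(&'static str),
    }

    #[derive(Debug, PartialEq)]
    enum ConstraintResult {
        Modified,     // the type was narrowed to match the template
        Unmodified,   // it already matched, nothing to do
        Incompatible, // it conflicts with the template: report an error
    }

    // Force `actual` to match `template`, mirroring the Modified/Unmodified/
    // Incompatible outcomes above.
    fn apply_forced_constraint(actual: &mut ToyType, template: &ToyType) -> ConstraintResult {
        match (actual.clone(), template) {
            (a, t) if a == *t => ConstraintResult::Unmodified,
            (ToyType::Unknown, t) => {
                *actual = t.clone();
                ConstraintResult::Modified
            }
            _ => ConstraintResult::Incompatible,
        }
    }

    fn main() {
        let mut condition_type = ToyType::Unknown;
        // An if-statement condition is forced to be a bool.
        assert_eq!(
            apply_forced_constraint(&mut condition_type, &ToyType::Known("bool")),
            ConstraintResult::Modified
        );
        assert_eq!(
            apply_forced_constraint(&mut condition_type, &ToyType::Known("s32")),
            ConstraintResult::Incompatible
        );
    }
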
 
@@ -2342,13 +2342,13 @@ impl TypeResolvingVisitor {
 
    /// is successful then the composition of all types is made equal.
 
    /// The "parent" `expr_id` is provided to construct errors.
 
    fn apply_equal2_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg1_start_idx: usize,
 
        arg2_id: ExpressionId, arg2_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError2> {
 
    ) -> Result<(bool, bool), ParseError> {
 
        debug_assert_expr_ids_unique_and_known!(self, arg1_id, arg2_id);
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            arg1_type, arg1_start_idx,
 
@@ -2372,13 +2372,13 @@ impl TypeResolvingVisitor {
 
    /// `expression_start_idx` belong to `expr_id`.
 
    fn apply_equal2_signature_constraint(
 
        ctx: &Ctx, outer_expr_id: ExpressionId, expr_id: Option<ExpressionId>,
 
        polymorph_data: &mut ExtraData, polymorph_progress: &mut HashSet<usize>,
 
        signature_type: *mut InferenceType, signature_start_idx: usize,
 
        expression_type: *mut InferenceType, expression_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError2> {
 
    ) -> Result<(bool, bool), ParseError> {
 
        // Safety: cannot mutually infer the same types
 
        //         polymorph_data containers may not be modified
 
        debug_assert_ptrs_distinct!(signature_type, expression_type);
 

	
 
        // Infer the signature and expression type
 
        let infer_res = unsafe { 
 
@@ -2397,13 +2397,13 @@ impl TypeResolvingVisitor {
 
            };
 
            let (signature_display_type, expression_display_type) = unsafe { (
 
                (&*signature_type).display_name(&ctx.heap),
 
                (&*expression_type).display_name(&ctx.heap)
 
            ) };
 

	
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, outer_position,
 
                "Failed to fully resolve the types of this expression"
 
            ).with_postfixed_info(
 
                &ctx.module.source, position,
 
                &format!(
 
                    "Because the {} signature has been resolved to '{}', but the expression has been resolved to '{}'",
 
@@ -2500,13 +2500,13 @@ impl TypeResolvingVisitor {
 
    /// attempt to do so. If the call is successful then the composition of all
 
    /// types is made equal.
 
    fn apply_equal3_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId,
 
        start_idx: usize
 
    ) -> Result<(bool, bool, bool), ParseError2> {
 
    ) -> Result<(bool, bool, bool), ParseError> {
 
        // Safety: all expression IDs are always distinct, and we do not modify
 
        //  the container
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id, arg1_id, arg2_id);
 
        let expr_type: *mut _ = self.expr_types.get_mut(&expr_id).unwrap();
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 
@@ -2544,13 +2544,13 @@ impl TypeResolvingVisitor {
 
    }
 

	
 
    // TODO: @optimize Since we only deal with a single type this might be done
 
    //  a lot more efficiently, methinks (disregarding the allocations here)
 
    fn apply_equal_n_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId, args: &[ExpressionId],
 
    ) -> Result<Vec<bool>, ParseError2> {
 
    ) -> Result<Vec<bool>, ParseError> {
 
        // Early exit
 
        match args.len() {
 
            0 => return Ok(vec!()),         // nothing to progress
 
            1 => return Ok(vec![false]),    // only one type, so nothing to infer
 
            _ => {}
 
        }
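
apply_equal2, apply_equal3 and apply_equal_n all perform the same underlying step: unify a group of argument types and report, per argument, whether it changed so the caller knows which expressions to re-queue. A standalone sketch of the N-ary case over Option<&str>, with None standing in for a not-yet-inferred type (not the real InferenceType):

    // All argument types must end up identical; returns one progress flag per
    // argument, or an error when two known types conflict.
    fn apply_equal_n_constraint(
        types: &mut [Option<&'static str>],
    ) -> Result<Vec<bool>, String> {
        // Early exit, mirroring the hunk above: nothing to infer for 0 or 1 args.
        match types.len() {
            0 => return Ok(vec![]),
            1 => return Ok(vec![false]),
            _ => {}
        }

        // Pick the first known type as the candidate every argument must equal.
        let mut candidate = None;
        for ty in types.iter() {
            if let Some(known) = ty {
                match candidate {
                    None => candidate = Some(*known),
                    Some(existing) if existing != *known => {
                        return Err(format!("incompatible types '{}' and '{}'", existing, known));
                    }
                    _ => {}
                }
            }
        }

        // Propagate the candidate into every still-unknown slot.
        let mut progress = vec![false; types.len()];
        if let Some(known) = candidate {
            for (idx, ty) in types.iter_mut().enumerate() {
                if ty.is_none() {
                    *ty = Some(known);
                    progress[idx] = true;
                }
            }
        }
        Ok(progress)
    }

    fn main() {
        let mut args = [None, Some("u32"), None];
        assert_eq!(apply_equal_n_constraint(&mut args).unwrap(), vec![true, false, true]);
        assert_eq!(args, [Some("u32"); 3]);

        let mut bad = [Some("u32"), Some("string")];
        assert!(apply_equal_n_constraint(&mut bad).is_err());
    }
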
 
@@ -2606,13 +2606,13 @@ impl TypeResolvingVisitor {
 
    /// Determines the `InferenceType` for the expression based on the
 
    /// expression parent. Note that if the parent is another expression, we do
 
    /// not take special action, instead we let parent expressions fix the type
 
    /// of subexpressions before they have a chance to call this function.
 
    fn insert_initial_expr_inference_type(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId
 
    ) -> Result<(), ParseError2> {
 
    ) -> Result<(), ParseError> {
 
        use ExpressionParent as EP;
 
        use InferenceTypePart as ITP;
 

	
 
        let expr = &ctx.heap[expr_id];
 
        let inference_type = match expr.parent() {
 
            EP::None =>
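
insert_initial_expr_inference_type seeds each expression's type from its parent: statement parents that demand a specific type (such as an if-test expecting a boolean) force it up front, while expression parents leave the child unconstrained until they progress it themselves. A toy standalone sketch of that parent-driven seeding, with simplified parent and type enums rather than the real ExpressionParent/InferenceTypePart:

    // Simplified stand-ins: what kind of node owns the expression, and a toy
    // initial "inference type" for it.
    enum ToyParent {
        IfTest,        // if/while conditions always expect a bool
        Statement,     // e.g. an expression statement: type not yet known
        Expression,    // parent expression will constrain the child later
    }

    #[derive(Debug, PartialEq)]
    enum ToyInitialType {
        Bool,
        Unknown,
    }

    // Choose the initial type from the parent, mirroring the match on the
    // expression parent in the hunk above.
    fn initial_type_for(parent: &ToyParent) -> ToyInitialType {
        match parent {
            ToyParent::IfTest => ToyInitialType::Bool,
            ToyParent::Statement | ToyParent::Expression => ToyInitialType::Unknown,
        }
    }

    fn main() {
        assert_eq!(initial_type_for(&ToyParent::IfTest), ToyInitialType::Bool);
        assert_eq!(initial_type_for(&ToyParent::Statement), ToyInitialType::Unknown);
        assert_eq!(initial_type_for(&ToyParent::Expression), ToyInitialType::Unknown);
    }
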
 
@@ -2982,20 +2982,20 @@ impl TypeResolvingVisitor {
 
    /// happens if we infer the expression type from its arguments (e.g. the
 
    /// expression type of an addition operator is the type of the arguments)
 
    /// But the expression type was already set due to our parent (e.g. an
 
    /// "if statement" or a "logical not" always expecting a boolean)
 
    fn construct_expr_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, arg_id: ExpressionId
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        // TODO: Expand and provide more meaningful information for humans
 
        let expr = &ctx.heap[expr_id];
 
        let arg_expr = &ctx.heap[arg_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 
        let arg_type = self.expr_types.get(&arg_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: this expression expected a '{}'", 
 
                expr_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
@@ -3007,21 +3007,21 @@ impl TypeResolvingVisitor {
 
        )
 
    }
 

	
 
    fn construct_arg_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let arg1 = &ctx.heap[arg1_id];
 
        let arg2 = &ctx.heap[arg2_id];
 

	
 
        let arg1_type = self.expr_types.get(&arg1_id).unwrap();
 
        let arg2_type = self.expr_types.get(&arg2_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            "Incompatible types: cannot apply this expression"
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg1.position(),
 
            &format!(
 
                "Because this expression has type '{}'",
 
@@ -3035,17 +3035,17 @@ impl TypeResolvingVisitor {
 
            )
 
        )
 
    }
 

	
 
    fn construct_template_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: got a '{}' but expected a '{}'",
 
                expr_type.display_name(&ctx.heap), 
 
                InferenceType::partial_display_name(&ctx.heap, template)
 
            )
 
@@ -3061,13 +3061,13 @@ impl TypeResolvingVisitor {
 
    /// So we find this pair and construct the error using it.
 
    ///
 
    /// We assume that the expression is a function call or a struct literal,
 
    /// and that an actual error has occurred.
 
    fn construct_poly_arg_error(
 
        ctx: &Ctx, poly_data: &ExtraData, expr_id: ExpressionId
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        // Helper function to check for polymorph mismatch between two inference
 
        // types.
 
        fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(usize, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            if !type_a.has_body_marker || !type_b.has_body_marker {
 
                return None
 
            }
 
@@ -3123,39 +3123,39 @@ impl TypeResolvingVisitor {
 
                    String::from_utf8_lossy(&definition.identifier.value).to_string()
 
                ),
 
            }
 
        }
 

	
 
        // Helper function to construct initial error
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &Expression) -> ParseError2 {
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &Expression) -> ParseError {
 
            match expr {
 
                Expression::Call(expr) => {
 
                    let (poly_var, func_name) = get_poly_var_and_func_name(ctx, poly_var_idx, expr);
 
                    return ParseError2::new_error(
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' of '{}'",
 
                            poly_var, func_name
 
                        )
 
                    )
 
                },
 
                Expression::Literal(expr) => {
 
                    let lit_struct = expr.value.as_struct();
 
                    let (poly_var, struct_name) = get_poly_var_and_type_name(ctx, poly_var_idx, lit_struct.definition.unwrap());
 
                    return ParseError2::new_error(
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' of instantiation of '{}'",
 
                            poly_var, struct_name
 
                        )
 
                    )
 
                },
 
                Expression::Select(expr) => {
 
                    let field = expr.field.as_symbolic();
 
                    let (poly_var, struct_name) = get_poly_var_and_type_name(ctx, poly_var_idx, field.definition.unwrap());
 
                    return ParseError2::new_error(
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' while accessing field '{}' of '{}'",
 
                            poly_var, &String::from_utf8_lossy(&field.identifier.value), struct_name
 
                        )
 
                    )
src/protocol/parser/type_table.rs
 
@@ -315,13 +315,13 @@ impl TypeTable {
 
            lookup: HashMap::new(), 
 
            iter: TypeIterator::new(), 
 
            parser_type_iter: VecDeque::with_capacity(64), 
 
        }
 
    }
 

	
 
    pub(crate) fn build_base_types(&mut self, ctx: &mut TypeCtx) -> Result<(), ParseError2> {
 
    pub(crate) fn build_base_types(&mut self, ctx: &mut TypeCtx) -> Result<(), ParseError> {
 
        // Make sure we're allowed to cast root_id to index into ctx.modules
 
        debug_assert!(self.lookup.is_empty());
 
        debug_assert!(self.iter.top().is_none());
 
        debug_assert!(self.parser_type_iter.is_empty());
 

	
 
        if cfg!(debug_assertions) {
 
@@ -377,13 +377,13 @@ impl TypeTable {
 
        let definition = self.lookup.get(definition_id).unwrap();
 
        definition.has_monomorph(types)
 
    }
 

	
 
    /// This function will resolve just the basic definition of the type, it
 
    /// will not handle any of the monomorphized instances of the type.
 
    fn resolve_base_definition<'a>(&'a mut self, ctx: &mut TypeCtx, definition_id: DefinitionId) -> Result<(), ParseError2> {
 
    fn resolve_base_definition<'a>(&'a mut self, ctx: &mut TypeCtx, definition_id: DefinitionId) -> Result<(), ParseError> {
 
        // Check if we have already resolved the base definition
 
        if self.lookup.contains_key(&definition_id) { return Ok(()); }
 

	
 
        let root_id = Self::find_root_id(ctx, definition_id);
 
        self.iter.reset(root_id, definition_id);
 

	
 
@@ -412,13 +412,13 @@ impl TypeTable {
 
    }
 

	
 
    /// Resolve the basic enum definition to an entry in the type table. It will
 
    /// not instantiate any monomorphized instances of polymorphic enum
 
    /// definitions. If a subtype has to be resolved first then this function
 
    /// will return `false` after calling `ingest_resolve_result`.
 
    fn resolve_base_enum_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_enum_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_enum());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base enum already resolved");
 
        
 
        let definition = ctx.heap[definition_id].as_enum();
 

	
 
        // Check if the enum should be implemented as a classic enumeration or
 
@@ -449,13 +449,13 @@ impl TypeTable {
 
        if first_tag_value.is_some() && first_int_value.is_some() {
 
            // Not illegal, but useless and probably a programmer mistake
 
            let module_source = &ctx.modules[root_id.index as usize].source;
 
            let tag_pos = first_tag_value.unwrap();
 
            let int_pos = first_int_value.unwrap();
 
            return Err(
 
                ParseError2::new_error(
 
                ParseError::new_error(
 
                    module_source, definition.position,
 
                    "Illegal combination of enum integer variant(s) and enum union variant(s)"
 
                )
 
                    .with_postfixed_info(module_source, int_pos, "Assigning an integer value here")
 
                    .with_postfixed_info(module_source, tag_pos, "Embedding a type in a union variant here")
 
            );
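
The enum check above records the first position where a variant is assigned an integer value and the first position where a variant embeds a type, then reports both positions if the two styles are mixed. A standalone sketch of that "remember the first offender of each kind" scan, with a toy variant type rather than the crate's AST:

    // Toy enum-variant description: a variant either carries an explicit integer
    // value, embeds a type, or is a plain tag.
    enum ToyVariantValue {
        Integer(i64),
        EmbeddedType(&'static str),
        None,
    }

    struct ToyVariant {
        position: (u32, u32),
        value: ToyVariantValue,
    }

    // Scan the variants, remember the first occurrence of each style, and report
    // both recorded positions when the styles are mixed.
    fn check_variant_mix(variants: &[ToyVariant]) -> Result<(), ((u32, u32), (u32, u32))> {
        let mut first_int_value = None;
        let mut first_tag_value = None;
        for variant in variants {
            match variant.value {
                ToyVariantValue::Integer(_) if first_int_value.is_none() => {
                    first_int_value = Some(variant.position);
                }
                ToyVariantValue::EmbeddedType(_) if first_tag_value.is_none() => {
                    first_tag_value = Some(variant.position);
                }
                _ => {}
            }
        }
        match (first_int_value, first_tag_value) {
            (Some(int_pos), Some(tag_pos)) => Err((int_pos, tag_pos)),
            _ => Ok(()),
        }
    }

    fn main() {
        let variants = [
            ToyVariant { position: (2, 5), value: ToyVariantValue::Integer(0) },
            ToyVariant { position: (3, 5), value: ToyVariantValue::EmbeddedType("u32") },
            ToyVariant { position: (4, 5), value: ToyVariantValue::None },
        ];
        // Both styles are present: report both positions.
        assert_eq!(check_variant_mix(&variants), Err(((2, 5), (3, 5))));
    }
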
 
@@ -575,13 +575,13 @@ impl TypeTable {
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic struct definition to an entry in the type table. It
 
    /// will not instantiate any monomorphized instances of polymorphic struct
 
    /// definitions.
 
    fn resolve_base_struct_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_struct_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_struct());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base struct already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_struct();
 

	
 
        // Make sure all fields point to resolvable types
 
@@ -630,13 +630,13 @@ impl TypeTable {
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic function definition to an entry in the type table. It
 
    /// will not instantiate any monomorphized instances of polymorphic function
 
    /// definitions.
 
    fn resolve_base_function_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_function_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_function());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base function already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_function();
 
        let return_type = definition.return_type;
 

	
 
@@ -702,13 +702,13 @@ impl TypeTable {
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic component definition to an entry in the type table.
 
    /// It will not instantiate any monomorphized instances of polymorphic
 
    /// component definitions.
 
    fn resolve_base_component_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_component_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_component());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base component already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_component();
 
        let component_variant = definition.variant;
 

	
 
@@ -766,21 +766,21 @@ impl TypeTable {
 

	
 
    /// Takes a ResolveResult and returns `true` if the caller can happily
 
    /// continue resolving its current type, or `false` if the caller must break
 
    /// resolving the current type and exit to the outer resolving loop. In the
 
    /// latter case the `result` value was `ResolveResult::Unresolved`, implying
 
    /// that the type must be resolved first.
 
    fn ingest_resolve_result(&mut self, ctx: &TypeCtx, result: ResolveResult) -> Result<bool, ParseError2> {
 
    fn ingest_resolve_result(&mut self, ctx: &TypeCtx, result: ResolveResult) -> Result<bool, ParseError> {
 
        match result {
 
            ResolveResult::BuiltIn | ResolveResult::PolyArg(_) => Ok(true),
 
            ResolveResult::Resolved(_) => Ok(true),
 
            ResolveResult::Unresolved((root_id, definition_id)) => {
 
                if self.iter.contains(root_id, definition_id) {
 
                    // Cyclic dependency encountered
 
                    // TODO: Allow this
 
                    let mut error = ParseError2::new_error(
 
                    let mut error = ParseError::new_error(
 
                        &ctx.modules[root_id.index as usize].source, ctx.heap[definition_id].position(),
 
                        "Evaluating this type definition results in a cyclic type"
 
                    );
 

	
 
                    for (breadcrumb_idx, (root_id, definition_id)) in self.iter.breadcrumbs.iter().enumerate() {
 
                        let msg = if breadcrumb_idx == 0 {
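
The cycle check above works because the type iterator keeps a breadcrumb trail of the definitions currently being resolved: encountering an unresolved definition that is already on the trail means the definitions are mutually recursive. A minimal standalone version of that check, with a plain Vec of ids standing in for the iterator:

    // `breadcrumbs` is the chain of type definitions currently being resolved;
    // hitting an id that is already on the chain means the definitions form a
    // cycle.
    fn check_for_cycle(breadcrumbs: &[u32], needed: u32) -> Result<(), Vec<u32>> {
        if let Some(start) = breadcrumbs.iter().position(|&id| id == needed) {
            // Return the offending part of the trail so each step can be reported,
            // much like the per-breadcrumb statements built above.
            Err(breadcrumbs[start..].to_vec())
        } else {
            Ok(())
        }
    }

    fn main() {
        // Resolving 7 requires 9, 9 requires 4, and 4 requires 7 again: a cycle.
        let breadcrumbs = vec![7, 9, 4];
        match check_for_cycle(&breadcrumbs, 7) {
            Err(cycle) => println!("Evaluating this type definition results in a cyclic type: {:?}", cycle),
            Ok(()) => {}
        }
        assert!(check_for_cycle(&breadcrumbs, 12).is_ok());
    }
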
 
@@ -814,13 +814,13 @@ impl TypeTable {
 
    ///
 
    /// If the embedded type is resolved, then one always receives the type's
 
    /// (module, definition) tuple. If any of the types in the embedded type's
 
    /// tree is not yet resolved, then one may receive a (module, definition)
 
    /// tuple that does not correspond to the `parser_type_id` passed into this
 
    /// function.
 
    fn resolve_base_parser_type(&mut self, ctx: &TypeCtx, poly_vars: &Vec<Identifier>, root_id: RootId, parser_type_id: ParserTypeId) -> Result<ResolveResult, ParseError2> {
 
    fn resolve_base_parser_type(&mut self, ctx: &TypeCtx, poly_vars: &Vec<Identifier>, root_id: RootId, parser_type_id: ParserTypeId) -> Result<ResolveResult, ParseError> {
 
        use ParserTypeVariant as PTV;
 

	
 
        // Prepping iterator
 
        self.parser_type_iter.clear();
 
        self.parser_type_iter.push_back(parser_type_id);
 

	
 
@@ -863,13 +863,13 @@ impl TypeTable {
 
                        }
 
                    }
 

	
 
                    // Lookup the definition in the symbol table
 
                    let (symbol, mut ident_iter) = ctx.symbols.resolve_namespaced_identifier(root_id, &symbolic.identifier);
 
                    if symbol.is_none() {
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            &ctx.modules[root_id.index as usize].source, symbolic.identifier.position,
 
                            "Could not resolve type"
 
                        ))
 
                    }
 

	
 
                    let symbol_value = symbol.unwrap();
 
@@ -879,20 +879,20 @@ impl TypeTable {
 
                        Symbol::Namespace(_) => {
 
                            // Reference to a namespace instead of a type
 
                            let last_ident = ident_iter.prev();
 
                            return if ident_iter.num_remaining() == 0 {
 
                                // Could also have polymorphic args, but we 
 
                                // don't care, just throw this error: 
 
                                Err(ParseError2::new_error(
 
                                Err(ParseError::new_error(
 
                                    module_source, symbolic.identifier.position,
 
                                    "Expected a type, got a module name"
 
                                ))
 
                            } else if last_ident.is_some() && last_ident.map(|(_, poly_args)| poly_args.is_some()).unwrap() {
 
                                // Halted at a namespace because we encountered
 
                                // polymorphic arguments
 
                                Err(ParseError2::new_error(
 
                                Err(ParseError::new_error(
 
                                    module_source, symbolic.identifier.position,
 
                                    "Illegal specification of polymorphic arguments to a module name"
 
                                ))
 
                            } else {
 
                                // Impossible (with the current implementation 
 
                                // of the symbol table)
 
@@ -906,31 +906,31 @@ impl TypeTable {
 
                        Symbol::Definition((root_id, definition_id)) => {
 
                            let definition = &ctx.heap[definition_id];
 
                            if ident_iter.num_remaining() > 0 {
 
                                // Namespaced identifier is longer than the type
 
                                // we found. Return the appropriate message
 
                                return if definition.is_struct() || definition.is_enum() {
 
                                    Err(ParseError2::new_error(
 
                                    Err(ParseError::new_error(
 
                                        module_source, symbolic.identifier.position,
 
                                        &format!(
 
                                            "Unknown type '{}', did you mean to use '{}'?",
 
                                            String::from_utf8_lossy(&symbolic.identifier.value),
 
                                            String::from_utf8_lossy(&definition.identifier().value)
 
                                        )
 
                                    ))
 
                                } else {
 
                                    Err(ParseError2::new_error(
 
                                    Err(ParseError::new_error(
 
                                        module_source, symbolic.identifier.position,
 
                                        "Unknown datatype"
 
                                    ))
 
                                }
 
                            }
 

	
 
                            // Found a match, make sure it is a datatype
 
                            if !(definition.is_struct() || definition.is_enum()) {
 
                                return Err(ParseError2::new_error(
 
                                return Err(ParseError::new_error(
 
                                    module_source, symbolic.identifier.position,
 
                                    "Embedded types must be datatypes (structs or enums)"
 
                                ))
 
                            }
 

	
 
                            // Found a struct/enum definition
 
@@ -986,13 +986,13 @@ impl TypeTable {
 
    /// when the embedded type is a polymorphic variable or points to another
 
    /// user-defined type.
 
    fn check_and_resolve_embedded_type_and_modify_poly_args(
 
        &mut self, ctx: &mut TypeCtx, 
 
        type_definition_id: DefinitionId, poly_args: &mut [PolyVar], 
 
        root_id: RootId, embedded_type_id: ParserTypeId,
 
    ) -> Result<(), ParseError2> {
 
    ) -> Result<(), ParseError> {
 
        use ParserTypeVariant as PTV;
 

	
 
        self.parser_type_iter.clear();
 
        self.parser_type_iter.push_back(embedded_type_id);
 

	
 
        'type_loop: while let Some(embedded_type_id) = self.parser_type_iter.pop_back() {
 
@@ -1062,13 +1062,13 @@ impl TypeTable {
 
                        let number_args_msg = if defined_type.poly_vars.is_empty() {
 
                            String::from("is not polymorphic")
 
                        } else {
 
                            format!("accepts {} polymorphic arguments", defined_type.poly_vars.len())
 
                        };
 
    
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            module_source, symbolic.identifier.position,
 
                            &format!(
 
                                "The type '{}' {}, but {} polymorphic arguments were provided",
 
                                String::from_utf8_lossy(&symbolic.identifier.strip_poly_args()),
 
                                number_args_msg, poly_args.len()
 
                            )
 
@@ -1087,20 +1087,20 @@ impl TypeTable {
 
    }
 

	
 
    /// Go through a list of identifiers and ensure that all identifiers have
 
    /// unique names
 
    fn check_identifier_collision<T: Sized, F: Fn(&T) -> &Identifier>(
 
        &self, ctx: &TypeCtx, root_id: RootId, items: &[T], getter: F, item_name: &'static str
 
    ) -> Result<(), ParseError2> {
 
    ) -> Result<(), ParseError> {
 
        for (item_idx, item) in items.iter().enumerate() {
 
            let item_ident = getter(item);
 
            for other_item in &items[0..item_idx] {
 
                let other_item_ident = getter(other_item);
 
                if item_ident == other_item_ident {
 
                    let module_source = &ctx.modules[root_id.index as usize].source;
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        module_source, item_ident.position, &format!("This {} is defined more than once", item_name)
 
                    ).with_postfixed_info(
 
                        module_source, other_item_ident.position, &format!("The other {} is defined here", item_name)
 
                    ));
 
                }
 
            }
 
@@ -1111,20 +1111,20 @@ impl TypeTable {
 

	
 
    /// Go through a list of polymorphic arguments and make sure that the
 
    /// arguments all have unique names, and the arguments do not conflict with
 
    /// any symbols defined at the module scope.
 
    fn check_poly_args_collision(
 
        &self, ctx: &TypeCtx, root_id: RootId, poly_args: &[Identifier]
 
    ) -> Result<(), ParseError2> {
 
    ) -> Result<(), ParseError> {
 
        // Make sure polymorphic arguments are unique and none of the
 
        // identifiers conflict with any imported scopes
 
        for (arg_idx, poly_arg) in poly_args.iter().enumerate() {
 
            for other_poly_arg in &poly_args[..arg_idx] {
 
                if poly_arg == other_poly_arg {
 
                    let module_source = &ctx.modules[root_id.index as usize].source;
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        module_source, poly_arg.position,
 
                        "This polymorphic argument is defined more than once"
 
                    ).with_postfixed_info(
 
                        module_source, other_poly_arg.position,
 
                        "It conflicts with this polymorphic argument"
 
                    ));
 
@@ -1133,13 +1133,13 @@ impl TypeTable {
 

	
 
            // Check if identifier conflicts with a symbol defined or imported
 
            // in the current module
 
            if let Some(symbol) = ctx.symbols.resolve_symbol(root_id, &poly_arg.value) {
 
                // We have a conflict
 
                let module_source = &ctx.modules[root_id.index as usize].source;
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    module_source, poly_arg.position,
 
                    "This polymorphic argument conflicts with another symbol"
 
                ).with_postfixed_info(
 
                    module_source, symbol.position,
 
                    "It conflicts due to this symbol"
 
                ));
src/protocol/parser/utils.rs
 
@@ -17,32 +17,32 @@ pub(crate) enum FindTypeResult<'t, 'i> {
 

	
 
// TODO: @cleanup Find other uses of this pattern
 
impl<'t, 'i> FindTypeResult<'t, 'i> {
 
    /// Utility function to transform the `FindTypeResult` into a `Result` where
 
    /// `Ok` contains the resolved type, and `Err` contains a `ParseError` which
 
    /// can be readily returned. This is the most common use.
 
    pub(crate) fn as_parse_error(self, module_source: &InputSource) -> Result<(&'t DefinedType, NamespacedIdentifierIter<'i>), ParseError2> {
 
    pub(crate) fn as_parse_error(self, module_source: &InputSource) -> Result<(&'t DefinedType, NamespacedIdentifierIter<'i>), ParseError> {
 
        match self {
 
            FindTypeResult::Found(defined_type) => Ok(defined_type),
 
            FindTypeResult::SymbolNotFound{ident_pos} => {
 
                Err(ParseError2::new_error(
 
                Err(ParseError::new_error(
 
                    module_source, ident_pos,
 
                    "Could not resolve this identifier to a symbol"
 
                ))
 
            },
 
            FindTypeResult::SymbolPartial{ident_pos, ident_iter} => {
 
                Err(ParseError2::new_error(
 
                Err(ParseError::new_error(
 
                    module_source, ident_pos, 
 
                    &format!(
 
                        "Could not fully resolve this identifier to a symbol, was only able to match '{}'",
 
                        &String::from_utf8_lossy(ident_iter.returned_section())
 
                    )
 
                ))
 
            },
 
            FindTypeResult::SymbolNamespace{ident_pos, symbol_pos} => {
 
                Err(ParseError2::new_error(
 
                Err(ParseError::new_error(
 
                    module_source, ident_pos,
 
                    "This identifier was resolved to a namespace instead of a type"
 
                ).with_postfixed_info(
 
                    module_source, symbol_pos,
 
                    "This is the referenced namespace"
 
                ))
 
@@ -95,13 +95,13 @@ pub(crate) enum MatchPolymorphResult<'t> {
 
    InferAll(usize),
 
    Mismatch{defined_type: &'t DefinedType, ident_position: InputPosition, num_specified: usize},
 
    NoneExpected{defined_type: &'t DefinedType, ident_position: InputPosition},
 
}
 

	
 
impl<'t> MatchPolymorphResult<'t> {
 
    pub(crate) fn as_parse_error(self, heap: &Heap, module_source: &InputSource) -> Result<usize, ParseError2> {
 
    pub(crate) fn as_parse_error(self, heap: &Heap, module_source: &InputSource) -> Result<usize, ParseError> {
 
        match self {
 
            MatchPolymorphResult::Matching => Ok(0),
 
            MatchPolymorphResult::InferAll(count) => {
 
                debug_assert!(count > 0);
 
                Ok(count)
 
            },
 
@@ -110,25 +110,25 @@ impl<'t> MatchPolymorphResult<'t> {
 
                let args_name = if defined_type.poly_vars.len() == 1 {
 
                    "argument"
 
                } else {
 
                    "arguments"
 
                };
 

	
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    module_source, ident_position,
 
                    &format!(
 
                        "expected {} polymorphic {} (or none, to infer them) for the type {}, but {} were specified",
 
                        defined_type.poly_vars.len(), args_name, 
 
                        &String::from_utf8_lossy(&type_identifier.value),
 
                        num_specified
 
                    )
 
                ))
 
            },
 
            MatchPolymorphResult::NoneExpected{defined_type, ident_position, ..} => {
 
                let type_identifier = heap[defined_type.ast_definition].identifier();
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    module_source, ident_position,
 
                    &format!(
 
                        "the type {} is not polymorphic",
 
                        &String::from_utf8_lossy(&type_identifier.value)
 
                    )
 
                ))
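Both helper enums in this file expose an as_parse_error conversion so callers can turn a lookup outcome into a Result and use the ? operator directly. A small sketch of that idea under simplified assumptions (a toy FindResult enum with String errors, not the real FindTypeResult/ParseError pair):

enum FindResult<'t> {
    Found(&'t str),
    SymbolNotFound { ident: String },
    SymbolNamespace { ident: String },
}

impl<'t> FindResult<'t> {
    // Convert the lookup outcome into a Result so the caller can write `?`.
    fn as_parse_error(self) -> Result<&'t str, String> {
        match self {
            FindResult::Found(definition) => Ok(definition),
            FindResult::SymbolNotFound { ident } =>
                Err(format!("Could not resolve '{}' to a symbol", ident)),
            FindResult::SymbolNamespace { ident } =>
                Err(format!("'{}' resolved to a namespace instead of a type", ident)),
        }
    }
}

fn lookup<'t>(types: &'t [(String, String)], name: &str) -> FindResult<'t> {
    match types.iter().find(|(n, _)| n == name) {
        Some((_, definition)) => FindResult::Found(definition.as_str()),
        None => FindResult::SymbolNotFound { ident: name.to_string() },
    }
}

fn main() -> Result<(), String> {
    let types = vec![("Pair".to_string(), "struct Pair { ... }".to_string())];
    let definition = lookup(&types, "Pair").as_parse_error()?;
    println!("resolved to: {}", definition);
    Ok(())
}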
src/protocol/parser/visitor.rs
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::parser::{symbol_table::*, type_table::*, LexedModule};
 

	
 
type Unit = ();
 
pub(crate) type VisitorResult = Result<Unit, ParseError2>;
 
pub(crate) type VisitorResult = Result<Unit, ParseError>;
 

	
 
/// Globally configured vector capacity for statement buffers in visitor 
 
/// implementations
 
pub(crate) const STMT_BUFFER_INIT_CAPACITY: usize = 256;
 
/// Globally configured vector capacity for expression buffers in visitor
 
/// implementations
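The VisitorResult alias keeps every visitor method on the same Result<(), ParseError> signature so an error anywhere in the traversal bubbles up through ?. A tiny sketch of that shape with a simplified String error and hypothetical CountingVisitor/visit_* names (not the crate's Visitor2 trait):

type VisitorResult = Result<(), String>; // the real alias uses ParseError

struct CountingVisitor { visited: usize }

impl CountingVisitor {
    fn visit_block(&mut self, stmts: &[&str]) -> VisitorResult {
        for stmt in stmts {
            self.visit_stmt(stmt)?; // any error propagates straight to the caller
        }
        Ok(())
    }

    fn visit_stmt(&mut self, stmt: &str) -> VisitorResult {
        if stmt.trim().is_empty() {
            return Err(String::from("empty statement"));
        }
        self.visited += 1;
        Ok(())
    }
}

fn main() {
    let mut visitor = CountingVisitor { visited: 0 };
    assert!(visitor.visit_block(&["channel a -> b;", "sync { }"]).is_ok());
    assert_eq!(visitor.visited, 2);
}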
src/protocol/parser/visitor_linker.rs
Show inline comments
 
@@ -366,19 +366,19 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 
            // Check for validity of synchronous statement
 
            let cur_sync_position = ctx.heap[id].position;
 
            if self.in_sync.is_some() {
 
                // Nested synchronous statement
 
                let old_sync = &ctx.heap[self.in_sync.unwrap()];
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, cur_sync_position, "Illegal nested synchronous statement")
 
                    ParseError::new_error(&ctx.module.source, cur_sync_position, "Illegal nested synchronous statement")
 
                        .with_postfixed_info(&ctx.module.source, old_sync.position, "It is nested in this synchronous statement")
 
                );
 
            }
 

	
 
            if !self.def_type.is_primitive() {
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    &ctx.module.source, cur_sync_position,
 
                    "Synchronous statements may only be used in primitive components"
 
                ));
 
            }
 

	
 
            // Append SynchronousEnd pseudo-statement
 
@@ -403,13 +403,13 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let stmt = &ctx.heap[id];
 
            if !self.def_type.is_function() {
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, stmt.position, "Return statements may only appear in function bodies")
 
                    ParseError::new_error(&ctx.module.source, stmt.position, "Return statements may only appear in function bodies")
 
                );
 
            }
 
        } else {
 
            // If here then we are within a function
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::Return(id);
 
@@ -426,21 +426,21 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 
            if self.def_type.is_function() {
 
                // TODO: We probably want to allow this. Mark the function as
 
                //  using asserts, and then only allow calls to these functions
 
                //  within components. Such a marker will cascade through any
 
                //  functions that then call an asserting function
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, stmt.position, "Illegal assert statement in a function")
 
                    ParseError::new_error(&ctx.module.source, stmt.position, "Illegal assert statement in a function")
 
                );
 
            }
 

	
 
            // We are in a component of some sort, but we also need to be within a
 
            // synchronous statement
 
            if self.in_sync.is_none() {
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, stmt.position, "Illegal assert statement outside of a synchronous block")
 
                    ParseError::new_error(&ctx.module.source, stmt.position, "Illegal assert statement outside of a synchronous block")
 
                );
 
            }
 
        } else {
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            let expr_id = stmt.expression;
 

	
 
@@ -466,13 +466,13 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 
                // nested sync statements are not allowed so if the value does
 
                // not match, then we must be inside a sync scope
 
                debug_assert!(self.in_sync.is_some());
 
                let goto_stmt = &ctx.heap[id];
 
                let sync_stmt = &ctx.heap[self.in_sync.unwrap()];
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, goto_stmt.position, "Goto may not escape the surrounding synchronous block")
 
                    ParseError::new_error(&ctx.module.source, goto_stmt.position, "Goto may not escape the surrounding synchronous block")
 
                        .with_postfixed_info(&ctx.module.source, target.position, "This is the target of the goto statement")
 
                        .with_postfixed_info(&ctx.module.source, sync_stmt.position, "Which will jump past this statement")
 
                );
 
            }
 
        }
 

	
 
@@ -485,24 +485,24 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 
            // TODO: Cleanup error messages, can be done cleaner
 
            // Make sure new statement occurs within a composite component
 
            let call_expr_id = ctx.heap[id].expression;
 
            if !self.def_type.is_composite() {
 
                let new_stmt = &ctx.heap[id];
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, new_stmt.position, "Instantiating components may only be done in composite components")
 
                    ParseError::new_error(&ctx.module.source, new_stmt.position, "Instantiating components may only be done in composite components")
 
                );
 
            }
 

	
 
            // We make sure that we point to a symbolic method. Checking that it
 
            // points to a component is done in the depth pass.
 
            let call_expr = &ctx.heap[call_expr_id];
 
            if let Method::Symbolic(_) = &call_expr.method {
 
                // We're fine
 
            } else {
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, call_expr.position, "Must instantiate a component")
 
                    ParseError::new_error(&ctx.module.source, call_expr.position, "Must instantiate a component")
 
                );
 
            }
 
        } else {
 
            // Just call `visit_call_expr`. We do some extra work we don't have
 
            // to, but this prevents silly mistakes.
 
            let call_expr_id = ctx.heap[id].expression;
 
@@ -735,24 +735,24 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 
                            break;
 
                        }
 
                    }
 

	
 
                    // Check if not found
 
                    if field.field_idx == FIELD_NOT_FOUND_SENTINEL {
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, field.identifier.position,
 
                            &format!(
 
                                "This field does not exist on the struct '{}'",
 
                                &String::from_utf8_lossy(&literal.identifier.value),
 
                            )
 
                        ));
 
                    }
 

	
 
                    // Check if specified more than once
 
                    if specified[field.field_idx] {
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, field.identifier.position,
 
                            "This field is specified more than once"
 
                        ));
 
                    }
 

	
 
                    specified[field.field_idx] = true;
 
@@ -766,13 +766,13 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 
                            if !not_specified.is_empty() { not_specified.push_str(", ") }
 
                            let field_ident = &definition.fields[def_field_idx].identifier;
 
                            not_specified.push_str(&String::from_utf8_lossy(&field_ident.value));
 
                        }
 
                    }
 

	
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, literal.identifier.position,
 
                        &format!("Not all fields are specified, [{}] are missing", not_specified)
 
                    ));
 
                }
 

	
 
                // Need to traverse fields expressions in struct and evaluate
 
@@ -812,49 +812,49 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 
        match &mut call_expr.method {
 
            Method::Create => {
 
                num_definition_args = 1;
 
            },
 
            Method::Fires => {
 
                if !self.def_type.is_primitive() {
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'fires' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'fires' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
                num_definition_args = 1;
 
            },
 
            Method::Get => {
 
                if !self.def_type.is_primitive() {
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'get' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'get' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
                num_definition_args = 1;
 
            },
 
            Method::Put => {
 
                if !self.def_type.is_primitive() {
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'put' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'put' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
                num_definition_args = 2;
 
            }
 
@@ -888,13 +888,13 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 

	
 
        // Check the poly args and the number of variables in the call
 
        // expression
 
        self.visit_call_poly_args(ctx, id)?;
 
        let call_expr = &mut ctx.heap[id];
 
        if num_expr_args != num_definition_args {
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, call_expr.position,
 
                &format!(
 
                    "This call expects {} arguments, but {} were provided",
 
                    num_definition_args, num_expr_args
 
                )
 
            ));
 
@@ -1076,13 +1076,13 @@ impl ValidityAndLinkerVisitor {
 
                    let mut symbolic_variant = None;
 
                    for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
                        if symbolic.identifier.matches_identifier(poly_var) {
 
                            // Type refers to a polymorphic variable.
 
                            // TODO: @hkt Maybe allow higher-kinded types?
 
                            if symbolic.identifier.get_poly_args().is_some() {
 
                                return Err(ParseError2::new_error(
 
                                return Err(ParseError::new_error(
 
                                    &ctx.module.source, symbolic.identifier.position,
 
                                    "Polymorphic arguments to a polymorphic variable (higher-kinded types) are not allowed (yet)"
 
                                ));
 
                            }
 
                            symbolic_variant = Some(SymbolicParserTypeVariant::PolyArg(definition_id, poly_var_idx));
 
                        }
 
@@ -1097,13 +1097,13 @@ impl ValidityAndLinkerVisitor {
 
                            &ctx.symbols, &ctx.types, ctx.module.root_id, &symbolic.identifier
 
                        ).as_parse_error(&ctx.module.source)?;
 

	
 
                        // TODO: @function_ptrs: Allow function pointers at some
 
                        //  point in the future
 
                        if found_type.definition.type_class().is_proc_type() {
 
                            return Err(ParseError2::new_error(
 
                            return Err(ParseError::new_error(
 
                                &ctx.module.source, symbolic.identifier.position,
 
                                &format!(
 
                                    "This identifier points to a {} type, expected a datatype",
 
                                    found_type.definition.type_class()
 
                                )
 
                            ));
 
@@ -1163,21 +1163,21 @@ impl ValidityAndLinkerVisitor {
 
    //--------------------------------------------------------------------------
 
    // Utilities
 
    //--------------------------------------------------------------------------
 

	
 
    /// Adds a local variable to the current scope. It will also annotate the
 
    /// `Local` in the AST with its relative position in the block.
 
    fn checked_local_add(&mut self, ctx: &mut Ctx, relative_pos: u32, id: LocalId) -> Result<(), ParseError2> {
 
    fn checked_local_add(&mut self, ctx: &mut Ctx, relative_pos: u32, id: LocalId) -> Result<(), ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        // Make sure we do not conflict with any global symbols
 
        {
 
            let ident = &ctx.heap[id].identifier;
 
            if let Some(symbol) = ctx.symbols.resolve_symbol(ctx.module.root_id, &ident.value) {
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, ident.position, "Local variable declaration conflicts with symbol")
 
                    ParseError::new_error(&ctx.module.source, ident.position, "Local variable declaration conflicts with symbol")
 
                        .with_postfixed_info(&ctx.module.source, symbol.position, "Conflicting symbol is found here")
 
                );
 
            }
 
        }
 

	
 
        let local = &mut ctx.heap[id];
 
@@ -1199,13 +1199,13 @@ impl ValidityAndLinkerVisitor {
 
                // in which the current variable resides.
 
                if local.this != *other_local_id &&
 
                    local_relative_pos >= other_local.relative_pos_in_block &&
 
                    local.identifier == other_local.identifier {
 
                    // Collision within this scope
 
                    return Err(
 
                        ParseError2::new_error(&ctx.module.source, local.position, "Local variable name conflicts with another variable")
 
                        ParseError::new_error(&ctx.module.source, local.position, "Local variable name conflicts with another variable")
 
                            .with_postfixed_info(&ctx.module.source, other_local.position, "Previous variable is found here")
 
                    );
 
                }
 
            }
 

	
 
            // Current scope is fine, move to parent scope if any
 
@@ -1214,13 +1214,13 @@ impl ValidityAndLinkerVisitor {
 
            if let Scope::Definition(definition_id) = scope {
 
                // At outer scope, check parameters of function/component
 
                for parameter_id in ctx.heap[*definition_id].parameters() {
 
                    let parameter = &ctx.heap[*parameter_id];
 
                    if local.identifier == parameter.identifier {
 
                        return Err(
 
                            ParseError2::new_error(&ctx.module.source, local.position, "Local variable name conflicts with parameter")
 
                            ParseError::new_error(&ctx.module.source, local.position, "Local variable name conflicts with parameter")
 
                                .with_postfixed_info(&ctx.module.source, parameter.position, "Parameter definition is found here")
 
                        );
 
                    }
 
                }
 

	
 
                break;
 
@@ -1236,13 +1236,13 @@ impl ValidityAndLinkerVisitor {
 

	
 
        Ok(())
 
    }
 

	
 
    /// Finds a variable in the visitor's scope that must appear before the
 
    /// specified relative position within that block.
 
    fn find_variable(&self, ctx: &Ctx, mut relative_pos: u32, identifier: &NamespacedIdentifier) -> Result<VariableId, ParseError2> {
 
    fn find_variable(&self, ctx: &Ctx, mut relative_pos: u32, identifier: &NamespacedIdentifier) -> Result<VariableId, ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 
        debug_assert!(identifier.parts.len() == 1, "implement namespaced seeking of target associated with identifier");
 

	
 
        // TODO: May still refer to an alias of a global symbol using a single
 
        //  identifier in the namespace.
 
        // No need to use iterator over namespaces if here
 
@@ -1275,24 +1275,24 @@ impl ValidityAndLinkerVisitor {
 
                        }
 
                    },
 
                    _ => unreachable!(),
 
                }
 

	
 
                // Variable could not be found
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    &ctx.module.source, identifier.position, "This variable is not declared"
 
                ));
 
            } else {
 
                relative_pos = block.relative_pos_in_parent;
 
            }
 
        }
 
    }
 

	
 
    /// Adds a particular label to the current scope. Will return an error if
 
    /// there is another label with the same name visible in the current scope.
 
    fn checked_label_add(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> Result<(), ParseError2> {
 
    fn checked_label_add(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> Result<(), ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        // Make sure label is not defined within the current scope or any of the
 
        // parent scope.
 
        let label = &ctx.heap[id];
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
@@ -1302,13 +1302,13 @@ impl ValidityAndLinkerVisitor {
 
            let block = &ctx.heap[scope.to_block()];
 
            for other_label_id in &block.labels {
 
                let other_label = &ctx.heap[*other_label_id];
 
                if other_label.label == label.label {
 
                    // Collision
 
                    return Err(
 
                        ParseError2::new_error(&ctx.module.source, label.position, "Label name conflicts with another label")
 
                        ParseError::new_error(&ctx.module.source, label.position, "Label name conflicts with another label")
 
                            .with_postfixed_info(&ctx.module.source, other_label.position, "Other label is found here")
 
                    );
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
@@ -1325,13 +1325,13 @@ impl ValidityAndLinkerVisitor {
 
        Ok(())
 
    }
 

	
 
    /// Finds a particular labeled statement by its identifier. Once found it
 
    /// will make sure that the target label does not skip over any variable
 
    /// declarations within the scope in which the label was found.
 
    fn find_label(&self, ctx: &Ctx, identifier: &Identifier) -> Result<LabeledStatementId, ParseError2> {
 
    fn find_label(&self, ctx: &Ctx, identifier: &Identifier) -> Result<LabeledStatementId, ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        loop {
 
            debug_assert!(scope.is_block(), "scope is not a block");
 
            let relative_scope_pos = ctx.heap[scope.to_block()].relative_pos_in_parent;
 
@@ -1345,46 +1345,46 @@ impl ValidityAndLinkerVisitor {
 
                        //  is legal to skip over a variable declaration if it
 
                        //  is not actually being used. I might be missing
 
                        //  something here when laying out the bytecode...
 
                        let local = &ctx.heap[*local_id];
 
                        if local.relative_pos_in_block > relative_scope_pos && local.relative_pos_in_block < label.relative_pos_in_block {
 
                            return Err(
 
                                ParseError2::new_error(&ctx.module.source, identifier.position, "This target label skips over a variable declaration")
 
                                ParseError::new_error(&ctx.module.source, identifier.position, "This target label skips over a variable declaration")
 
                                    .with_postfixed_info(&ctx.module.source, label.position, "Because it jumps to this label")
 
                                    .with_postfixed_info(&ctx.module.source, local.position, "Which skips over this variable")
 
                            );
 
                        }
 
                    }
 
                    return Ok(*label_id);
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                return Err(ParseError2::new_error(&ctx.module.source, identifier.position, "Could not find this label"));
 
                return Err(ParseError::new_error(&ctx.module.source, identifier.position, "Could not find this label"));
 
            }
 

	
 
        }
 
    }
 

	
 
    /// Finds a particular symbol in the symbol table which must correspond to
 
    /// a definition of a particular type.
 
    // Note: root_id, symbols and types passed in explicitly to prevent
 
    //  borrowing errors
 
    fn find_symbol_of_type<'a>(
 
        &self, source: &InputSource, root_id: RootId, symbols: &SymbolTable, types: &'a TypeTable,
 
        identifier: &NamespacedIdentifier, expected_type_class: TypeClass
 
    ) -> Result<&'a DefinedType, ParseError2> {
 
    ) -> Result<&'a DefinedType, ParseError> {
 
        // Find symbol associated with identifier
 
        let (find_result, _) = find_type_definition(symbols, types, root_id, identifier)
 
            .as_parse_error(source)?;
 

	
 
        let definition_type_class = find_result.definition.type_class();
 
        if expected_type_class != definition_type_class {
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                source, identifier.position,
 
                &format!(
 
                    "Expected to find a {}, this symbol points to a {}",
 
                    expected_type_class, definition_type_class
 
                )
 
            ))
 
@@ -1418,41 +1418,41 @@ impl ValidityAndLinkerVisitor {
 
    /// This function should be called while dealing with break/continue
 
    /// statements. It will try to find the targeted while statement, using the
 
    /// target label if provided. If a valid target is found then the loop's
 
    /// ID will be returned, otherwise a parsing error is constructed.
 
    /// The provided input position should be the position of the break/continue
 
    /// statement.
 
    fn resolve_break_or_continue_target(&self, ctx: &Ctx, position: InputPosition, label: &Option<Identifier>) -> Result<WhileStatementId, ParseError2> {
 
    fn resolve_break_or_continue_target(&self, ctx: &Ctx, position: InputPosition, label: &Option<Identifier>) -> Result<WhileStatementId, ParseError> {
 
        let target = match label {
 
            Some(label) => {
 
                let target_id = self.find_label(ctx, label)?;
 

	
 
                // Make sure break target is a while statement
 
                let target = &ctx.heap[target_id];
 
                if let Statement::While(target_stmt) = &ctx.heap[target.body] {
 
                    // Even though we have a target while statement, the break might not be
 
                    // present underneath this particular labeled while statement
 
                    if !self.has_parent_while_scope(ctx, target_stmt.this) {
 
                        ParseError2::new_error(&ctx.module.source, label.position, "Break statement is not nested under the target label's while statement")
 
                        ParseError::new_error(&ctx.module.source, label.position, "Break statement is not nested under the target label's while statement")
 
                            .with_postfixed_info(&ctx.module.source, target.position, "The targeted label is found here");
 
                    }
 

	
 
                    target_stmt.this
 
                } else {
 
                    return Err(
 
                        ParseError2::new_error(&ctx.module.source, label.position, "Incorrect break target label, it must target a while loop")
 
                        ParseError::new_error(&ctx.module.source, label.position, "Incorrect break target label, it must target a while loop")
 
                            .with_postfixed_info(&ctx.module.source, target.position, "The targeted label is found here")
 
                    );
 
                }
 
            },
 
            None => {
 
                // Use the enclosing while statement, the break must be
 
                // nested within that while statement
 
                if self.in_while.is_none() {
 
                    return Err(
 
                        ParseError2::new_error(&ctx.module.source, position, "Break statement is not nested under a while loop")
 
                        ParseError::new_error(&ctx.module.source, position, "Break statement is not nested under a while loop")
 
                    );
 
                }
 

	
 
                self.in_while.unwrap()
 
            }
 
        };
 
@@ -1464,13 +1464,13 @@ impl ValidityAndLinkerVisitor {
 
            if target_while.in_sync != self.in_sync {
 
                // Break is nested under while statement, so can only escape a
 
                // sync block if the sync is nested inside the while statement.
 
                debug_assert!(self.in_sync.is_some());
 
                let sync_stmt = &ctx.heap[self.in_sync.unwrap()];
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, position, "Break may not escape the surrounding synchronous block")
 
                    ParseError::new_error(&ctx.module.source, position, "Break may not escape the surrounding synchronous block")
 
                        .with_postfixed_info(&ctx.module.source, target_while.position, "The break escapes out of this loop")
 
                        .with_postfixed_info(&ctx.module.source, sync_stmt.position, "And would therefore escape this synchronous block")
 
                );
 
            }
 
        }
 

	
 
@@ -1550,13 +1550,13 @@ impl ValidityAndLinkerVisitor {
 
                let parser_type_id = self.parser_type_buffer.pop().unwrap();
 
                self.visit_parser_type(ctx, parser_type_id)?;
 
            }
 
            self.parser_type_buffer.truncate(old_num_types);
 
            Ok(())
 
        } else {
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, call_expr.position,
 
                &format!(
 
                    "Expected {} polymorphic arguments (or none, to infer them), but {} were specified",
 
                    num_expected_poly_args, call_expr.poly_args.len()
 
                )
 
            ));
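Several of the checks above (checked_local_add, checked_label_add, find_variable, find_label) share one mechanism: search the current block, then climb parent scopes until a match is found or the definition root is reached. A condensed sketch of that walk over a flat, hypothetical block table (plain indices instead of the AST heap's Scope/BlockStatementId handles):

struct Block {
    labels: Vec<String>,
    parent: Option<usize>, // index of the parent block, None at the definition root
}

// Walk from `scope` upward until the label is found or the root is passed.
fn find_label(blocks: &[Block], mut scope: usize, name: &str) -> Result<usize, String> {
    loop {
        let block = &blocks[scope];
        if block.labels.iter().any(|label| label == name) {
            return Ok(scope);
        }
        match block.parent {
            Some(parent) => scope = parent,
            None => return Err(format!("Could not find label '{}'", name)),
        }
    }
}

fn main() {
    let blocks = vec![
        Block { labels: vec!["outer".to_string()], parent: None }, // outermost block
        Block { labels: Vec::new(), parent: Some(0) },             // nested block
    ];
    assert_eq!(find_label(&blocks, 1, "outer"), Ok(0));
    assert!(find_label(&blocks, 1, "missing").is_err());
}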
src/protocol/tests/utils.rs
 
@@ -548,17 +548,17 @@ impl<'a> ExpressionTester<'a> {
 
//------------------------------------------------------------------------------
 
// Interface for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct AstErrTester {
 
    test_name: String,
 
    error: ParseError2,
 
    error: ParseError,
 
}
 

	
 
impl AstErrTester {
 
    fn new(test_name: String, error: ParseError2) -> Self {
 
    fn new(test_name: String, error: ParseError) -> Self {
 
        Self{ test_name, error }
 
    }
 

	
 
    pub(crate) fn error<F: Fn(ErrorTester)>(&self, f: F) {
 
        // Maybe multiple errors will be supported in the future
 
        let tester = ErrorTester{ test_name: &self.test_name, error: &self.error };
 
@@ -569,13 +569,13 @@ impl AstErrTester {
 
//------------------------------------------------------------------------------
 
// Utilities for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct ErrorTester<'a> {
 
    test_name: &'a str,
 
    error: &'a ParseError2,
 
    error: &'a ParseError,
 
}
 

	
 
impl<'a> ErrorTester<'a> {
 
    pub(crate) fn assert_num(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.error.statements.len(),