Changeset - c87205ed6292
MH - 4 years ago 2021-04-02 14:59:36
contact@maxhenger.nl
cleanup consume_type function name and rename ParseError
8 files changed:
src/protocol/inputsource.rs
 
@@ -66,51 +66,48 @@ impl InputSource {
 
                let mut f = File::open(path)?;
 
                InputSource::new(filename.to_string_lossy(), &mut f)
 
            }
 
            None => Err(io::Error::new(io::ErrorKind::NotFound, "Invalid path")),
 
        }
 
    }
 
    pub fn from_string(string: &str) -> io::Result<InputSource> {
 
        let buffer = Box::new(string);
 
        let mut bytes = buffer.as_bytes();
 
        InputSource::new(String::new(), &mut bytes)
 
    }
 
    pub fn from_buffer(buffer: &[u8]) -> io::Result<InputSource> {
 
        InputSource::new(String::new(), &mut Box::new(buffer))
 
    }
 
    // Internal methods
 
    pub fn pos(&self) -> InputPosition {
 
        InputPosition { line: self.line, column: self.column, offset: self.offset }
 
    }
 
    pub fn seek(&mut self, pos: InputPosition) {
 
        debug_assert!(pos.offset < self.input.len());
 
        self.line = pos.line;
 
        self.column = pos.column;
 
        self.offset = pos.offset;
 
    }
 
    // pub fn error<S: ToString>(&self, message: S) -> ParseError {
 
    //     self.pos().parse_error(message)
 
    // }
 
    pub fn is_eof(&self) -> bool {
 
        self.next() == None
 
    }
 

	
 
    pub fn next(&self) -> Option<u8> {
 
        if self.offset < self.input.len() {
 
            Some(self.input[self.offset])
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    pub fn lookahead(&self, pos: usize) -> Option<u8> {
 
        let offset_pos = self.offset + pos;
 
        if offset_pos < self.input.len() {
 
            Some(self.input[offset_pos])
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    pub fn has(&self, to_compare: &[u8]) -> bool {
 
        if self.offset + to_compare.len() <= self.input.len() {
 
            for idx in 0..to_compare.len() {
 
@@ -146,51 +143,48 @@ impl fmt::Display for InputSource {
 
        self.pos().fmt(f)
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
 
pub struct InputPosition {
 
    line: usize,
 
    column: usize,
 
    pub(crate) offset: usize,
 
}
 

	
 
impl InputPosition {
 
    fn context<'a>(&self, source: &'a InputSource) -> &'a [u8] {
 
        let start = self.offset - (self.column - 1);
 
        let mut end = self.offset;
 
        while end < source.input.len() {
 
            let cur = (*source.input)[end];
 
            if cur == b'\n' || cur == b'\r' {
 
                break;
 
            }
 
            end += 1;
 
        }
 
        &source.input[start..end]
 
    }
 
    // fn parse_error<S: ToString>(&self, message: S) -> ParseError {
 
    //     ParseError { position: *self, message: message.to_string(), backtrace: Backtrace::new() }
 
    // }
 
    fn eval_error<S: ToString>(&self, message: S) -> EvalError {
 
        EvalError { position: *self, message: message.to_string(), backtrace: Backtrace::new() }
 
    }
 

	
 
    pub(crate) fn col(&self) -> usize { self.column }
 
}
 

	
 
impl Default for InputPosition {
 
    fn default() -> Self {
 
        Self{ line: 1, column: 1, offset: 0 }
 
    }
 
}
 

	
 
impl fmt::Display for InputPosition {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}:{}", self.line, self.column)
 
    }
 
}
 

	
 
pub trait SyntaxElement {
 
    fn position(&self) -> InputPosition;
 
    fn error<S: ToString>(&self, message: S) -> EvalError {
 
        self.position().eval_error(message)
 
    }
 
@@ -257,69 +251,69 @@ impl fmt::Display for ParseErrorStatement {
 

	
 
        // Write underline indicating where the error occurred
 
        debug_assert!(self.position.column <= self.context.chars().count());
 
        let mut arrow = String::with_capacity(self.context.len() + 3);
 
        arrow.push_str(" | ");
 
        let mut char_col = 1;
 
        for char in self.context.chars() {
 
            if char_col == self.position.column { break; }
 
            if char == '\t' {
 
                arrow.push('\t');
 
            } else {
 
                arrow.push(' ');
 
            }
 

	
 
            char_col += 1;
 
        }
 
        arrow.push('^');
 
        writeln!(f, "{}", arrow)?;
 

	
 
        Ok(())
 
    }
 
}
 

	
 
#[derive(Debug)]
 
pub struct ParseError2 {
 
pub struct ParseError {
 
    pub(crate) statements: Vec<ParseErrorStatement>
 
}
 

	
 
impl fmt::Display for ParseError2 {
 
impl fmt::Display for ParseError {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        if self.statements.is_empty() {
 
            return Ok(())
 
        }
 

	
 
        self.statements[0].fmt(f)?;
 
        for statement in self.statements.iter().skip(1) {
 
            writeln!(f)?;
 
            statement.fmt(f)?;
 
        }
 

	
 
        Ok(())
 
    }
 
}
 

	
 
impl ParseError2 {
 
impl ParseError {
 
    pub fn empty() -> Self {
 
        Self{ statements: Vec::new() }
 
    }
 

	
 
    pub fn new_error(source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source(ParseErrorType::Error, source, position, msg))}
 
    }
 

	
 
    pub fn with_prefixed(mut self, error_type: ParseErrorType, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        self.statements.insert(0, ParseErrorStatement::from_source(error_type, source, position, msg));
 
        self
 
    }
 

	
 
    pub fn with_postfixed(mut self, error_type: ParseErrorType, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        self.statements.push(ParseErrorStatement::from_source(error_type, source, position, msg));
 
        self
 
    }
 

	
 
    pub fn with_postfixed_info(self, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        self.with_postfixed(ParseErrorType::Info, source, position, msg)
 
    }
 
}
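// Usage sketch (hedged; the locals below are hypothetical): a multi-statement
// error can be built by chaining the constructors above, e.g.
//
//     let err = ParseError::new_error(&source, ident_pos, "Expected identifier")
//         .with_postfixed_info(&source, decl_pos, "While parsing this definition");
//
// The Display impl then prints each statement on its own line.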
 

	
 
#[derive(Debug, Clone)]
src/protocol/lexer.rs
 
@@ -95,71 +95,71 @@ fn lowercase(x: u8) -> u8 {
 
}
 

	
 
fn identifier_as_namespaced(identifier: Identifier) -> NamespacedIdentifier {
 
    let identifier_len = identifier.value.len();
 
    debug_assert!(identifier_len < u16::max_value() as usize);
 
    NamespacedIdentifier{
 
        position: identifier.position,
 
        value: identifier.value,
 
        poly_args: Vec::new(),
 
        parts: vec![
 
            NamespacedIdentifierPart::Identifier{start: 0, end: identifier_len as u16}
 
        ],
 
    }
 
}
 

	
 
pub struct Lexer<'a> {
 
    source: &'a mut InputSource,
 
    level: usize,
 
}
 

	
 
impl Lexer<'_> {
 
    pub fn new(source: &mut InputSource) -> Lexer {
 
        Lexer { source, level: 0 }
 
    }
 
    fn error_at_pos(&self, msg: &str) -> ParseError2 {
 
        ParseError2::new_error(self.source, self.source.pos(), msg)
 
    fn error_at_pos(&self, msg: &str) -> ParseError {
 
        ParseError::new_error(self.source, self.source.pos(), msg)
 
    }
 
    fn consume_line(&mut self) -> Result<Vec<u8>, ParseError2> {
 
    fn consume_line(&mut self) -> Result<Vec<u8>, ParseError> {
 
        let mut result: Vec<u8> = Vec::new();
 
        let mut next = self.source.next();
 
        while next.is_some() && next != Some(b'\n') && next != Some(b'\r') {
 
            if !(is_vchar(next) || is_wsp(next)) {
 
                return Err(self.error_at_pos("Expected visible character or whitespace"));
 
            }
 
            result.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 
        if next.is_some() {
 
            self.source.consume();
 
        }
 
        if next == Some(b'\r') && self.source.next() == Some(b'\n') {
 
            self.source.consume();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_whitespace(&mut self, expected: bool) -> Result<(), ParseError2> {
 
    fn consume_whitespace(&mut self, expected: bool) -> Result<(), ParseError> {
 
        let mut found = false;
 
        let mut next = self.source.next();
 
        while next.is_some() {
 
            if next == Some(b' ')
 
                || next == Some(b'\t')
 
                || next == Some(b'\r')
 
                || next == Some(b'\n')
 
            {
 
                self.source.consume();
 
                next = self.source.next();
 
                found = true;
 
                continue;
 
            }
 
            if next == Some(b'/') {
 
                next = self.source.lookahead(1);
 
                if next == Some(b'/') {
 
                    self.source.consume(); // slash
 
                    self.source.consume(); // slash
 
                    self.consume_line()?;
 
                    next = self.source.next();
 
                    found = true;
 
                    continue;
 
                }
 
                if next == Some(b'*') {
 
@@ -187,105 +187,105 @@ impl Lexer<'_> {
 
        }
 
        if expected && !found {
 
            Err(self.error_at_pos("Expected whitespace"))
 
        } else {
 
            Ok(())
 
        }
 
    }
 
    fn consume_any_chars(&mut self) {
 
        if !is_ident_start(self.source.next()) { return }
 
        self.source.consume();
 
        while is_ident_rest(self.source.next()) {
 
            self.source.consume()
 
        }
 
    }
 
    fn has_keyword(&self, keyword: &[u8]) -> bool {
 
        if !self.source.has(keyword) {
 
            return false;
 
        }
 

	
 
        // Word boundary
 
        let next = self.source.lookahead(keyword.len());
 
        if next.is_none() { return true; }
 
        return !is_ident_rest(next);
 
    }
 
    fn consume_keyword(&mut self, keyword: &[u8]) -> Result<(), ParseError2> {
 
    fn consume_keyword(&mut self, keyword: &[u8]) -> Result<(), ParseError> {
 
        let len = keyword.len();
 
        for i in 0..len {
 
            let expected = Some(lowercase(keyword[i]));
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected keyword '{}'", String::from_utf8_lossy(keyword))));
 
            }
 
            self.source.consume();
 
        }
 
        if let Some(next) = self.source.next() {
 
            if next >= b'A' && next <= b'Z' || next >= b'a' && next <= b'z' || next >= b'0' && next <= b'9' {
 
                return Err(self.error_at_pos(&format!("Expected word boundary after '{}'", String::from_utf8_lossy(keyword))));
 
            }
 
        }
 
        Ok(())
 
    }
 
    fn has_string(&self, string: &[u8]) -> bool {
 
        self.source.has(string)
 
    }
 
    fn consume_string(&mut self, string: &[u8]) -> Result<(), ParseError2> {
 
    fn consume_string(&mut self, string: &[u8]) -> Result<(), ParseError> {
 
        let len = string.len();
 
        for i in 0..len {
 
            let expected = Some(string[i]);
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected {}", String::from_utf8_lossy(string))));
 
            }
 
            self.source.consume();
 
        }
 
        Ok(())
 
    }
 
    /// Generic comma-separated consumer. If the opening delimiter is not found
 
    /// then `Ok(None)` is returned. Otherwise this consumes the comma-separated
 
    /// values, allowing a trailing comma. If no comma is found and the closing
 
    /// delimiter is not found, then a parse error with `expected_end_msg` is
 
    /// returned.
 
    fn consume_comma_separated<T, F>(
 
        &mut self, h: &mut Heap, open: u8, close: u8, expected_end_msg: &str, func: F
 
    ) -> Result<Option<Vec<T>>, ParseError2>
 
        where F: Fn(&mut Lexer, &mut Heap) -> Result<T, ParseError2>
 
    ) -> Result<Option<Vec<T>>, ParseError>
 
        where F: Fn(&mut Lexer, &mut Heap) -> Result<T, ParseError>
 
    {
 
        if Some(open) != self.source.next() {
 
            return Ok(None)
 
        }
 

	
 
        self.source.consume();
 
        self.consume_whitespace(false)?;
 
        let mut elements = Vec::new();
 
        let mut had_comma = true;
 

	
 
        loop {
 
            if Some(close) == self.source.next() {
 
                self.source.consume();
 
                break;
 
            } else if !had_comma {
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    &self.source, self.source.pos(), expected_end_msg
 
                ));
 
            }
 

	
 
            elements.push(func(self, h)?);
 
            self.consume_whitespace(false)?;
 

	
 
            had_comma = self.source.next() == Some(b',');
 
            if had_comma {
 
                self.source.consume();
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 

	
 
        Ok(Some(elements))
 
    }
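    // Usage sketch: `consume_parameters` further below calls this as
    //
    //     self.consume_comma_separated(
    //         h, b'(', b')', "Expected the end of the parameter list",
    //         |lexer, heap| lexer.consume_parameter(heap)
    //     )?
    //
    // yielding `Ok(None)` when no '(' is present, and otherwise the parsed
    // elements, with a trailing comma allowed before the closing ')'.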
 
    /// Essentially the same as `consume_comma_separated`, but will not allocate
 
    /// memory. Will return `Ok(true)` and leave the input position at the end
 
    /// of the comma-separated list if well formed and `Ok(false)` if the list is
 
    /// not present. Otherwise returns `Err(())` and leaves the input position 
 
    /// at a "random" position.
 
    fn consume_comma_separated_spilled_without_pos_recovery<F: Fn(&mut Lexer) -> bool>(
 
        &mut self, open: u8, close: u8, func: F
 
    ) -> Result<bool, ()> {
 
@@ -293,101 +293,101 @@ impl Lexer<'_> {
 
            return Ok(false);
 
        }
 

	
 
        self.source.consume();
 
        if self.consume_whitespace(false).is_err() { return Err(()) };
 
        let mut had_comma = true;
 
        loop {
 
            if Some(close) == self.source.next() {
 
                self.source.consume();
 
                return Ok(true);
 
            } else if !had_comma {
 
                return Err(());
 
            }
 

	
 
            if !func(self) { return Err(()); }
 
            if self.consume_whitespace(false).is_err() { return Err(()) };
 

	
 
            had_comma = self.source.next() == Some(b',');
 
            if had_comma {
 
                self.source.consume();
 
                if self.consume_whitespace(false).is_err() { return Err(()); }
 
            }
 
        }
 
    }
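    // Usage sketch: this spilled variant backs speculative parsing, e.g.
    // `maybe_consume_poly_args_spilled_without_pos_recovery` calls it with
    // `b'<'`, `b'>'` and a type-consuming closure; `Ok(true)`/`Ok(false)`
    // report a present/absent list, while `Err(())` means the caller must
    // restore the input position itself.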
 
    fn consume_ident(&mut self) -> Result<Vec<u8>, ParseError2> {
 
    fn consume_ident(&mut self) -> Result<Vec<u8>, ParseError> {
 
        if !self.has_identifier() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let mut result = Vec::new();
 
        let mut next = self.source.next();
 
        result.push(next.unwrap());
 
        self.source.consume();
 
        next = self.source.next();
 
        while is_ident_rest(next) {
 
            result.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 
        Ok(result)
 
    }
 
    fn has_integer(&mut self) -> bool {
 
        is_integer_start(self.source.next())
 
    }
 
    fn consume_integer(&mut self) -> Result<i64, ParseError2> {
 
    fn consume_integer(&mut self) -> Result<i64, ParseError> {
 
        let position = self.source.pos();
 
        let mut data = Vec::new();
 
        let mut next = self.source.next();
 
        while is_integer_rest(next) {
 
            data.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 

	
 
        let data_len = data.len();
 
        debug_assert_ne!(data_len, 0);
 
        if data_len == 1 {
 
            debug_assert!(data[0] >= b'0' && data[0] <= b'9');
 
            return Ok((data[0] - b'0') as i64);
 
        } else {
 
            // TODO: Fix, u64 should be supported as well
 
            let parsed = if data[1] == b'b' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 2)
 
            } else if data[1] == b'o' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 8)
 
            } else if data[1] == b'x' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 16)
 
            } else {
 
                // Assume decimal
 
                let data = String::from_utf8_lossy(&data);
 
                i64::from_str_radix(&data, 10)
 
            };
 

	
 
            if let Err(_err) = parsed {
 
                return Err(ParseError2::new_error(&self.source, position, "Invalid integer constant"));
 
                return Err(ParseError::new_error(&self.source, position, "Invalid integer constant"));
 
            }
 

	
 
            Ok(parsed.unwrap())
 
        }
 
    }
 

	
 
    // Statement keywords
 
    // TODO: Clean up these functions
 
    fn has_statement_keyword(&self) -> bool {
 
        self.has_keyword(b"channel")
 
            || self.has_keyword(b"skip")
 
            || self.has_keyword(b"if")
 
            || self.has_keyword(b"while")
 
            || self.has_keyword(b"break")
 
            || self.has_keyword(b"continue")
 
            || self.has_keyword(b"synchronous")
 
            || self.has_keyword(b"return")
 
            || self.has_keyword(b"assert")
 
            || self.has_keyword(b"goto")
 
            || self.has_keyword(b"new")
 
    }
 
    fn has_type_keyword(&self) -> bool {
 
        self.has_keyword(b"in")
 
            || self.has_keyword(b"out")
 
@@ -405,79 +405,79 @@ impl Lexer<'_> {
 
            || self.has_keyword(b"create")
 
            || self.has_keyword(b"length")
 
    }
 
    fn has_reserved(&self) -> bool {
 
        self.has_statement_keyword()
 
            || self.has_type_keyword()
 
            || self.has_builtin_keyword()
 
            || self.has_keyword(b"let")
 
            || self.has_keyword(b"struct")
 
            || self.has_keyword(b"enum")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
            || self.has_keyword(b"null")
 
    }
 

	
 
    // Identifiers
 

	
 
    fn has_identifier(&self) -> bool {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return false;
 
        }
 
        let next = self.source.next();
 
        is_ident_start(next)
 
    }
 
    fn consume_identifier(&mut self) -> Result<Identifier, ParseError2> {
 
    fn consume_identifier(&mut self) -> Result<Identifier, ParseError> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let position = self.source.pos();
 
        let value = self.consume_ident()?;
 
        Ok(Identifier{ position, value })
 
    }
 
    fn consume_identifier_spilled(&mut self) -> Result<(), ParseError2> {
 
    fn consume_identifier_spilled(&mut self) -> Result<(), ParseError> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        self.consume_ident()?;
 
        Ok(())
 
    }
 

	
 
    fn consume_namespaced_identifier(&mut self, h: &mut Heap) -> Result<NamespacedIdentifier, ParseError2> {
 
    fn consume_namespaced_identifier(&mut self, h: &mut Heap) -> Result<NamespacedIdentifier, ParseError> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        // Consumes a single part of the namespaced identifier (and, if present,
 
        // its polymorphic arguments), updating `backup_pos` to the end of the
 
        // last successfully parsed element.
 
        // TODO: Continue here: if we fail to properly parse the polymorphic
 
        //  arguments, assume we have reached the end of the namespaced 
 
        //  identifier and are instead dealing with a less-than operator. Ugly?
 
        //  Yes. Needs tokenizer? Yes. 
 
        fn consume_part(
 
            l: &mut Lexer, h: &mut Heap, ident: &mut NamespacedIdentifier,
 
            backup_pos: &mut InputPosition
 
        ) -> Result<(), ParseError2> {
 
        ) -> Result<(), ParseError> {
 
            // Consume identifier
 
            if !ident.value.is_empty() {
 
                ident.value.extend(b"::");
 
            }
 
            let ident_start = ident.value.len();
 
            ident.value.extend(l.consume_ident()?);
 
            ident.parts.push(NamespacedIdentifierPart::Identifier{
 
                start: ident_start as u16,
 
                end: ident.value.len() as u16
 
            });
 

	
 
            // Maybe consume polymorphic args.
 
            *backup_pos = l.source.pos();
 
            l.consume_whitespace(false)?;
 
            match l.consume_polymorphic_args(h, true)? {
 
                Some(args) => {
 
                    let poly_start = ident.poly_args.len();
 
                    ident.poly_args.extend(args);
 

	
 
                    ident.parts.push(NamespacedIdentifierPart::PolyArgs{
 
                        start: poly_start as u16,
 
                        end: ident.poly_args.len() as u16,
 
                    });
 

	
 
@@ -493,216 +493,216 @@ impl Lexer<'_> {
 
            position: self.source.pos(),
 
            value: Vec::new(),
 
            poly_args: Vec::new(),
 
            parts: Vec::new(),
 
        };
 

	
 
        // Keep consuming parts separated by "::". We don't consume the trailing
 
        // whitespace, hence we keep a backup position at the end of the last
 
        // valid part of the namespaced identifier (i.e. the last ident, or the
 
        // last encountered polymorphic arguments).
 
        let mut backup_pos = self.source.pos();
 
        consume_part(self, h, &mut ident, &mut backup_pos)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 
            consume_part(self, h, &mut ident, &mut backup_pos)?;
 
            self.consume_whitespace(false)?;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(ident)
 
    }
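    // Illustrative sketch (hypothetical input): for `module::Thing<byte>` this
    // yields a NamespacedIdentifier whose `value` holds `module::Thing`, whose
    // `parts` record each identifier segment and the polymorphic argument list,
    // and whose `poly_args` holds the ParserTypeId of `byte`.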
 

	
 
    fn consume_namespaced_identifier_spilled(&mut self) -> Result<(), ParseError2> {
 
    fn consume_namespaced_identifier_spilled(&mut self) -> Result<(), ParseError> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        debug_log!("consume_nsident2_spilled: {}", debug_line!(self.source));
 

	
 
        fn consume_part_spilled(l: &mut Lexer, backup_pos: &mut InputPosition) -> Result<(), ParseError2> {
 
        fn consume_part_spilled(l: &mut Lexer, backup_pos: &mut InputPosition) -> Result<(), ParseError> {
 
            l.consume_ident()?;
 
            *backup_pos = l.source.pos();
 
            l.consume_whitespace(false)?;
 
            match l.maybe_consume_poly_args_spilled_without_pos_recovery() {
 
                Ok(true) => { *backup_pos = l.source.pos(); },
 
                Ok(false) => {},
 
                Err(_) => { return Err(l.error_at_pos("Failed to parse poly args (spilled)")) },
 
            }
 
            Ok(())
 
        }
 

	
 
        let mut backup_pos = self.source.pos();
 
        consume_part_spilled(self, &mut backup_pos)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 
            consume_part_spilled(self, &mut backup_pos)?;
 
            self.consume_whitespace(false)?;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(())
 
    }
 

	
 
    // Types and type annotations
 

	
 
    /// Consumes a type definition. When called the input position should be at
 
    /// the type specification. When done the input position will be at the end
 
    /// of the type specifications (hence may be at whitespace).
 
    fn consume_type2(&mut self, h: &mut Heap, allow_inference: bool) -> Result<ParserTypeId, ParseError2> {
 
    fn consume_type(&mut self, h: &mut Heap, allow_inference: bool) -> Result<ParserTypeId, ParseError> {
 
        // Small helper function to convert in/out polymorphic arguments. Not
 
        // pretty, but the returned boolean is true if the error is due to inference
 
        // not being allowed
 
        let reduce_port_poly_args = |
 
            heap: &mut Heap,
 
            port_pos: &InputPosition,
 
            args: Vec<ParserTypeId>,
 
        | -> Result<ParserTypeId, bool> {
 
            match args.len() {
 
                0 => if allow_inference {  
 
                    Ok(heap.alloc_parser_type(|this| ParserType{
 
                        this,
 
                        pos: port_pos.clone(),
 
                        variant: ParserTypeVariant::Inferred
 
                    }))
 
                } else {
 
                    Err(true)
 
                },
 
                1 => Ok(args[0]),
 
                _ => Err(false)
 
            }
 
        };
 

	
 
        // Consume the type
 
        debug_log!("consume_type2: {}", debug_line!(self.source));
 
        debug_log!("consume_type: {}", debug_line!(self.source));
 
        let pos = self.source.pos();
 
        let parser_type_variant = if self.has_keyword(b"msg") {
 
            self.consume_keyword(b"msg")?;
 
            ParserTypeVariant::Message
 
        } else if self.has_keyword(b"boolean") {
 
            self.consume_keyword(b"boolean")?;
 
            ParserTypeVariant::Bool
 
        } else if self.has_keyword(b"byte") {
 
            self.consume_keyword(b"byte")?;
 
            ParserTypeVariant::Byte
 
        } else if self.has_keyword(b"short") {
 
            self.consume_keyword(b"short")?;
 
            ParserTypeVariant::Short
 
        } else if self.has_keyword(b"int") {
 
            self.consume_keyword(b"int")?;
 
            ParserTypeVariant::Int
 
        } else if self.has_keyword(b"long") {
 
            self.consume_keyword(b"long")?;
 
            ParserTypeVariant::Long
 
        } else if self.has_keyword(b"str") {
 
            self.consume_keyword(b"str")?;
 
            ParserTypeVariant::String
 
        } else if self.has_keyword(b"auto") {
 
            if !allow_inference {
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                        &self.source, pos,
 
                        "Type inference is not allowed here"
 
                ));
 
            }
 

	
 
            self.consume_keyword(b"auto")?;
 
            ParserTypeVariant::Inferred
 
        } else if self.has_keyword(b"in") {
 
            // TODO: @cleanup: not particularly neat to have this special case
 
            //  where we enforce polyargs in the parser-phase
 
            self.consume_keyword(b"in")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?.unwrap_or_default();
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error|  {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'in' only allows for 1 polymorphic argument"
 
                    };
 
                    ParseError2::new_error(&self.source, pos, msg)
 
                    ParseError::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Input(poly_arg)
 
        } else if self.has_keyword(b"out") {
 
            self.consume_keyword(b"out")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?.unwrap_or_default();
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error| {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'out' only allows for 1 polymorphic argument, but {} were specified"
 
                    };
 
                    ParseError2::new_error(&self.source, pos, msg)
 
                    ParseError::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Output(poly_arg)
 
        } else {
 
            // Must be a symbolic type
 
            let identifier = self.consume_namespaced_identifier(h)?;
 
            ParserTypeVariant::Symbolic(SymbolicParserType{identifier, variant: None, poly_args2: Vec::new()})
 
        };
 

	
 
        // If the type was a basic type (not supporting polymorphic type
 
        // arguments), then we make sure the user did not specify any of them.
 
        let mut backup_pos = self.source.pos();
 
        if !parser_type_variant.supports_polymorphic_args() {
 
            self.consume_whitespace(false)?;
 
            if let Some(b'<') = self.source.next() {
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    &self.source, self.source.pos(),
 
                    "This type does not allow polymorphic arguments"
 
                ));
 
            }
 

	
 
            self.source.seek(backup_pos);
 
        }
 

	
 
        let mut parser_type_id = h.alloc_parser_type(|this| ParserType{
 
            this, pos, variant: parser_type_variant
 
        });
 

	
 
        // If we're dealing with arrays, then we need to wrap the currently
 
        // parsed type in array types
 
        self.consume_whitespace(false)?;
 
        while let Some(b'[') = self.source.next() {
 
            let pos = self.source.pos();
 
            self.source.consume();
 
            self.consume_whitespace(false)?;
 
            if let Some(b']') = self.source.next() {
 
                // Type is wrapped in an array
 
                self.source.consume();
 
                parser_type_id = h.alloc_parser_type(|this| ParserType{
 
                    this, pos, variant: ParserTypeVariant::Array(parser_type_id)
 
                });
 
                backup_pos = self.source.pos();
 

	
 
                // In case we're dealing with another array
 
                self.consume_whitespace(false)?;
 
            } else {
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    &self.source, pos,
 
                    "Expected a closing ']'"
 
                ));
 
            }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(parser_type_id)
 
    }
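    // Illustrative sketch (hypothetical input): `in<msg>[]` parses into an
    // Array ParserType whose element is an Input port carrying a Message
    // payload, while a bare `auto` is accepted only when `allow_inference`
    // is true.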
 

	
 
    /// Attempts to consume a type without returning it. If it doesn't encounter
 
    /// a well-formed type, then the input position is left at a "random"
 
    /// position.
 
    fn maybe_consume_type_spilled_without_pos_recovery(&mut self) -> bool {
 
        // Consume type identifier
 
        debug_log!("maybe_consume_type_spilled_...: {}", debug_line!(self.source));
 
        if self.has_type_keyword() {
 
            self.consume_any_chars();
 
        } else {
 
            let ident = self.consume_namespaced_identifier_spilled();
 
            if ident.is_err() { return false; }
 
        }
 

	
 
        // Consume any polymorphic arguments that follow the type identifier
 
@@ -736,549 +736,549 @@ impl Lexer<'_> {
 
        return true;
 
    }
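    // Note: callers interpret `true` as "a type was consumed" and `false` as
    // "no well-formed type here"; after `false` they are responsible for
    // restoring the input position themselves.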
 

	
 
    /// Attempts to consume polymorphic arguments without returning them. If it
 
    /// doesn't encounter well-formed polymorphic arguments, then the input
 
    /// position is left at a "random" position. Returns a boolean indicating if
 
    /// the poly_args list was present.
 
    fn maybe_consume_poly_args_spilled_without_pos_recovery(&mut self) -> Result<bool, ()> {
 
        debug_log!("maybe_consume_poly_args_spilled_...: {}", debug_line!(self.source));
 
        self.consume_comma_separated_spilled_without_pos_recovery(
 
            b'<', b'>', |lexer| {
 
                lexer.maybe_consume_type_spilled_without_pos_recovery()
 
            })
 
    }
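    // Sketch (hypothetical input): `<byte, Thing>` yields `Ok(true)`, absence
    // of a leading '<' yields `Ok(false)` without consuming anything, and a
    // malformed list yields `Err(())`.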
 

	
 
    /// Consumes polymorphic arguments and their delimiters if specified. If
 
    /// polyargs are present then the args are consumed and the input position
 
    /// will be placed after the polyarg list. If polyargs are not present then
 
    /// the input position will remain unmodified and an empty vector will be
 
    /// returned.
 
    ///
 
    /// Polymorphic arguments represent the specification of the parametric
 
    /// types of a polymorphic type: they specify the value of the polymorphic
 
    /// type's polymorphic variables.
 
    fn consume_polymorphic_args(&mut self, h: &mut Heap, allow_inference: bool) -> Result<Option<Vec<ParserTypeId>>, ParseError2> {
 
    fn consume_polymorphic_args(&mut self, h: &mut Heap, allow_inference: bool) -> Result<Option<Vec<ParserTypeId>>, ParseError> {
 
        self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic argument list",
 
            |lexer, heap| lexer.consume_type2(heap, allow_inference)
 
            |lexer, heap| lexer.consume_type(heap, allow_inference)
 
        )
 
    }
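    // Sketch (hypothetical input): `<byte, auto>` returns `Some` with two
    // ParserTypeIds (the `auto` one only when `allow_inference` is true),
    // while input that does not start with '<' returns `None` and leaves
    // the position untouched.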
 

	
 
    /// Consumes polymorphic variables. These are identifiers that are used
 
    /// within polymorphic types. The input position may be at whitespace. If
 
    /// polymorphic variables are present then the whitespace, wrapping
 
    /// delimiters and the polymorphic variables are consumed. Otherwise the
 
    /// input position will stay where it is. If no polymorphic variables are
 
    /// present then an empty vector will be returned.
 
    fn consume_polymorphic_vars(&mut self, h: &mut Heap) -> Result<Vec<Identifier>, ParseError2> {
 
    fn consume_polymorphic_vars(&mut self, h: &mut Heap) -> Result<Vec<Identifier>, ParseError> {
 
        let backup_pos = self.source.pos();
 
        match self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic variable list",
 
            |lexer, _heap| lexer.consume_identifier()
 
        )? {
 
            Some(poly_vars) => Ok(poly_vars),
 
            None => {
 
                self.source.seek(backup_pos);
 
                Ok(vec!())
 
            }
 
        }
 
    }
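    // Sketch (hypothetical input): the `<A, B>` in a definition such as
    // `struct Pair<A, B>` yields the identifiers `A` and `B`; when no '<'
    // follows, the backup position is restored and an empty vector returned.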
 

	
 
    // Parameters
 

	
 
    fn consume_parameter(&mut self, h: &mut Heap) -> Result<ParameterId, ParseError2> {
 
        let parser_type = self.consume_type2(h, false)?;
 
    fn consume_parameter(&mut self, h: &mut Heap) -> Result<ParameterId, ParseError> {
 
        let parser_type = self.consume_type(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let position = self.source.pos();
 
        let identifier = self.consume_identifier()?;
 
        let id =
 
            h.alloc_parameter(|this| Parameter { this, position, parser_type, identifier });
 
        Ok(id)
 
    }
 
    fn consume_parameters(&mut self, h: &mut Heap) -> Result<Vec<ParameterId>, ParseError2> {
 
    fn consume_parameters(&mut self, h: &mut Heap) -> Result<Vec<ParameterId>, ParseError> {
 
        match self.consume_comma_separated(
 
            h, b'(', b')', "Expected the end of the parameter list",
 
            |lexer, heap| lexer.consume_parameter(heap)
 
        )? {
 
            Some(params) => Ok(params),
 
            None => {
 
                Err(ParseError2::new_error(
 
                Err(ParseError::new_error(
 
                    &self.source, self.source.pos(),
 
                    "Expected a parameter list"
 
                ))
 
            }
 
        }
 
    }
 

	
 
    // ====================
 
    // Expressions
 
    // ====================
 

	
 
    fn consume_paren_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_paren_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        let result = self.consume_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b")")?;
 
        Ok(result)
 
    }
 
    fn consume_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested expression"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_assignment_expression(h);
 
        self.level -= 1;
 
        result
 
    }
 
    fn consume_assignment_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_assignment_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let result = self.consume_conditional_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        if self.has_assignment_operator() {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation = self.consume_assignment_operator()?;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_expression(h)?;
 
            Ok(h.alloc_assignment_expression(|this| AssignmentExpression {
 
                this,
 
                position,
 
                left,
 
                operation,
 
                right,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            })
 
            .upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 
    fn has_assignment_operator(&self) -> bool {
 
        self.has_string(b"=")
 
            || self.has_string(b"*=")
 
            || self.has_string(b"/=")
 
            || self.has_string(b"%=")
 
            || self.has_string(b"+=")
 
            || self.has_string(b"-=")
 
            || self.has_string(b"<<=")
 
            || self.has_string(b">>=")
 
            || self.has_string(b"&=")
 
            || self.has_string(b"^=")
 
            || self.has_string(b"|=")
 
    }
 
    fn consume_assignment_operator(&mut self) -> Result<AssignmentOperator, ParseError2> {
 
    fn consume_assignment_operator(&mut self) -> Result<AssignmentOperator, ParseError> {
 
        if self.has_string(b"=") {
 
            self.consume_string(b"=")?;
 
            Ok(AssignmentOperator::Set)
 
        } else if self.has_string(b"*=") {
 
            self.consume_string(b"*=")?;
 
            Ok(AssignmentOperator::Multiplied)
 
        } else if self.has_string(b"/=") {
 
            self.consume_string(b"/=")?;
 
            Ok(AssignmentOperator::Divided)
 
        } else if self.has_string(b"%=") {
 
            self.consume_string(b"%=")?;
 
            Ok(AssignmentOperator::Remained)
 
        } else if self.has_string(b"+=") {
 
            self.consume_string(b"+=")?;
 
            Ok(AssignmentOperator::Added)
 
        } else if self.has_string(b"-=") {
 
            self.consume_string(b"-=")?;
 
            Ok(AssignmentOperator::Subtracted)
 
        } else if self.has_string(b"<<=") {
 
            self.consume_string(b"<<=")?;
 
            Ok(AssignmentOperator::ShiftedLeft)
 
        } else if self.has_string(b">>=") {
 
            self.consume_string(b">>=")?;
 
            Ok(AssignmentOperator::ShiftedRight)
 
        } else if self.has_string(b"&=") {
 
            self.consume_string(b"&=")?;
 
            Ok(AssignmentOperator::BitwiseAnded)
 
        } else if self.has_string(b"^=") {
 
            self.consume_string(b"^=")?;
 
            Ok(AssignmentOperator::BitwiseXored)
 
        } else if self.has_string(b"|=") {
 
            self.consume_string(b"|=")?;
 
            Ok(AssignmentOperator::BitwiseOred)
 
        } else {
 
            Err(self.error_at_pos("Expected assignment operator"))
 
        }
 
    }
 
    fn consume_conditional_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_conditional_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let result = self.consume_concat_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        if self.has_string(b"?") {
 
            let position = self.source.pos();
 
            let test = result;
 
            self.consume_string(b"?")?;
 
            self.consume_whitespace(false)?;
 
            let true_expression = self.consume_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            self.consume_string(b":")?;
 
            self.consume_whitespace(false)?;
 
            let false_expression = self.consume_expression(h)?;
 
            Ok(h.alloc_conditional_expression(|this| ConditionalExpression {
 
                this,
 
                position,
 
                test,
 
                true_expression,
 
                false_expression,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            })
 
            .upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 
    fn consume_concat_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_concat_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_lor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"@") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"@")?;
 
            let operation = BinaryOperator::Concatenate;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_lor_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_lor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_lor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_land_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"||") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"||")?;
 
            let operation = BinaryOperator::LogicalOr;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_land_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_land_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_land_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_bor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"&&") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"&&")?;
 
            let operation = BinaryOperator::LogicalAnd;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_bor_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_bor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_bor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_xor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"|") && !self.has_string(b"||") && !self.has_string(b"|=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"|")?;
 
            let operation = BinaryOperator::BitwiseOr;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_xor_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_xor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_xor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_band_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"^") && !self.has_string(b"^=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"^")?;
 
            let operation = BinaryOperator::BitwiseXor;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_band_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_band_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_band_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_eq_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"&") && !self.has_string(b"&&") && !self.has_string(b"&=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"&")?;
 
            let operation = BinaryOperator::BitwiseAnd;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_eq_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_eq_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_eq_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_rel_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"==") || self.has_string(b"!=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"==") {
 
                self.consume_string(b"==")?;
 
                operation = BinaryOperator::Equality;
 
            } else {
 
                self.consume_string(b"!=")?;
 
                operation = BinaryOperator::Inequality;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_rel_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_rel_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_rel_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_shift_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"<=")
 
            || self.has_string(b">=")
 
            || self.has_string(b"<") && !self.has_string(b"<<=")
 
            || self.has_string(b">") && !self.has_string(b">>=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"<=") {
 
                self.consume_string(b"<=")?;
 
                operation = BinaryOperator::LessThanEqual;
 
            } else if self.has_string(b">=") {
 
                self.consume_string(b">=")?;
 
                operation = BinaryOperator::GreaterThanEqual;
 
            } else if self.has_string(b"<") {
 
                self.consume_string(b"<")?;
 
                operation = BinaryOperator::LessThan;
 
            } else {
 
                self.consume_string(b">")?;
 
                operation = BinaryOperator::GreaterThan;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_shift_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_shift_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_shift_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_add_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"<<") && !self.has_string(b"<<=")
 
            || self.has_string(b">>") && !self.has_string(b">>=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"<<") {
 
                self.consume_string(b"<<")?;
 
                operation = BinaryOperator::ShiftLeft;
 
            } else {
 
                self.consume_string(b">>")?;
 
                operation = BinaryOperator::ShiftRight;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_add_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_add_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_add_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_mul_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"+") && !self.has_string(b"+=")
 
            || self.has_string(b"-") && !self.has_string(b"-=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"+") {
 
                self.consume_string(b"+")?;
 
                operation = BinaryOperator::Add;
 
            } else {
 
                self.consume_string(b"-")?;
 
                operation = BinaryOperator::Subtract;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_mul_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_mul_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_mul_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_prefix_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"*") && !self.has_string(b"*=")
 
            || self.has_string(b"/") && !self.has_string(b"/=")
 
            || self.has_string(b"%") && !self.has_string(b"%=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"*") {
 
                self.consume_string(b"*")?;
 
                operation = BinaryOperator::Multiply;
 
            } else if self.has_string(b"/") {
 
                self.consume_string(b"/")?;
 
                operation = BinaryOperator::Divide;
 
            } else {
 
                self.consume_string(b"%")?;
 
                operation = BinaryOperator::Remainder;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_prefix_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_prefix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_prefix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.has_string(b"+")
 
            || self.has_string(b"-")
 
            || self.has_string(b"~")
 
            || self.has_string(b"!")
 
        {
 
            let position = self.source.pos();
 
            let operation;
 
            if self.has_string(b"+") {
 
                self.consume_string(b"+")?;
 
                if self.has_string(b"+") {
 
                    self.consume_string(b"+")?;
 
                    operation = UnaryOperation::PreIncrement;
 
                } else {
 
                    operation = UnaryOperation::Positive;
 
                }
 
            } else if self.has_string(b"-") {
 
                self.consume_string(b"-")?;
 
                if self.has_string(b"-") {
 
                    self.consume_string(b"-")?;
 
                    operation = UnaryOperation::PreDecrement;
 
                } else {
 
                    operation = UnaryOperation::Negative;
 
                }
 
            } else if self.has_string(b"~") {
 
@@ -1288,49 +1288,49 @@ impl Lexer<'_> {
 
                self.consume_string(b"!")?;
 
                operation = UnaryOperation::LogicalNot;
 
            }
 
            self.consume_whitespace(false)?;
 
            if self.level >= MAX_LEVEL {
 
                return Err(self.error_at_pos("Too deeply nested expression"));
 
            }
 
            self.level += 1;
 
            let result = self.consume_prefix_expression(h);
 
            self.level -= 1;
 
            let expression = result?;
 
            return Ok(h
 
                .alloc_unary_expression(|this| UnaryExpression {
 
                    this,
 
                    position,
 
                    operation,
 
                    expression,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast());
 
        }
 
        self.consume_postfix_expression(h)
 
    }
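
    // Parses postfix constructs: '++', '--', indexing with '[' and field

    // selection with a single '.' (never the '..' sequence).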
 
    fn consume_postfix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_postfix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_primary_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"++")
 
            || self.has_string(b"--")
 
            || self.has_string(b"[")
 
            || (self.has_string(b".") && !self.has_string(b".."))
 
        {
 
            let mut position = self.source.pos();
 
            if self.has_string(b"++") {
 
                self.consume_string(b"++")?;
 
                let operation = UnaryOperation::PostIncrement;
 
                let expression = result;
 
                self.consume_whitespace(false)?;
 
                result = h
 
                    .alloc_unary_expression(|this| UnaryExpression {
 
                        this,
 
                        position,
 
                        operation,
 
                        expression,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            } else if self.has_string(b"--") {
 
@@ -1399,101 +1399,101 @@ impl Lexer<'_> {
 
                    self.consume_keyword(b"length")?;
 
                    field = Field::Length;
 
                } else {
 
                    let identifier = self.consume_identifier()?;
 
                    field = Field::Symbolic(FieldSymbolic{
 
                        identifier,
 
                        definition: None,
 
                        field_idx: 0,
 
                    });
 
                }
 
                result = h
 
                    .alloc_select_expression(|this| SelectExpression {
 
                        this,
 
                        position,
 
                        subject,
 
                        field,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            }
 
        }
 
        Ok(result)
 
    }
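
    // Parses a primary expression by dispatching on what follows: a

    // parenthesized expression, an array literal, a builtin literal, a struct

    // literal, a call expression or, as the fallback, a variable expression.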
 
    fn consume_primary_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_primary_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.has_string(b"(") {
 
            return self.consume_paren_expression(h);
 
        }
 
        if self.has_string(b"{") {
 
            return Ok(self.consume_array_expression(h)?.upcast());
 
        }
 
        if self.has_builtin_literal() {
 
            return Ok(self.consume_builtin_literal_expression(h)?.upcast());
 
        }
 
        if self.has_struct_literal() {
 
            return Ok(self.consume_struct_literal_expression(h)?.upcast());
 
        }
 
        if self.has_call_expression() {
 
            return Ok(self.consume_call_expression(h)?.upcast());
 
        }
 
        Ok(self.consume_variable_expression(h)?.upcast())
 
    }
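
    // Parses a brace-delimited, comma-separated array literal such as

    // `{ 1, 2, 3 }`; empty braces yield an array expression without elements.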
 
    fn consume_array_expression(&mut self, h: &mut Heap) -> Result<ArrayExpressionId, ParseError2> {
 
    fn consume_array_expression(&mut self, h: &mut Heap) -> Result<ArrayExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        let mut elements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b"}") {
 
            while self.source.next().is_some() {
 
                elements.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b"}") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 
        self.consume_string(b"}")?;
 
        Ok(h.alloc_array_expression(|this| ArrayExpression {
 
            this,
 
            position,
 
            elements,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn has_builtin_literal(&self) -> bool {
 
        is_constant(self.source.next())
 
            || self.has_keyword(b"null")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
    }
 
    fn consume_builtin_literal_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<LiteralExpressionId, ParseError2> {
 
    ) -> Result<LiteralExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        let value;
 
        if self.has_keyword(b"null") {
 
            self.consume_keyword(b"null")?;
 
            value = Literal::Null;
 
        } else if self.has_keyword(b"true") {
 
            self.consume_keyword(b"true")?;
 
            value = Literal::True;
 
        } else if self.has_keyword(b"false") {
 
            self.consume_keyword(b"false")?;
 
            value = Literal::False;
 
        } else if self.source.next() == Some(b'\'') {
 
            self.source.consume();
 
            let mut data = Vec::new();
 
            let mut next = self.source.next();
 
            while next != Some(b'\'') && (is_vchar(next) || next == Some(b' ')) {
 
                data.push(next.unwrap());
 
                self.source.consume();
 
                next = self.source.next();
 
            }
 
            if next != Some(b'\'') || data.is_empty() {
 
                return Err(self.error_at_pos("Expected character constant"));
 
            }
 
            self.source.consume();
 
@@ -1507,110 +1507,110 @@ impl Lexer<'_> {
 
        }
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression {
 
            this,
 
            position,
 
            value,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    fn has_struct_literal(&mut self) -> bool {
 
        // A struct literal is written as:
 
        //      namespace::StructName<maybe_one_of_these, auto>{ field: expr }
 
        // We will parse up until the opening brace to see if we're dealing with
 
        // a struct literal.
 
        let backup_pos = self.source.pos();
 
        let result = self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'{');
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 

	
 
    fn consume_struct_literal_expression(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError2> {
 
    fn consume_struct_literal_expression(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError> {
 
        // Consume identifier and polymorphic arguments
 
        debug_log!("consume_struct_literal_expression: {}", debug_line!(self.source));
 
        let position = self.source.pos();
 
        let identifier = self.consume_namespaced_identifier(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume fields
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let identifier = lexer.consume_identifier()?;
 
                lexer.consume_whitespace(false)?;
 
                lexer.consume_string(b":")?;
 
                lexer.consume_whitespace(false)?;
 
                let value = lexer.consume_expression(heap)?;
 

	
 
                Ok(LiteralStructField{ identifier, value, field_idx: 0 })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError2::new_error(
 
            None => return Err(ParseError::new_error(
 
                self.source, self.source.pos(),
 
                "A struct literal must be followed by its field values"
 
            ))
 
        };
 

	
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression{
 
            this,
 
            position,
 
            value: Literal::Struct(LiteralStruct{
 
                identifier,
 
                fields,
 
                poly_args2: Vec::new(),
 
                definition: None,
 
            }),
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        }))
 
    }
 

	
 
    fn has_call_expression(&mut self) -> bool {
 
        // We need to prevent ambiguity with various operators (because we may

        // be specifying polymorphic arguments) and with plain variable expressions.
 
        if self.has_builtin_keyword() {
 
            return true;
 
        }
 

	
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 

	
 
        if self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'(') {
 
            // Seems like we have a function call or an enum literal
 
            result = true;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
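
    // Parses a call expression. The builtin methods 'get', 'put', 'fires' and

    // 'create' are matched by keyword; any other callee is parsed as a

    // (possibly namespaced) symbolic method identifier.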
 
    fn consume_call_expression(&mut self, h: &mut Heap) -> Result<CallExpressionId, ParseError2> {
 
    fn consume_call_expression(&mut self, h: &mut Heap) -> Result<CallExpressionId, ParseError> {
 
        let position = self.source.pos();
 

	
 
        // Consume method identifier
 
        // TODO: @token Replace this conditional polymorphic arg parsing once we have a tokenizer.
 
        debug_log!("consume_call_expression: {}", debug_line!(self.source));
 
        let method;
 
        let mut consume_poly_args_explicitly = true;
 
        if self.has_keyword(b"get") {
 
            self.consume_keyword(b"get")?;
 
            method = Method::Get;
 
        } else if self.has_keyword(b"put") {
 
            self.consume_keyword(b"put")?;
 
            method = Method::Put;
 
        } else if self.has_keyword(b"fires") {
 
            self.consume_keyword(b"fires")?;
 
            method = Method::Fires;
 
        } else if self.has_keyword(b"create") {
 
            self.consume_keyword(b"create")?;
 
            method = Method::Create;
 
        } else {
 
            let identifier = self.consume_namespaced_identifier(h)?;
 
            method = Method::Symbolic(MethodSymbolic{
 
                identifier,
 
                definition: None
 
@@ -1636,100 +1636,100 @@ impl Lexer<'_> {
 
            while self.source.next().is_some() {
 
                arguments.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b")") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?
 
            }
 
        }
 
        self.consume_string(b")")?;
 
        Ok(h.alloc_call_expression(|this| CallExpression {
 
            this,
 
            position,
 
            method,
 
            arguments,
 
            poly_args,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn consume_variable_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<VariableExpressionId, ParseError2> {
 
    ) -> Result<VariableExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        debug_log!("consume_variable_expression: {}", debug_line!(self.source));
 

	
 
        // TODO: @token Reimplement when tokenizer is implemented, prevent ambiguities
 
        let identifier = identifier_as_namespaced(self.consume_identifier()?);
 

	
 
        Ok(h.alloc_variable_expression(|this| VariableExpression {
 
            this,
 
            position,
 
            identifier,
 
            declaration: None,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    // ====================
 
    // Statements
 
    // ====================
 

	
 
    /// Consumes any kind of statement from the source, returning an error if

    /// no statement could be parsed or if the statement is nested too deeply.
 
    ///
 
    /// `wrap_in_block` may be set to true to ensure that the parsed statement
 
    /// will be wrapped in a block statement if it is not already a block
 
    /// statement. This is used to ensure that all `if`, `while` and `sync`
 
    /// statements have a block statement as body.
 
    fn consume_statement(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError2> {
 
    fn consume_statement(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested statement"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_statement_impl(h, wrap_in_block);
 
        self.level -= 1;
 
        result
 
    }
 
    fn has_label(&mut self) -> bool {
 
        // To prevent ambiguity with expression statements consisting only of an
 
        // identifier or a namespaced identifier, we look ahead and match on the
 
        // *single* colon that signals a labeled statement.
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 
        if self.consume_identifier_spilled().is_ok() {
 
            // next character is ':', second character is NOT ':'
 
            result = Some(b':') == self.source.next() && Some(b':') != self.source.lookahead(1)
 
        }
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_statement_impl(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError2> {
 
    fn consume_statement_impl(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError> {
 
        // Parse and allocate statement
 
        let mut must_wrap = true;
 
        let mut stmt_id = if self.has_string(b"{") {
 
            must_wrap = false;
 
            self.consume_block_statement(h)?
 
        } else if self.has_keyword(b"skip") {
 
            must_wrap = false;
 
            self.consume_skip_statement(h)?.upcast()
 
        } else if self.has_keyword(b"if") {
 
            self.consume_if_statement(h)?.upcast()
 
        } else if self.has_keyword(b"while") {
 
            self.consume_while_statement(h)?.upcast()
 
        } else if self.has_keyword(b"break") {
 
            self.consume_break_statement(h)?.upcast()
 
        } else if self.has_keyword(b"continue") {
 
            self.consume_continue_statement(h)?.upcast()
 
        } else if self.has_keyword(b"synchronous") {
 
            self.consume_synchronous_statement(h)?.upcast()
 
        } else if self.has_keyword(b"return") {
 
            self.consume_return_statement(h)?.upcast()
 
        } else if self.has_keyword(b"assert") {
 
            self.consume_assert_statement(h)?.upcast()
 
        } else if self.has_keyword(b"goto") {
 
            self.consume_goto_statement(h)?.upcast()
 
@@ -1765,159 +1765,159 @@ impl Lexer<'_> {
 
        a type annotation followed by another identifier.
 
        Example:
 
          my_type[] x = {5}; // memory statement
 
          my_var[5] = x; // assignment expression, expression statement
 
        Note how both the local and the assignment
 
        start with an arbitrary identifier followed by '['. */
 
        if self.has_keyword(b"channel") {
 
            return true;
 
        }
 
        if self.has_statement_keyword() {
 
            return false;
 
        }
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 
        if self.maybe_consume_type_spilled_without_pos_recovery() {
 
            // We seem to have a valid type, do we now have an identifier?
 
            if self.consume_whitespace(true).is_ok() {
 
                result = self.has_identifier();
 
            }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
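
    // Parses a '{ ... }' block statement. Local declarations (channel and

    // memory statements) are only accepted at the start of the block, and an

    // empty block is allocated as a skip statement instead.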
 
    fn consume_block_statement(&mut self, h: &mut Heap) -> Result<StatementId, ParseError2> {
 
    fn consume_block_statement(&mut self, h: &mut Heap) -> Result<StatementId, ParseError> {
 
        let position = self.source.pos();
 
        let mut statements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        while self.has_local_statement() {
 
            let (local_id, stmt_id) = self.consume_local_statement(h)?;
 
            statements.push(local_id.upcast());
 
            if let Some(stmt_id) = stmt_id {
 
                statements.push(stmt_id.upcast());
 
            }
 
            self.consume_whitespace(false)?;
 
        }
 
        while !self.has_string(b"}") {
 
            statements.push(self.consume_statement(h, false)?);
 
            self.consume_whitespace(false)?;
 
        }
 
        self.consume_string(b"}")?;
 
        if statements.is_empty() {
 
            Ok(h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }).upcast())
 
        } else {
 
            Ok(h.alloc_block_statement(|this| BlockStatement {
 
                this,
 
                position,
 
                statements,
 
                parent_scope: None,
 
                relative_pos_in_parent: 0,
 
                locals: Vec::new(),
 
                labels: Vec::new(),
 
            })
 
            .upcast())
 
        }
 
    }
 
    fn consume_local_statement(&mut self, h: &mut Heap) -> Result<(LocalStatementId, Option<ExpressionStatementId>), ParseError2> {
 
    fn consume_local_statement(&mut self, h: &mut Heap) -> Result<(LocalStatementId, Option<ExpressionStatementId>), ParseError> {
 
        if self.has_keyword(b"channel") {
 
            let local_id = self.consume_channel_statement(h)?.upcast();
 
            Ok((local_id, None))
 
        } else {
 
            let (memory_id, stmt_id) = self.consume_memory_statement(h)?;
 
            Ok((memory_id.upcast(), Some(stmt_id)))
 
        }
 
    }
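
    // Parses a channel declaration, e.g. `channel<T> a -> b;`. At most one

    // polymorphic argument is accepted; when omitted the port type argument

    // is left to be inferred.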
 
    fn consume_channel_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ChannelStatementId, ParseError2> {
 
    ) -> Result<ChannelStatementId, ParseError> {
 
        // Consume channel statement and polymorphic argument if specified.
 
        // Needs a tiny bit of special parsing to ensure the right amount of
 
        // whitespace is present.
 
        let position = self.source.pos();
 
        self.consume_keyword(b"channel")?;
 

	
 
        let expect_whitespace = self.source.next() != Some(b'<');
 
        self.consume_whitespace(expect_whitespace)?;
 
        let poly_args = self.consume_polymorphic_args(h, true)?.unwrap_or_default();
 
        let poly_arg_id = match poly_args.len() {
 
            0 => h.alloc_parser_type(|this| ParserType{
 
                this, pos: position.clone(), variant: ParserTypeVariant::Inferred,
 
            }),
 
            1 => poly_args[0],
 
            _ => return Err(ParseError2::new_error(
 
            _ => return Err(ParseError::new_error(
 
                &self.source, self.source.pos(),
 
                "port construction using 'channel' accepts up to 1 polymorphic argument"
 
            ))
 
        };
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume the output port
 
        let out_parser_type = h.alloc_parser_type(|this| ParserType{
 
            this, pos: position.clone(), variant: ParserTypeVariant::Output(poly_arg_id)
 
        });
 
        let out_identifier = self.consume_identifier()?;
 

	
 
        // Consume the "->" syntax
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b"->")?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume the input port
 
        let in_parser_type = h.alloc_parser_type(|this| ParserType{
 
            this, pos: position.clone(), variant: ParserTypeVariant::Input(poly_arg_id)
 
        });
 
        let in_identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        let out_port = h.alloc_local(|this| Local {
 
            this,
 
            position,
 
            parser_type: out_parser_type,
 
            identifier: out_identifier,
 
            relative_pos_in_block: 0
 
        });
 
        let in_port = h.alloc_local(|this| Local {
 
            this,
 
            position,
 
            parser_type: in_parser_type,
 
            identifier: in_identifier,
 
            relative_pos_in_block: 0
 
        });
 
        Ok(h.alloc_channel_statement(|this| ChannelStatement {
 
            this,
 
            position,
 
            from: out_port,
 
            to: in_port,
 
            relative_pos_in_block: 0,
 
            next: None,
 
        }))
 
    }
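
    // Parses a memory declaration such as `my_type x = expr;` and desugars it

    // into a memory statement for the declaration plus an expression statement

    // performing the initial assignment.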
 
    fn consume_memory_statement(&mut self, h: &mut Heap) -> Result<(MemoryStatementId, ExpressionStatementId), ParseError2> {
 
    fn consume_memory_statement(&mut self, h: &mut Heap) -> Result<(MemoryStatementId, ExpressionStatementId), ParseError> {
 
        let position = self.source.pos();
 
        let parser_type = self.consume_type2(h, true)?;
 
        let parser_type = self.consume_type(h, true)?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let assignment_position = self.source.pos();
 
        self.consume_string(b"=")?;
 
        self.consume_whitespace(false)?;
 
        let initial = self.consume_expression(h)?;
 
        let variable = h.alloc_local(|this| Local {
 
            this,
 
            position,
 
            parser_type,
 
            identifier: identifier.clone(),
 
            relative_pos_in_block: 0
 
        });
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 

	
 
        // Transform into the variable declaration, followed by an assignment
 
        let memory_stmt_id = h.alloc_memory_statement(|this| MemoryStatement {
 
            this,
 
            position,
 
            variable,
 
            next: None,
 
        });
 
@@ -1928,497 +1928,497 @@ impl Lexer<'_> {
 
            declaration: None,
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        });
 
        let assignment_expr_id = h.alloc_assignment_expression(|this| AssignmentExpression{
 
            this,
 
            position: assignment_position,
 
            left: variable_expr_id.upcast(),
 
            operation: AssignmentOperator::Set,
 
            right: initial,
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        });
 
        let assignment_stmt_id = h.alloc_expression_statement(|this| ExpressionStatement{
 
            this,
 
            position,
 
            expression: assignment_expr_id.upcast(),
 
            next: None
 
        });
 
        Ok((memory_stmt_id, assignment_stmt_id))
 
    }
 
    fn consume_labeled_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<LabeledStatementId, ParseError2> {
 
    ) -> Result<LabeledStatementId, ParseError> {
 
        let position = self.source.pos();
 
        let label = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b":")?;
 
        self.consume_whitespace(false)?;
 
        let body = self.consume_statement(h, false)?;
 
        Ok(h.alloc_labeled_statement(|this| LabeledStatement {
 
            this,
 
            position,
 
            label,
 
            body,
 
            relative_pos_in_block: 0,
 
            in_sync: None,
 
        }))
 
    }
 
    fn consume_skip_statement(&mut self, h: &mut Heap) -> Result<SkipStatementId, ParseError2> {
 
    fn consume_skip_statement(&mut self, h: &mut Heap) -> Result<SkipStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"skip")?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }))
 
    }
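
    // Parses an if statement; when no 'else' branch is present a skip

    // statement is allocated as the false body.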
 
    fn consume_if_statement(&mut self, h: &mut Heap) -> Result<IfStatementId, ParseError2> {
 
    fn consume_if_statement(&mut self, h: &mut Heap) -> Result<IfStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"if")?;
 
        self.consume_whitespace(false)?;
 
        let test = self.consume_paren_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        let true_body = self.consume_statement(h, true)?;
 
        self.consume_whitespace(false)?;
 
        let false_body = if self.has_keyword(b"else") {
 
            self.consume_keyword(b"else")?;
 
            self.consume_whitespace(false)?;
 
            self.consume_statement(h, true)?
 
        } else {
 
            h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }).upcast()
 
        };
 
        Ok(h.alloc_if_statement(|this| IfStatement { this, position, test, true_body, false_body, end_if: None }))
 
    }
 
    fn consume_while_statement(&mut self, h: &mut Heap) -> Result<WhileStatementId, ParseError2> {
 
    fn consume_while_statement(&mut self, h: &mut Heap) -> Result<WhileStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"while")?;
 
        self.consume_whitespace(false)?;
 
        let test = self.consume_paren_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        let body = self.consume_statement(h, true)?;
 
        Ok(h.alloc_while_statement(|this| WhileStatement {
 
            this,
 
            position,
 
            test,
 
            body,
 
            end_while: None,
 
            in_sync: None,
 
        }))
 
    }
 
    fn consume_break_statement(&mut self, h: &mut Heap) -> Result<BreakStatementId, ParseError2> {
 
    fn consume_break_statement(&mut self, h: &mut Heap) -> Result<BreakStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"break")?;
 
        self.consume_whitespace(false)?;
 
        let label;
 
        if self.has_identifier() {
 
            label = Some(self.consume_identifier()?);
 
            self.consume_whitespace(false)?;
 
        } else {
 
            label = None;
 
        }
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_break_statement(|this| BreakStatement { this, position, label, target: None }))
 
    }
 
    fn consume_continue_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ContinueStatementId, ParseError2> {
 
    ) -> Result<ContinueStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"continue")?;
 
        self.consume_whitespace(false)?;
 
        let label;
 
        if self.has_identifier() {
 
            label = Some(self.consume_identifier()?);
 
            self.consume_whitespace(false)?;
 
        } else {
 
            label = None;
 
        }
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_continue_statement(|this| ContinueStatement {
 
            this,
 
            position,
 
            label,
 
            target: None,
 
        }))
 
    }
 
    fn consume_synchronous_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<SynchronousStatementId, ParseError2> {
 
    ) -> Result<SynchronousStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"synchronous")?;
 
        self.consume_whitespace(false)?;
 
        // TODO: What was the purpose of this? Seems superfluous and confusing?
 
        // let mut parameters = Vec::new();
 
        // if self.has_string(b"(") {
 
        //     self.consume_parameters(h, &mut parameters)?;
 
        //     self.consume_whitespace(false)?;
 
        // } else if !self.has_keyword(b"skip") && !self.has_string(b"{") {
 
        //     return Err(self.error_at_pos("Expected block statement"));
 
        // }
 
        let body = self.consume_statement(h, true)?;
 
        Ok(h.alloc_synchronous_statement(|this| SynchronousStatement {
 
            this,
 
            position,
 
            body,
 
            end_sync: None,
 
            parent_scope: None,
 
        }))
 
    }
 
    fn consume_return_statement(&mut self, h: &mut Heap) -> Result<ReturnStatementId, ParseError2> {
 
    fn consume_return_statement(&mut self, h: &mut Heap) -> Result<ReturnStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"return")?;
 
        self.consume_whitespace(false)?;
 
        let expression = if self.has_string(b"(") {
 
            self.consume_paren_expression(h)
 
        } else {
 
            self.consume_expression(h)
 
        }?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_return_statement(|this| ReturnStatement { this, position, expression }))
 
    }
 
    fn consume_assert_statement(&mut self, h: &mut Heap) -> Result<AssertStatementId, ParseError2> {
 
    fn consume_assert_statement(&mut self, h: &mut Heap) -> Result<AssertStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"assert")?;
 
        self.consume_whitespace(false)?;
 
        let expression = if self.has_string(b"(") {
 
            self.consume_paren_expression(h)
 
        } else {
 
            self.consume_expression(h)
 
        }?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_assert_statement(|this| AssertStatement {
 
            this,
 
            position,
 
            expression,
 
            next: None,
 
        }))
 
    }
 
    fn consume_goto_statement(&mut self, h: &mut Heap) -> Result<GotoStatementId, ParseError2> {
 
    fn consume_goto_statement(&mut self, h: &mut Heap) -> Result<GotoStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"goto")?;
 
        self.consume_whitespace(false)?;
 
        let label = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_goto_statement(|this| GotoStatement { this, position, label, target: None }))
 
    }
 
    fn consume_new_statement(&mut self, h: &mut Heap) -> Result<NewStatementId, ParseError2> {
 
    fn consume_new_statement(&mut self, h: &mut Heap) -> Result<NewStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"new")?;
 
        self.consume_whitespace(false)?;
 
        let expression = self.consume_call_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_new_statement(|this| NewStatement { this, position, expression, next: None }))
 
    }
 
    fn consume_expression_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ExpressionStatementId, ParseError2> {
 
    ) -> Result<ExpressionStatementId, ParseError> {
 
        let position = self.source.pos();
 
        let expression = self.consume_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_expression_statement(|this| ExpressionStatement {
 
            this,
 
            position,
 
            expression,
 
            next: None,
 
        }))
 
    }
 

	
 
    // ====================
 
    // Symbol definitions
 
    // ====================
 

	
 
    fn has_symbol_definition(&self) -> bool {
 
        self.has_keyword(b"composite")
 
            || self.has_keyword(b"primitive")
 
            || self.has_type_keyword()
 
            || self.has_identifier()
 
    }
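
    // Dispatches on the leading keyword to parse a struct, enum, component

    // ('composite'/'primitive') or function definition.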
 
    fn consume_symbol_definition(&mut self, h: &mut Heap) -> Result<DefinitionId, ParseError2> {
 
    fn consume_symbol_definition(&mut self, h: &mut Heap) -> Result<DefinitionId, ParseError> {
 
        if self.has_keyword(b"struct") {
 
            Ok(self.consume_struct_definition(h)?.upcast())
 
        } else if self.has_keyword(b"enum") {
 
            Ok(self.consume_enum_definition(h)?.upcast())
 
        } else if self.has_keyword(b"composite") || self.has_keyword(b"primitive") {
 
            Ok(self.consume_component_definition(h)?.upcast())
 
        } else {
 
            Ok(self.consume_function_definition(h)?.upcast())
 
        }
 
    }
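
    // Parses a struct definition: the 'struct' keyword, an identifier,

    // optional polymorphic variables and a brace-delimited list of

    // 'Type field_name' entries.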
 
    fn consume_struct_definition(&mut self, h: &mut Heap) -> Result<StructId, ParseError2> {
 
    fn consume_struct_definition(&mut self, h: &mut Heap) -> Result<StructId, ParseError> {
 
        // Parse "struct" keyword, optional polyvars and its identifier
 
        let struct_pos = self.source.pos();
 
        self.consume_keyword(b"struct")?;
 
        self.consume_whitespace(true)?;
 
        let struct_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Parse struct fields
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let position = lexer.source.pos();
 
                let parser_type = lexer.consume_type2(heap, false)?;
 
                let parser_type = lexer.consume_type(heap, false)?;
 
                lexer.consume_whitespace(true)?;
 
                let field = lexer.consume_identifier()?;
 

	
 
                Ok(StructFieldDefinition{ position, field, parser_type })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError2::new_error(
 
            None => return Err(ParseError::new_error(
 
                self.source, struct_pos,

                "A struct definition must be followed by its fields"
 
            )),
 
        };
 

	
 
        // Valid struct definition
 
        Ok(h.alloc_struct_definition(|this| StructDefinition{
 
            this,
 
            position: struct_pos,
 
            identifier: struct_ident,
 
            poly_vars,
 
            fields,
 
        }))
 
    }
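
    // Parses an enum definition. A variant is either a bare identifier, an

    // integer-valued variant ('Variant = 1') or a variant embedding a type

    // ('Variant(T)').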
 
    fn consume_enum_definition(&mut self, h: &mut Heap) -> Result<EnumId, ParseError2> {
 
    fn consume_enum_definition(&mut self, h: &mut Heap) -> Result<EnumId, ParseError> {
 
        // Parse "enum" keyword, optional polyvars and its identifier
 
        let enum_pos = self.source.pos();
 
        self.consume_keyword(b"enum")?;
 
        self.consume_whitespace(true)?;
 
        let enum_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        let variants = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected end of enum variant list",
 
            |lexer, heap| {
 
                // Variant identifier
 
                let position = lexer.source.pos();
 
                let identifier = lexer.consume_identifier()?;
 
                lexer.consume_whitespace(false)?;
 

	
 
                // Optional variant value/type
 
                let next = lexer.source.next();
 
                let value = match next {
 
                    Some(b',') => {
 
                        // Do not consume, let `consume_comma_separated` handle
 
                        // the next item
 
                        EnumVariantValue::None
 
                    },
 
                    Some(b'=') => {
 
                        // Integer value
 
                        lexer.source.consume();
 
                        lexer.consume_whitespace(false)?;
 
                        if !lexer.has_integer() {
 
                            return Err(lexer.error_at_pos("Expected integer"))
 
                        }
 
                        let value = lexer.consume_integer()?;
 
                        EnumVariantValue::Integer(value)
 
                    },
 
                    Some(b'(') => {
 
                        // Embedded type
 
                        lexer.source.consume();
 
                        lexer.consume_whitespace(false)?;
 
                        let embedded_type = lexer.consume_type2(heap, false)?;
 
                        let embedded_type = lexer.consume_type(heap, false)?;
 
                        lexer.consume_whitespace(false)?;
 
                        lexer.consume_string(b")")?;
 
                        EnumVariantValue::Type(embedded_type)
 
                    },
 
                    _ => {
 
                        return Err(lexer.error_at_pos("Expected ',', '=', or '('"));
 
                    }
 
                };
 

	
 
                Ok(EnumVariantDefinition{ position, identifier, value })
 
            }
 
        )? {
 
            Some(variants) => variants,
 
            None => return Err(ParseError2::new_error(
 
            None => return Err(ParseError::new_error(
 
                self.source, enum_pos,
 
                "An enum definition must be followed by its variants"
 
            )),
 
        };
 

	
 
        Ok(h.alloc_enum_definition(|this| EnumDefinition{
 
            this,
 
            position: enum_pos,
 
            identifier: enum_ident,
 
            poly_vars,
 
            variants,
 
        }))
 
    }
 
    fn consume_component_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError2> {
 
    fn consume_component_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // TODO: Cleanup
 
        if self.has_keyword(b"composite") {
 
            Ok(self.consume_composite_definition(h)?)
 
        } else {
 
            Ok(self.consume_primitive_definition(h)?)
 
        }
 
    }
 
    fn consume_composite_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError2> {
 
    fn consume_composite_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // Parse keyword, optional polyvars and the identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"composite")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Parse body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_component(|this| Component { 
 
            this,
 
            variant: ComponentVariant::Composite,
 
            position,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body
 
        }))
 
    }
 
    fn consume_primitive_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError2> {
 
    fn consume_primitive_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // Consume keyword, optional polyvars and identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"primitive")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_component(|this| Component { 
 
            this,
 
            variant: ComponentVariant::Primitive,
 
            position,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body
 
        }))
 
    }
 
    fn consume_function_definition(&mut self, h: &mut Heap) -> Result<FunctionId, ParseError2> {
 
    fn consume_function_definition(&mut self, h: &mut Heap) -> Result<FunctionId, ParseError> {
 
        // Consume return type, optional polyvars and identifier
 
        let position = self.source.pos();
 
        let return_type = self.consume_type2(h, false)?;
 
        let return_type = self.consume_type(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_function(|this| Function {
 
            this,
 
            position,
 
            return_type,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body,
 
        }))
 
    }
 
    fn has_pragma(&self) -> bool {
 
        if let Some(c) = self.source.next() {
 
            c == b'#'
 
        } else {
 
            false
 
        }
 
    }
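
    // Parses a '#version <integer>' or '#module <dotted.name>' pragma; any

    // other pragma name is reported as unknown.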
 
    fn consume_pragma(&mut self, h: &mut Heap) -> Result<PragmaId, ParseError2> {
 
    fn consume_pragma(&mut self, h: &mut Heap) -> Result<PragmaId, ParseError> {
 
        let position = self.source.pos();
 
        let next = self.source.next();
 
        if next != Some(b'#') {
 
            return Err(self.error_at_pos("Expected pragma"));
 
        }
 
        self.source.consume();
 
        if !is_vchar(self.source.next()) {
 
            return Err(self.error_at_pos("Expected pragma"));
 
        }
 
        if self.has_string(b"version") {
 
            self.consume_string(b"version")?;
 
            self.consume_whitespace(true)?;
 
            if !self.has_integer() {
 
                return Err(self.error_at_pos("Expected integer constant"));
 
            }
 
            let version = self.consume_integer()?;
 
            debug_assert!(version >= 0);
 
            return Ok(h.alloc_pragma(|this| Pragma::Version(PragmaVersion{
 
                this, position, version: version as u64
 
            })))
 
        } else if self.has_string(b"module") {
 
            self.consume_string(b"module")?;
 
            self.consume_whitespace(true)?;
 
            if !self.has_identifier() {
 
                return Err(self.error_at_pos("Expected identifier"));
 
            }
 
            let mut value = Vec::new();
 
            let mut ident = self.consume_ident()?;
 
            value.append(&mut ident);
 
            while self.has_string(b".") {
 
                self.consume_string(b".")?;
 
                value.push(b'.');
 
                ident = self.consume_ident()?;
 
                value.append(&mut ident);
 
            }
 
            return Ok(h.alloc_pragma(|this| Pragma::Module(PragmaModule{
 
                this, position, value
 
            })));
 
        } else {
 
            return Err(self.error_at_pos("Unknown pragma"));
 
        }
 
    }
 

	
 
    fn has_import(&self) -> bool {
 
        self.has_keyword(b"import")
 
    }
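
    // Parses an import such as `import some.module.name;`, optionally followed

    // by an alias or a set of imported symbols. Without an explicit alias the

    // last segment of the module name becomes the implicit alias.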
 
    fn consume_import(&mut self, h: &mut Heap) -> Result<ImportId, ParseError2> {
 
    fn consume_import(&mut self, h: &mut Heap) -> Result<ImportId, ParseError> {
 
        // Parse the word "import" and the name of the module
 
        let position = self.source.pos();
 
        self.consume_keyword(b"import")?;
 
        self.consume_whitespace(true)?;
 
        let mut value = Vec::new();
 
        let mut last_ident_pos = self.source.pos();
 
        let mut ident = self.consume_ident()?;
 
        value.append(&mut ident);
 
        let mut last_ident_start = 0;
 

	
 
        while self.has_string(b".") {
 
            self.consume_string(b".")?;
 
            value.push(b'.');
 
            last_ident_pos = self.source.pos();
 
            ident = self.consume_ident()?;
 
            last_ident_start = value.len();
 
            value.append(&mut ident);
 
        }
 

	
 

	
 
        self.consume_whitespace(false)?;
 

	
 
        // Check for the potential aliasing or specific module imports
 
        let import = if self.has_string(b"as") {
 
@@ -2516,49 +2516,49 @@ impl Lexer<'_> {
 
                    }]
 
                }))
 
            } else {
 
                return Err(self.error_at_pos("Expected '*', '{' or a symbol name"));
 
            }
 
        } else {
 
            // No explicit alias or subimports, so implicit alias
 
            let alias_value = Vec::from(&value[last_ident_start..]);
 
            h.alloc_import(|this| Import::Module(ImportModule{
 
                this,
 
                position,
 
                module_name: value,
 
                alias: Identifier{
 
                    position: last_ident_pos,
 
                    value: Vec::from(alias_value),
 
                },
 
                module_id: None,
 
            }))
 
        };
 

	
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(import)
 
    }
 
    pub fn consume_protocol_description(&mut self, h: &mut Heap) -> Result<RootId, ParseError2> {
 
    pub fn consume_protocol_description(&mut self, h: &mut Heap) -> Result<RootId, ParseError> {
 
        let position = self.source.pos();
 
        let mut pragmas = Vec::new();
 
        let mut imports = Vec::new();
 
        let mut definitions = Vec::new();
 
        self.consume_whitespace(false)?;
 
        while self.has_pragma() {
 
            let pragma = self.consume_pragma(h)?;
 
            pragmas.push(pragma);
 
            self.consume_whitespace(false)?;
 
        }
 
        while self.has_import() {
 
            let import = self.consume_import(h)?;
 
            imports.push(import);
 
            self.consume_whitespace(false)?;
 
        }
 
        while self.has_symbol_definition() {
 
            let def = self.consume_symbol_definition(h)?;
 
            definitions.push(def);
 
            self.consume_whitespace(false)?;
 
        }
 
        // end of file
 
        if !self.source.is_eof() {
 
            return Err(self.error_at_pos("Expected end of file"));
 
        }
src/protocol/parser/mod.rs
 
@@ -26,125 +26,125 @@ pub(crate) struct LexedModule {
 
    module_name: Vec<u8>,
 
    version: Option<u64>,
 
    pub(crate) root_id: RootId,
 
}
 

	
 
pub struct Parser {
 
    pub(crate) heap: Heap,
 
    pub(crate) modules: Vec<LexedModule>,
 
    pub(crate) module_lookup: HashMap<Vec<u8>, usize>, // from (optional) module name to `modules` idx
 
    pub(crate) symbol_table: SymbolTable,
 
    pub(crate) type_table: TypeTable,
 
}
 

	
 
impl Parser {
 
    pub fn new() -> Self {
 
        Parser{
 
            heap: Heap::new(),
 
            modules: Vec::new(),
 
            module_lookup: HashMap::new(),
 
            symbol_table: SymbolTable::new(),
 
            type_table: TypeTable::new(),
 
        }
 
    }
 

	
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<RootId, ParseError2> {
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<RootId, ParseError> {
 
        // Lex the input source
 
        let mut lex = Lexer::new(&mut source);
 
        let pd = lex.consume_protocol_description(&mut self.heap)?;
 

	
 
        // Seek the module name and version
 
        let root = &self.heap[pd];
 
        let mut module_name_pos = InputPosition::default();
 
        let mut module_name = Vec::new();
 
        let mut module_version_pos = InputPosition::default();
 
        let mut module_version = None;
 

	
 
        for pragma in &root.pragmas {
 
            match &self.heap[*pragma] {
 
                Pragma::Module(module) => {
 
                    if !module_name.is_empty() {
 
                        return Err(
 
                            ParseError2::new_error(&source, module.position, "Double definition of module name in the same file")
 
                            ParseError::new_error(&source, module.position, "Double definition of module name in the same file")
 
                                .with_postfixed_info(&source, module_name_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_name_pos = module.position.clone();
 
                    module_name = module.value.clone();
 
                },
 
                Pragma::Version(version) => {
 
                    if module_version.is_some() {
 
                        return Err(
 
                            ParseError2::new_error(&source, version.position, "Double definition of module version")
 
                            ParseError::new_error(&source, version.position, "Double definition of module version")
 
                                .with_postfixed_info(&source, module_version_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_version_pos = version.position.clone();
 
                    module_version = Some(version.version);
 
                },
 
            }
 
        }
 

	
 
        // Add module to list of modules and prevent naming conflicts
 
        let cur_module_idx = self.modules.len();
 
        if let Some(prev_module_idx) = self.module_lookup.get(&module_name) {
 
            // Find `#module` statement in other module again
 
            let prev_module = &self.modules[*prev_module_idx];
 
            let prev_module_pos = self.heap[prev_module.root_id].pragmas
 
                .iter()
 
                .find_map(|p| {
 
                    match &self.heap[*p] {
 
                        Pragma::Module(module) => Some(module.position.clone()),
 
                        _ => None
 
                    }
 
                })
 
                .unwrap_or(InputPosition::default());
 

	
 
            let module_name_msg = if module_name.is_empty() {
 
                format!("a nameless module")
 
            } else {
 
                format!("module '{}'", String::from_utf8_lossy(&module_name))
 
            };
 

	
 
            return Err(
 
                ParseError2::new_error(&source, module_name_pos, &format!("Double definition of {} across files", module_name_msg))
 
                ParseError::new_error(&source, module_name_pos, &format!("Double definition of {} across files", module_name_msg))
 
                    .with_postfixed_info(&prev_module.source, prev_module_pos, "Other definition was here")
 
            );
 
        }
 

	
 
        self.modules.push(LexedModule{
 
            source,
 
            module_name: module_name.clone(),
 
            version: module_version,
 
            root_id: pd
 
        });
 
        self.module_lookup.insert(module_name, cur_module_idx);
 
        Ok(pd)
 
    }
 

	
 
    fn resolve_symbols_and_types(&mut self) -> Result<(), ParseError2> {
 
    fn resolve_symbols_and_types(&mut self) -> Result<(), ParseError> {
 
        // Construct the symbol table to resolve any imports and/or definitions,
 
        // then use the symbol table to actually annotate all of the imports.
 
        // If the type table is constructed correctly then all imports MUST be
 
        // resolvable.
 
        self.symbol_table.build(&self.heap, &self.modules)?;
 

	
 
        // Not pretty, but we need to work around rust's borrowing rules, it is
 
        // totally safe to mutate the contents of an AST element that we are
 
        // not borrowing anywhere else.
 
        let mut module_index = 0;
 
        let mut import_index = 0;
 
        loop {
 
            if module_index >= self.modules.len() {
 
                break;
 
            }
 

	
 
            let module_root_id = self.modules[module_index].root_id;
 
            let import_id = {
 
                let root = &self.heap[module_root_id];
 
                if import_index >= root.imports.len() {
 
                    module_index += 1;
 
                    import_index = 0;
 
                    continue
 
                }
 
@@ -166,92 +166,92 @@ impl Parser {
 
                    import.module_id = Some(target_module_id);
 

	
 
                    for symbol in &mut import.symbols {
 
                        debug_assert!(symbol.definition_id.is_none(), "symbol import already resolved");
 
                        let (_, target_definition_id) = self.symbol_table.resolve_identifier(module_root_id, &symbol.alias)
 
                            .expect("symbol import is resolved by symbol table")
 
                            .as_definition()
 
                            .expect("symbol import does not resolve to namespace symbol");
 
                        symbol.definition_id = Some(target_definition_id);
 
                    }
 
                }
 
            }
 

	
 
            import_index += 1;
 
        }
 

	
 
        // All imports in the AST are now annotated. We now use the symbol table
 
        // to construct the type table.
 
        let mut type_ctx = TypeCtx::new(&self.symbol_table, &mut self.heap, &self.modules);
 
        self.type_table.build_base_types(&mut type_ctx)?;
 

	
 
        Ok(())
 
    }
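
    // Runs the remaining parsing phases: resolve symbols and types, validate

    // and link every module, type-check all queued definitions and finally

    // run the leftover legacy visitor passes.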
 

	
 
    pub fn parse(&mut self) -> Result<(), ParseError2> {
 
    pub fn parse(&mut self) -> Result<(), ParseError> {
 
        self.resolve_symbols_and_types()?;
 

	
 
        // Validate and link all modules
 
        let mut visit = ValidityAndLinkerVisitor::new();
 
        for module in &self.modules {
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.visit_module(&mut ctx)?;
 
        }
 

	
 
        // Perform typechecking on all modules
 
        let mut visit = TypeResolvingVisitor::new();
 
        let mut queue = ResolveQueue::new();
 
        for module in &self.modules {
 
            let ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            TypeResolvingVisitor::queue_module_definitions(&ctx, &mut queue);   
 
        };
 
        while !queue.is_empty() {
 
            let top = queue.pop().unwrap();
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module: &self.modules[top.root_id.index as usize],
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.handle_module_definition(&mut ctx, &mut queue, top)?;
 
        }
 

	
 
        // Perform remaining steps
 
        // TODO: Phase out at some point
 
        for module in &self.modules {
 
            let root_id = module.root_id;
 
            if let Err((position, message)) = Self::parse_inner(&mut self.heap, root_id) {
 
                return Err(ParseError2::new_error(&self.modules[0].source, position, &message))
 
                return Err(ParseError::new_error(&self.modules[0].source, position, &message))
 
            }
 
        }
 

	
 
        // let mut writer = ASTWriter::new();
 
        // let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap();
 
        // writer.write_ast(&mut file, &self.heap);
 

	
 
        Ok(())
 
    }
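The monomorph queue driven above is a plain worklist with a fixpoint flavour: handling one definition can enqueue further definition/polymorphic-argument combinations (see the queue.push in type_resolver.rs further down), and parsing only finishes once nothing new is discovered. A self-contained toy of that pattern, with a placeholder element type instead of the real ResolveQueue machinery:

    use std::collections::VecDeque;

    // Stand-in for ResolveQueueElement (root_id / definition_id / monomorph_types).
    #[derive(PartialEq, Debug)]
    struct Element { definition: &'static str, poly: Vec<&'static str> }

    fn main() {
        let mut queue: VecDeque<Element> = VecDeque::new();
        queue.push_back(Element { definition: "main", poly: vec![] });

        while let Some(top) = queue.pop_front() {
            // "Typechecking" a definition may discover new monomorphs to check later.
            let discovered = match top.definition {
                "main" => vec![Element { definition: "pick", poly: vec!["u32"] }],
                _ => vec![],
            };
            for elem in discovered {
                if !queue.contains(&elem) { // same dedup idea as ResolveQueue::contains
                    queue.push_back(elem);
                }
            }
            println!("checked {:?}", top);
        }
    }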
 

	
 
    pub fn parse_inner(h: &mut Heap, pd: RootId) -> VisitorResult {
 
        // TODO: @cleanup, slowly phasing out old compiler
 
        // NestedSynchronousStatements::new().visit_protocol_description(h, pd)?;
 
        // ChannelStatementOccurrences::new().visit_protocol_description(h, pd)?;
 
        // FunctionStatementReturns::new().visit_protocol_description(h, pd)?;
 
        // ComponentStatementReturnNew::new().visit_protocol_description(h, pd)?;
 
        // CheckBuiltinOccurrences::new().visit_protocol_description(h, pd)?;
 
        // BuildSymbolDeclarations::new().visit_protocol_description(h, pd)?;
 
        // LinkCallExpressions::new().visit_protocol_description(h, pd)?;
 
        // BuildScope::new().visit_protocol_description(h, pd)?;
 
        // ResolveVariables::new().visit_protocol_description(h, pd)?;
 
        LinkStatements::new().visit_protocol_description(h, pd)?;
 
        // BuildLabels::new().visit_protocol_description(h, pd)?;
 
        // ResolveLabels::new().visit_protocol_description(h, pd)?;
src/protocol/parser/symbol_table.rs
 
@@ -68,176 +68,176 @@ impl SymbolValue {
 
/// and secondly it will resolve all symbols within a module to their
 
/// appropriate definitions (in case of enums, functions, etc.) and namespaces
 
/// (currently only external modules can act as namespaces). If a symbol clashes
 
/// or if a symbol cannot be resolved this will be an error.
 
///
 
/// Within the compilation process the symbol table is responsible for resolving
 
/// namespaced identifiers (e.g. Module::Enum::EnumVariant) to the appropriate
 
/// definition (i.e. not namespaces; as the language has no way to use
 
/// namespaces except for using them in namespaced identifiers).
 
pub(crate) struct SymbolTable {
 
    // Lookup from module name (not any aliases) to the root id
 
    module_lookup: HashMap<Vec<u8>, RootId>,
 
    // Lookup from within a module, to a particular imported (potentially
 
    // aliased) or defined symbol. Put simply: if the source code of a
 
    // module contains correctly imported/defined symbols, then this lookup
 
    // will always return the corresponding definition
 
    symbol_lookup: HashMap<SymbolKey, SymbolValue>,
 
}
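Queries go through the resolve methods: a (module, identifier) pair resolves either to a namespace (an imported module) or to a definition, and a namespaced identifier always bottoms out in a definition. The call shape used by the import-annotation pass earlier in this changeset looks roughly like this (fragment only; the expects stand in for real error handling):

    // `root_id` identifies the module doing the lookup, `alias` the (possibly
    // renamed) symbol being resolved.
    let (_defined_in, definition_id) = symbol_table
        .resolve_identifier(root_id, &alias)
        .expect("symbol is known to the table")
        .as_definition()
        .expect("symbol names a definition, not a namespace");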
 

	
 
impl SymbolTable {
 
    pub(crate) fn new() -> Self {
 
        Self{ module_lookup: HashMap::new(), symbol_lookup: HashMap::new() }
 
    }
 

	
 
    pub(crate) fn build(&mut self, heap: &Heap, modules: &[LexedModule]) -> Result<(), ParseError2> {
 
    pub(crate) fn build(&mut self, heap: &Heap, modules: &[LexedModule]) -> Result<(), ParseError> {
 
        // Sanity checks
 
        debug_assert!(self.module_lookup.is_empty());
 
        debug_assert!(self.symbol_lookup.is_empty());
 
        if cfg!(debug_assertions) {
 
            for (index, module) in modules.iter().enumerate() {
 
                debug_assert_eq!(
 
                    index, module.root_id.index as usize,
 
                    "module RootId does not correspond to LexedModule index"
 
                )
 
            }
 
        }
 

	
 
        // Preparation: create a lookup from module name to root id. This does
 
        // not take aliasing into account.
 
        self.module_lookup.reserve(modules.len());
 
        for module in modules {
 
            // TODO: Maybe put duplicate module name checking here?
 
            // TODO: @string
 
            self.module_lookup.insert(module.module_name.clone(), module.root_id);
 
        }
 

	
 
        // Preparation: determine total number of imports we will be inserting
 
        // into the lookup table. We could just iterate over the arena, but then
 
        // we don't know the source file the import belongs to.
 
        let mut lookup_reserve_size = 0;
 
        for module in modules {
 
            let module_root = &heap[module.root_id];
 
            for import_id in &module_root.imports {
 
                match &heap[*import_id] {
 
                    Import::Module(_) => lookup_reserve_size += 1,
 
                    Import::Symbols(import) => {
 
                        if import.symbols.is_empty() {
 
                            // Add all symbols from the other module
 
                            match self.module_lookup.get(&import.module_name) {
 
                                Some(target_module_id) => {
 
                                    lookup_reserve_size += heap[*target_module_id].definitions.len()
 
                                },
 
                                None => {
 
                                    return Err(
 
                                        ParseError2::new_error(&module.source, import.position, "Cannot resolve module")
 
                                        ParseError::new_error(&module.source, import.position, "Cannot resolve module")
 
                                    );
 
                                }
 
                            }
 
                        } else {
 
                            lookup_reserve_size += import.symbols.len();
 
                        }
 
                    }
 
                }
 
            }
 

	
 
            lookup_reserve_size += module_root.definitions.len();
 
        }
 

	
 
        self.symbol_lookup.reserve(lookup_reserve_size);
 

	
 
        // First pass: we go through all of the modules and add lookups to
 
        // symbols that are defined within that module. Cross-module imports are
 
        // not yet resolved
 
        for module in modules {
 
            let root = &heap[module.root_id];
 
            for definition_id in &root.definitions {
 
                let definition = &heap[*definition_id];
 
                let identifier = definition.identifier();
 
                if let Err(previous_position) = self.add_definition_symbol(
 
                    identifier.position, SymbolKey::from_identifier(module.root_id, &identifier),
 
                    module.root_id, *definition_id
 
                ) {
 
                    return Err(
 
                        ParseError2::new_error(&module.source, definition.position(), "Symbol is multiply defined")
 
                        ParseError::new_error(&module.source, definition.position(), "Symbol is multiply defined")
 
                            .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                    )
 
                }
 
            }
 
        }
 

	
 
        // Second pass: now that we can find symbols in modules, we can resolve
 
        // all imports (if they're correct, that is)
 
        for module in modules {
 
            let root = &heap[module.root_id];
 
            for import_id in &root.imports {
 
                let import = &heap[*import_id];
 
                match import {
 
                    Import::Module(import) => {
 
                        // Find the module using its name
 
                        let target_root_id = self.resolve_module(&import.module_name);
 
                        if target_root_id.is_none() {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Could not resolve module"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Could not resolve module"));
 
                        }
 
                        let target_root_id = target_root_id.unwrap();
 
                        if target_root_id == module.root_id {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Illegal import of self"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Illegal import of self"));
 
                        }
 

	
 
                        // Add the target module under its alias
 
                        if let Err(previous_position) = self.add_namespace_symbol(
 
                            import.position, SymbolKey::from_identifier(module.root_id, &import.alias),
 
                            target_root_id
 
                        ) {
 
                            return Err(
 
                                ParseError2::new_error(&module.source, import.position, "Symbol is multiply defined")
 
                                ParseError::new_error(&module.source, import.position, "Symbol is multiply defined")
 
                                    .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                            );
 
                        }
 
                    },
 
                    Import::Symbols(import) => {
 
                        // Find the target module using its name
 
                        let target_root_id = self.resolve_module(&import.module_name);
 
                        if target_root_id.is_none() {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Could not resolve module of symbol imports"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Could not resolve module of symbol imports"));
 
                        }
 
                        let target_root_id = target_root_id.unwrap();
 
                        if target_root_id == module.root_id {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Illegal import of self"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Illegal import of self"));
 
                        }
 

	
 
                        // Determine which symbols to import
 
                        if import.symbols.is_empty() {
 
                            // Import of all symbols, not using any aliases
 
                            for definition_id in &heap[target_root_id].definitions {
 
                                let definition = &heap[*definition_id];
 
                                let identifier = definition.identifier();
 
                                if let Err(previous_position) = self.add_definition_symbol(
 
                                    import.position, SymbolKey::from_identifier(module.root_id, identifier),
 
                                    target_root_id, *definition_id
 
                                ) {
 
                                    return Err(
 
                                        ParseError2::new_error(
 
                                        ParseError::new_error(
 
                                            &module.source, import.position,
 
                                            &format!("Imported symbol '{}' is already defined", String::from_utf8_lossy(&identifier.value))
 
                                        )
 
                                        .with_postfixed_info(
 
                                            &modules[target_root_id.index as usize].source,
 
                                            definition.position(),
 
                                            "The imported symbol is defined here"
 
                                        )
 
                                        .with_postfixed_info(
 
                                            &module.source, previous_position, "And is previously defined here"
 
                                        )
 
                                    )
 
                                }
 
                            }
 
                        } else {
 
                            // Import of specific symbols, optionally using aliases
 
                            for symbol in &import.symbols {
 
                                // Because we have already added per-module definitions, we can use
 
                                // the table to look up this particular symbol. Note: within a single
 
                                // module a namespace-import and a symbol-import may not collide.
 
                                // Hence per-module symbols are unique.
 
                                // However: if we import a symbol from another module, we don't want
 
                                // to "import a module's imported symbol". And so if we do find
 
                                // a symbol match, we need to make sure it is a definition from
 
@@ -246,59 +246,59 @@ impl SymbolTable {
 
                                let target_symbol = self.symbol_lookup.get(&key);
 
                                let symbol_definition_id = match target_symbol {
 
                                    Some(target_symbol) => {
 
                                        match target_symbol.symbol {
 
                                            Symbol::Definition((symbol_root_id, symbol_definition_id)) => {
 
                                                if symbol_root_id == target_root_id {
 
                                                    Some(symbol_definition_id)
 
                                                } else {
 
                                                    // This is imported within the target module, and not
 
                                                    // defined within the target module
 
                                                    None
 
                                                }
 
                                            },
 
                                            Symbol::Namespace(_) => {
 
                                                // We don't import a module's "module import"
 
                                                None
 
                                            }
 
                                        }
 
                                    },
 
                                    None => None
 
                                };
 

	
 
                                if symbol_definition_id.is_none() {
 
                                    return Err(
 
                                        ParseError2::new_error(&module.source, symbol.position, "Could not resolve symbol")
 
                                        ParseError::new_error(&module.source, symbol.position, "Could not resolve symbol")
 
                                    )
 
                                }
 
                                let symbol_definition_id = symbol_definition_id.unwrap();
 

	
 
                                if let Err(previous_position) = self.add_definition_symbol(
 
                                    symbol.position, SymbolKey::from_identifier(module.root_id, &symbol.alias),
 
                                    target_root_id, symbol_definition_id
 
                                ) {
 
                                    return Err(
 
                                        ParseError2::new_error(&module.source, symbol.position, "Symbol is multiply defined")
 
                                        ParseError::new_error(&module.source, symbol.position, "Symbol is multiply defined")
 
                                            .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                                    )
 
                                }
 
                            }
 
                        }
 
                    }
 
                }
 
            }
 
        }
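The net effect of the two passes can be modelled with nothing more than a map keyed by (module, identifier): pass one records every module's own definitions, pass two adds one extra key per import that points at the same definition, and a key collision is exactly the "Symbol is multiply defined" error above. A minimal self-contained sketch with toy ids (not the crate's arena types):

    use std::collections::HashMap;

    type RootId = u32;
    type DefinitionId = u32;

    fn main() {
        let (mod_a, mod_b): (RootId, RootId) = (0, 1);
        let foo_def: DefinitionId = 7;

        // Keyed by (module doing the lookup, identifier) -> (defining module, definition).
        let mut lookup: HashMap<(RootId, &str), (RootId, DefinitionId)> = HashMap::new();

        // Pass 1: each module's own definitions.
        lookup.insert((mod_a, "Foo"), (mod_a, foo_def));

        // Pass 2: `import a::Foo as Bar` inside module b aliases the same definition.
        let target = lookup[&(mod_a, "Foo")];
        let clash = lookup.insert((mod_b, "Bar"), target);
        assert!(clash.is_none(), "Symbol is multiply defined");
    }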
 
        fn find_name(heap: &Heap, root_id: RootId) -> String {
 
            let root = &heap[root_id];
 
            for pragma_id in &root.pragmas {
 
                match &heap[*pragma_id] {
 
                    Pragma::Module(module) => {
 
                        return String::from_utf8_lossy(&module.value).to_string()
 
                    },
 
                    _ => {},
 
                }
 
            }
 

	
 
            return String::from("Unknown")
 
        }
 

	
 
        debug_assert_eq!(
src/protocol/parser/type_resolver.rs
 
@@ -1270,124 +1270,124 @@ macro_rules! debug_assert_expr_ids_unique_and_known {
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2);
 
    };
 
    // Generic case
 
    ($resolver:ident, $id1:ident, $id2:ident, $($tail:ident),+) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2, $($tail),+);
 
    };
 
}
 

	
 
macro_rules! debug_assert_ptrs_distinct {
 
    // Base case
 
    ($ptr1:ident, $ptr2:ident) => {
 
        debug_assert!(!std::ptr::eq($ptr1, $ptr2));
 
    };
 
    // Generic case
 
    ($ptr1:ident, $ptr2:ident, $($tail:ident),+) => {
 
        debug_assert_ptrs_distinct!($ptr1, $ptr2);
 
        debug_assert_ptrs_distinct!($ptr2, $($tail),+);
 
    };
 
}
 

	
 
impl TypeResolvingVisitor {
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError2> {
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError> {
 
        // Keep inferring until we can no longer make any progress
 
        while let Some(next_expr_id) = self.expr_queued.iter().next() {
 
            let next_expr_id = *next_expr_id;
 
            self.expr_queued.remove(&next_expr_id);
 
            self.progress_expr(ctx, next_expr_id)?;
 
        }
 

	
 
        // We check if we have all the types we need. If we're typechecking a 
 
        // polymorphic procedure more than once, then we have already annotated
 
        // the AST and have now performed typechecking for a different 
 
        // monomorph. In that case we just need to perform typechecking, no need
 
        // to annotate the AST again.
 
        let definition_id = match &self.definition_type {
 
            DefinitionType::Component(id) => id.upcast(),
 
            DefinitionType::Function(id) => id.upcast(),
 
            _ => unreachable!(),
 
        };
 

	
 
        let already_checked = ctx.types.get_base_definition(&definition_id).unwrap().has_any_monomorph();
 
        for (expr_id, expr_type) in self.expr_types.iter_mut() {
 
            if !expr_type.is_done {
 
                // Auto-infer numberlike/integerlike types to a regular int
 
                if expr_type.parts.len() == 1 && expr_type.parts[0] == InferenceTypePart::IntegerLike {
 
                    expr_type.parts[0] = InferenceTypePart::Int;
 
                } else {
 
                    let expr = &ctx.heap[*expr_id];
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Could not fully infer the type of this expression (got '{}')",
 
                            expr_type.display_name(&ctx.heap)
 
                        )
 
                    ))
 
                }
 
            }
 

	
 
            if !already_checked {
 
                let concrete_type = ctx.heap[*expr_id].get_type_mut();
 
                expr_type.write_concrete_type(concrete_type);
 
            } else {
 
                if cfg!(debug_assertions) {
 
                    let mut concrete_type = ConcreteType::default();
 
                    expr_type.write_concrete_type(&mut concrete_type);
 
                    debug_assert_eq!(*ctx.heap[*expr_id].get_type(), concrete_type);
 
                }
 
            }
 
        }
 

	
 
        // All types are fine
 
        ctx.types.add_monomorph(&definition_id, self.poly_vars.clone());
 

	
 
        // Check all things we need to monomorphize
 
        // TODO: Struct/enum/union monomorphization
 
        for (expr_id, extra_data) in self.extra_data.iter() {
 
            if extra_data.poly_vars.is_empty() { continue; }
 

	
 
            // Retrieve polymorph variable specification. Those of struct 
 
            // literals and those of procedure calls need to be fully inferred.
 
            // The remaining ones (e.g. select expressions) allow partial 
 
            // inference of types, as long as the accessed field's type is
 
            // fully inferred.
 
            let needs_full_inference = match &ctx.heap[*expr_id] {
 
                Expression::Call(_) => true,
 
                Expression::Literal(_) => true,
 
                _ => false
 
            };
 

	
 
            if needs_full_inference {
 
                let mut monomorph_types = Vec::with_capacity(extra_data.poly_vars.len());
 
                for (poly_idx, poly_type) in extra_data.poly_vars.iter().enumerate() {
 
                    if !poly_type.is_done {
 
                        // TODO: Single clean function for function signatures and polyvars.
 
                        // TODO: Better error message
 
                        let expr = &ctx.heap[*expr_id];
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, expr.position(),
 
                            &format!(
 
                                "Could not fully infer the type of polymorphic variable {} of this expression (got '{}')",
 
                                poly_idx, poly_type.display_name(&ctx.heap)
 
                            )
 
                        ))
 
                    }
 

	
 
                    let mut concrete_type = ConcreteType::default();
 
                    poly_type.write_concrete_type(&mut concrete_type);
 
                    monomorph_types.insert(poly_idx, concrete_type);
 
                }
 

	
 
                // Resolve to the appropriate expression and instantiate 
 
                // monomorphs.
 
                match &ctx.heap[*expr_id] {
 
                    Expression::Call(call_expr) => {
 
                        // Add to type table if not yet typechecked
 
                        if let Method::Symbolic(symbolic) = &call_expr.method {
 
                            let definition_id = symbolic.definition.unwrap();
 
                            if !ctx.types.has_monomorph(&definition_id, &monomorph_types) {
 
                                let root_id = ctx.types
 
                                    .get_base_definition(&definition_id)
 
                                    .unwrap()
 
@@ -1401,169 +1401,169 @@ impl TypeResolvingVisitor {
 
                                    definition_id,
 
                                    monomorph_types,
 
                                };
 
                                if !queue.contains(&queue_element) {
 
                                    queue.push(queue_element);
 
                                }
 
                            }
 
                        }
 
                    },
 
                    Expression::Literal(lit_expr) => {
 
                        let lit_struct = lit_expr.value.as_struct();
 
                        let definition_id = lit_struct.definition.as_ref().unwrap();
 
                        if !ctx.types.has_monomorph(definition_id, &monomorph_types) {
 
                            ctx.types.add_monomorph(definition_id, monomorph_types);
 
                        }
 
                    },
 
                    _ => unreachable!("needs full inference, but not a struct literal or call expression")
 
                }
 
            } // else: was just a helper structure...
 
        }
 

	
 
        Ok(())
 
    }
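The finalization step at the top of this function is worth isolating: an expression whose inference result is still only "some integer" silently defaults to a plain int, while any other unresolved shape becomes the "Could not fully infer" error. A toy version with stand-in parts (not the real InferenceTypePart):

    #[derive(PartialEq, Debug)]
    enum Part { IntegerLike, Int, Unknown }

    fn finalize(parts: &mut Vec<Part>) -> Result<(), String> {
        let done = !parts.iter().any(|p| matches!(p, Part::Unknown | Part::IntegerLike));
        if done {
            return Ok(());
        }
        if parts.len() == 1 && parts[0] == Part::IntegerLike {
            parts[0] = Part::Int; // auto-infer integer-like to a regular int
            Ok(())
        } else {
            Err(String::from("Could not fully infer the type of this expression"))
        }
    }

    fn main() {
        let mut ok = vec![Part::IntegerLike];
        assert!(finalize(&mut ok).is_ok());
        assert_eq!(ok, vec![Part::Int]);

        let mut bad = vec![Part::Unknown, Part::Int];
        assert!(finalize(&mut bad).is_err());
    }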
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError2> {
 
    fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError> {
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let id = expr.this;
 
                self.progress_assignment_expr(ctx, id)
 
            },
 
            Expression::Conditional(expr) => {
 
                let id = expr.this;
 
                self.progress_conditional_expr(ctx, id)
 
            },
 
            Expression::Binary(expr) => {
 
                let id = expr.this;
 
                self.progress_binary_expr(ctx, id)
 
            },
 
            Expression::Unary(expr) => {
 
                let id = expr.this;
 
                self.progress_unary_expr(ctx, id)
 
            },
 
            Expression::Indexing(expr) => {
 
                let id = expr.this;
 
                self.progress_indexing_expr(ctx, id)
 
            },
 
            Expression::Slicing(expr) => {
 
                let id = expr.this;
 
                self.progress_slicing_expr(ctx, id)
 
            },
 
            Expression::Select(expr) => {
 
                let id = expr.this;
 
                self.progress_select_expr(ctx, id)
 
            },
 
            Expression::Array(expr) => {
 
                let id = expr.this;
 
                self.progress_array_expr(ctx, id)
 
            },
 
            Expression::Literal(expr) => {
 
                let id = expr.this;
 
                self.progress_literal_expr(ctx, id)
 
            },
 
            Expression::Call(expr) => {
 
                let id = expr.this;
 
                self.progress_call_expr(ctx, id)
 
            },
 
            Expression::Variable(expr) => {
 
                let id = expr.this;
 
                self.progress_variable_expr(ctx, id)
 
            }
 
        }
 
    }
 

	
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> Result<(), ParseError2> {
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> Result<(), ParseError> {
 
        use AssignmentOperator as AO;
 

	
 
        // TODO: Assignable check
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.left;
 
        let arg2_expr_id = expr.right;
 

	
 
        debug_log!("Assignment expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let progress_base = match expr.operation {
 
            AO::Set =>
 
                false,
 
            AO::Multiplied | AO::Divided | AO::Added | AO::Subtracted =>
 
                self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?,
 
            AO::Remained | AO::ShiftedLeft | AO::ShiftedRight |
 
            AO::BitwiseAnded | AO::BitwiseXored | AO::BitwiseOred =>
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?,
 
        };
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_base || progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 

	
 
        if progress_base || progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> Result<(), ParseError2> {
 
    fn progress_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> Result<(), ParseError> {
 
        // Note: test expression type is already enforced
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.true_expression;
 
        let arg2_expr_id = expr.false_expression;
 

	
 
        debug_log!("Conditional expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> Result<(), ParseError2> {
 
    fn progress_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> Result<(), ParseError> {
 
        // Note: our expression type might be fixed by our parent, but we still
 
        // need to make sure it matches the type associated with our operation.
 
        use BinaryOperator as BO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_id = expr.left;
 
        let arg2_id = expr.right;
 

	
 
        debug_log!("Binary expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = match expr.operation {
 
            BO::Concatenate => {
 
                // Arguments may be arrays/slices, output is always an array
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &ARRAYLIKE_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &ARRAYLIKE_TEMPLATE)?;
 

	
 
                // If they're all arraylike, then we want the subtype to match
 
                let (subtype_expr, subtype_arg1, subtype_arg2) =
 
@@ -1605,166 +1605,166 @@ impl TypeResolvingVisitor {
 
                (progress_expr, progress_arg_base || progress_arg1, progress_arg_base || progress_arg2)
 
            },
 
            BO::Add | BO::Subtract | BO::Multiply | BO::Divide => {
 
                // All equal of number type
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_id); }
 
        if progress_arg2 { self.queue_expr(arg2_id); }
 

	
 
        Ok(())
 
    }
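All of these progress_* methods follow the same contract: each constraint application reports, per participating expression, whether that expression's inferred type actually changed, and only the changed expressions (or their parent, via queue_expr_parent) are re-queued. A stripped-down illustration of that contract, with strings standing in for inference types and a deliberately naive unification:

    // Returns (lhs_progressed, rhs_progressed); an empty string plays the role of "unknown".
    fn unify(lhs: &mut String, rhs: &mut String) -> (bool, bool) {
        if lhs == rhs {
            (false, false)
        } else if lhs.is_empty() {
            *lhs = rhs.clone();
            (true, false)
        } else if rhs.is_empty() {
            *rhs = lhs.clone();
            (false, true)
        } else {
            (false, false) // incompatible; the real code returns a ParseError here
        }
    }

    fn main() {
        let mut expr = String::new();      // type of the whole expression, still unknown
        let mut arg = String::from("int"); // type of one argument, already known
        let (expr_changed, arg_changed) = unify(&mut expr, &mut arg);
        assert!(expr_changed && !arg_changed && expr == "int");
        // Only the expression would be re-queued; the argument stays untouched.
    }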
 

	
 
    fn progress_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> Result<(), ParseError2> {
 
    fn progress_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> Result<(), ParseError> {
 
        use UnaryOperation as UO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg_id = expr.expression;
 

	
 
        debug_log!("Unary expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg  type: {}", self.expr_types.get(&arg_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg) = match expr.operation {
 
            UO::Positive | UO::Negative => {
 
                // Equal types of numeric class
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::BitwiseNot | UO::PreIncrement | UO::PreDecrement | UO::PostIncrement | UO::PostDecrement => {
 
                // Equal types of integer class
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::LogicalNot => {
 
                // Both booleans
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                (progress_expr, progress_arg)
 
            }
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg  type [{}]: {}", progress_arg, self.expr_types.get(&arg_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg { self.queue_expr(arg_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> Result<(), ParseError2> {
 
    fn progress_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let subject_id = expr.subject;
 
        let index_id = expr.index;
 

	
 
        debug_log!("Indexing expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Subject type: {}", self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Index   type: {}", self.expr_types.get(&index_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // Make sure subject is arraylike and index is integerlike
 
        let progress_subject_base = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
        let progress_index = self.apply_forced_constraint(ctx, index_id, &INTEGERLIKE_TEMPLATE)?;
 

	
 
        // Make sure that if the output has type T, the subject is Array<T>
 
        let (progress_expr, progress_subject) =
 
            self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, subject_id, 1)?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject_base || progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Index   type [{}]: {}", progress_index, self.expr_types.get(&index_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_subject_base || progress_subject { self.queue_expr(subject_id); }
 
        if progress_index { self.queue_expr(index_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> Result<(), ParseError2> {
 
    fn progress_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let subject_id = expr.subject;
 
        let from_id = expr.from_index;
 
        let to_id = expr.to_index;
 

	
 
        debug_log!("Slicing expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Subject type: {}", self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - FromIdx type: {}", self.expr_types.get(&from_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - ToIdx   type: {}", self.expr_types.get(&to_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // Make sure subject is arraylike and indices are of equal integerlike
 
        let progress_subject_base = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
        let progress_idx_base = self.apply_forced_constraint(ctx, from_id, &INTEGERLIKE_TEMPLATE)?;
 
        let (progress_from, progress_to) = self.apply_equal2_constraint(ctx, upcast_id, from_id, 0, to_id, 0)?;
 

	
 
        // Make sure that if the output has type T, the subject is Array<T>
 
        let (progress_expr, progress_subject) =
 
            self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, subject_id, 1)?;
 

	
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject_base || progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - FromIdx type [{}]: {}", progress_idx_base || progress_from, self.expr_types.get(&from_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - ToIdx   type [{}]: {}", progress_idx_base || progress_to, self.expr_types.get(&to_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_subject_base || progress_subject { self.queue_expr(subject_id); }
 
        if progress_idx_base || progress_from { self.queue_expr(from_id); }
 
        if progress_idx_base || progress_to { self.queue_expr(to_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> Result<(), ParseError2> {
 
    fn progress_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        
 
        debug_log!("Select expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Subject type: {}", self.expr_types.get(&ctx.heap[id].subject).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let expr = &mut ctx.heap[id];
 
        let subject_id = expr.subject;
 

	
 
        fn determine_inference_type_instance<'a>(types: &'a TypeTable, infer_type: &InferenceType) -> Result<Option<&'a DefinedType>, ()> {
 
            for part in &infer_type.parts {
 
                if part.is_marker() || !part.is_concrete() {
 
                    continue;
 
                }
 

	
 
                // Part is concrete, check if it is an instance of something
 
                if let InferenceTypePart::Instance(definition_id, _num_sub) = part {
 
                    // Lookup type definition and ensure the specified field 
 
                    // name exists on the struct
 
                    let definition = types.get_base_definition(definition_id);
 
                    debug_assert!(definition.is_some());
 
                    let definition = definition.unwrap();
 

	
 
@@ -1780,89 +1780,89 @@ impl TypeResolvingVisitor {
 
        }
 

	
 
        let (progress_subject, progress_expr) = match &mut expr.field {
 
            Field::Length => {
 
                let progress_subject = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 

	
 
                (progress_subject, progress_expr)
 
            },
 
            Field::Symbolic(field) => {
 
                // Retrieve the struct definition id and field index if possible 
 
                // and not previously determined
 
                if field.definition.is_none() {
 
                    // Not yet known, check if we can determine it
 
                    let subject_type = self.expr_types.get(&subject_id).unwrap();
 
                    let type_def = determine_inference_type_instance(&ctx.types, subject_type);
 

	
 
                    match type_def {
 
                        Ok(Some(type_def)) => {
 
                            // Subject type is known, check if it is a 
 
                            // struct and the field exists on the struct
 
                            let struct_def = if let DefinedTypeVariant::Struct(struct_def) = &type_def.definition {
 
                                struct_def
 
                            } else {
 
                                return Err(ParseError2::new_error(
 
                                return Err(ParseError::new_error(
 
                                    &ctx.module.source, field.identifier.position,
 
                                    &format!(
 
                                        "Can only apply field access to structs, got a subject of type '{}'",
 
                                        subject_type.display_name(&ctx.heap)
 
                                    )
 
                                ));
 
                            };
 

	
 
                            for (field_def_idx, field_def) in struct_def.fields.iter().enumerate() {
 
                                if field_def.identifier == field.identifier {
 
                                    // Set field definition and index
 
                                    field.definition = Some(type_def.ast_definition);
 
                                    field.field_idx = field_def_idx;
 
                                    break;
 
                                }
 
                            }
 

	
 
                            if field.definition.is_none() {
 
                                let field_position = field.identifier.position;
 
                                let ast_struct_def = ctx.heap[type_def.ast_definition].as_struct();
 
                                return Err(ParseError2::new_error(
 
                                return Err(ParseError::new_error(
 
                                    &ctx.module.source, field_position,
 
                                    &format!(
 
                                        "This field does not exist on the struct '{}'",
 
                                        &String::from_utf8_lossy(&ast_struct_def.identifier.value)
 
                                    )
 
                                ))
 
                            }
 

	
 
                            // Encountered definition and field index for the
 
                            // first time
 
                            self.insert_initial_select_polymorph_data(ctx, id);
 
                        },
 
                        Ok(None) => {
 
                            // Type of subject is not yet known, so we 
 
                            // cannot make any progress yet
 
                            return Ok(())
 
                        },
 
                        Err(()) => {
 
                            return Err(ParseError2::new_error(
 
                            return Err(ParseError::new_error(
 
                                &ctx.module.source, field.identifier.position,
 
                                &format!(
 
                                    "Can only apply field access to structs, got a subject of type '{}'",
 
                                    subject_type.display_name(&ctx.heap)
 
                                )
 
                            ));
 
                        }
 
                    }
 
                }
 

	
 
                // If here then field definition and index are known, and the
 
                // initial type (based on the struct's definition) has been
 
                // applied.
 
                // Check to see if we can infer anything about the subject's and
 
                // the field's polymorphic variables
 
                let poly_data = self.extra_data.get_mut(&upcast_id).unwrap();
 
                let mut poly_progress = HashSet::new();
 
                
 
                // Apply to struct's type
 
                let signature_type: *mut _ = &mut poly_data.embedded[0];
 
                let subject_type: *mut _ = self.expr_types.get_mut(&subject_id).unwrap();
 

	
 
                let (_, progress_subject) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, Some(subject_id), poly_data, &mut poly_progress,
 
@@ -1896,90 +1896,90 @@ impl TypeResolvingVisitor {
 
                    poly_data, &poly_progress, signature_type, subject_type
 
                );
 

	
 
                let signature_type: *mut _ = &mut poly_data.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    poly_data, &poly_progress, signature_type, expr_type
 
                );
 

	
 
                (progress_subject, progress_expr)
 
            }
 
        };
 

	
 
        if progress_subject { self.queue_expr(subject_id); }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
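Field selection is the one expression whose rule needs the type table mid-inference: once the subject's type pins down a concrete struct, the field name is looked up in that struct's definition to obtain a field index, or one of the two errors above. The lookup itself is simple; a toy version over a plain field list (all names illustrative):

    struct FieldDef { name: &'static str }

    fn find_field(struct_name: &str, fields: &[FieldDef], wanted: &str) -> Result<usize, String> {
        fields
            .iter()
            .position(|f| f.name == wanted)
            .ok_or_else(|| format!("This field does not exist on the struct '{}'", struct_name))
    }

    fn main() {
        let fields = [FieldDef { name: "x" }, FieldDef { name: "y" }];
        assert_eq!(find_field("Point", &fields, "y"), Ok(1));
        assert!(find_field("Point", &fields, "z").is_err());
    }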
 

	
 
    fn progress_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> Result<(), ParseError2> {
 
    fn progress_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let expr_elements = expr.elements.clone(); // TODO: @performance
 

	
 
        debug_log!("Array expr ({} elements): {}", expr_elements.len(), upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // All elements should have an equal type
 
        let progress = self.apply_equal_n_constraint(ctx, upcast_id, &expr_elements)?;
 
        for (progress_arg, arg_id) in progress.iter().zip(expr_elements.iter()) {
 
            if *progress_arg {
 
                self.queue_expr(*arg_id);
 
            }
 
        }
 

	
 
        // And the output should be an array of the element types
 
        let mut expr_progress = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
        if !expr_elements.is_empty() {
 
            let first_arg_id = expr_elements[0];
 
            let (inner_expr_progress, arg_progress) = self.apply_equal2_constraint(
 
                ctx, upcast_id, upcast_id, 1, first_arg_id, 0
 
            )?;
 

	
 
            expr_progress = expr_progress || inner_expr_progress;
 

	
 
            // Note that if the array type progressed the type of the arguments,
 
            // then we should enqueue this progression function again
 
            // TODO: @fix Make apply_equal_n accept a start idx as well
 
            if arg_progress { self.queue_expr(upcast_id); }
 
        }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type [{}]: {}", expr_progress, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if expr_progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> Result<(), ParseError2> {
 
    fn progress_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 

	
 
        debug_log!("Literal expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let progress_expr = match &expr.value {
 
            Literal::Null => {
 
                self.apply_forced_constraint(ctx, upcast_id, &MESSAGE_TEMPLATE)?
 
            },
 
            Literal::Integer(_) => {
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?
 
            },
 
            Literal::True | Literal::False => {
 
                self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?
 
            },
 
            Literal::Character(_) => todo!("character literals"),
 
            Literal::Struct(data) => {
 
                let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 
                for poly in &extra.poly_vars {
 
                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
 
                }
 
                let mut poly_progress = HashSet::new();
 
@@ -2061,49 +2061,49 @@ impl TypeResolvingVisitor {
 
                // For the return type
 
                let signature_type: *mut _ = &mut extra.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    extra, &poly_progress, signature_type, expr_type
 
                );
 

	
 
                progress_expr
 
            }
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // TODO: FIX!!!!
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    // TODO: @cleanup, see how this can be cleaned up once I implement
 
    //  polymorphic struct/enum/union literals. These likely follow the same
 
    //  pattern as here.
 
    fn progress_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> Result<(), ParseError2> {
 
    fn progress_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 

	
 
        debug_log!("Call expr '{}': {}", match &expr.method {
 
            Method::Create => String::from("create"),
 
            Method::Fires => String::from("fires"),
 
            Method::Get => String::from("get"),
 
            Method::Put => String::from("put"),
 
            Method::Symbolic(method) => String::from_utf8_lossy(&method.identifier.value).to_string()
 
        },upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 
        debug_log!(" * During (inferring types from arguments and return type):");
 

	
 
        // Check if we can make progress using the arguments and/or return types
 
        // while keeping track of the polyvars we've extended
 
        let mut poly_progress = HashSet::new();
 
        debug_assert_eq!(extra.embedded.len(), expr.arguments.len());
 

	
 
        for (arg_idx, arg_id) in expr.arguments.clone().into_iter().enumerate() {
 
            let signature_type: *mut _ = &mut extra.embedded[arg_idx];
 
            let argument_type: *mut _ = self.expr_types.get_mut(&arg_id).unwrap();
 
            let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
@@ -2174,68 +2174,68 @@ impl TypeResolvingVisitor {
 
        }
 

	
 
        // Once more for the return type
 
        let signature_type: *mut _ = &mut extra.returned;
 
        let ret_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
        let progress_ret = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
            extra, &poly_progress, signature_type, ret_type
 
        );
 
        debug_log!(
 
            "   - Ret type | sig: {}, arg: {}", 
 
            unsafe{&*signature_type}.display_name(&ctx.heap), 
 
            unsafe{&*ret_type}.display_name(&ctx.heap)
 
        );
 
        if progress_ret {
 
            self.queue_expr_parent(ctx, upcast_id);
 
        }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> Result<(), ParseError2> {
 
    fn progress_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let var_expr = &ctx.heap[id];
 
        let var_id = var_expr.declaration.unwrap();
 

	
 
        debug_log!("Variable expr '{}': {}", &String::from_utf8_lossy(&ctx.heap[var_id].identifier().value), upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Var  type: {}", self.var_types.get(&var_id).unwrap().var_type.display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // Retrieve shared variable type and expression type and apply inference
 
        let var_data = self.var_types.get_mut(&var_id).unwrap();
 
        let expr_type = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            &mut var_data.var_type as *mut _, 0, expr_type, 0
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            let var_decl = &ctx.heap[var_id];
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, var_decl.position(),
 
                &format!(
 
                    "Conflicting types for this variable, previously assigned the type '{}'",
 
                    var_data.var_type.display_name(&ctx.heap)
 
                )
 
            ).with_postfixed_info(
 
                &ctx.module.source, var_expr.position,
 
                &format!(
 
                    "But inferred to have incompatible type '{}' here",
 
                    expr_type.display_name(&ctx.heap)
 
                )
 
            ))
 
        }
 

	
 
        let progress_var = infer_res.modified_lhs();
 
        let progress_expr = infer_res.modified_rhs();
 

	
 
        if progress_var {
 
            // Let other variable expressions using this type progress as well
 
            for other_expr in var_data.used_at.iter() {
 
                if *other_expr != upcast_id {
 
                    self.expr_queued.insert(*other_expr);
 
                }
 
            }
 
@@ -2247,181 +2247,181 @@ impl TypeResolvingVisitor {
 
                let var_type: *mut _ = &mut var_data.var_type;
 
                let link_data = self.var_types.get_mut(&linked_id).unwrap();
 

	
 
                debug_assert!(
 
                    unsafe{&*var_type}.parts[0] == InferenceTypePart::Input ||
 
                    unsafe{&*var_type}.parts[0] == InferenceTypePart::Output
 
                );
 
                debug_assert!(
 
                    link_data.var_type.parts[0] == InferenceTypePart::Input ||
 
                    link_data.var_type.parts[0] == InferenceTypePart::Output
 
                );
 
                match InferenceType::infer_subtree_for_single_type(&mut link_data.var_type, 1, &unsafe{&*var_type}.parts, 1) {
 
                    SingleInferenceResult::Modified => {
 
                        for other_expr in &link_data.used_at {
 
                            self.expr_queued.insert(*other_expr);
 
                        }
 
                    },
 
                    SingleInferenceResult::Unmodified => {},
 
                    SingleInferenceResult::Incompatible => {
 
                        let var_data = self.var_types.get(&var_id).unwrap();
 
                        let link_data = self.var_types.get(&linked_id).unwrap();
 
                        let var_decl = &ctx.heap[var_id];
 
                        let link_decl = &ctx.heap[linked_id];
 

	
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, var_decl.position(),
 
                            &format!(
 
                                "Conflicting types for this variable, assigned the type '{}'",
 
                                var_data.var_type.display_name(&ctx.heap)
 
                            )
 
                        ).with_postfixed_info(
 
                            &ctx.module.source, link_decl.position(),
 
                            &format!(
 
                                "Because it is incompatible with this variable, assigned the type '{}'",
 
                                link_data.var_type.display_name(&ctx.heap)
 
                            )
 
                        ));
 
                    }
 
                }
 
            }
 
        }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Var  type [{}]: {}", progress_var, self.var_types.get(&var_id).unwrap().var_type.display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 

	
 
        Ok(())
 
    }
 

	
 
    fn queue_expr_parent(&mut self, ctx: &Ctx, expr_id: ExpressionId) {
 
        if let ExpressionParent::Expression(parent_expr_id, _) = &ctx.heap[expr_id].parent() {
 
            self.expr_queued.insert(*parent_expr_id);
 
        }
 
    }
 

	
 
    fn queue_expr(&mut self, expr_id: ExpressionId) {
 
        self.expr_queued.insert(expr_id);
 
    }
 

	
 
    /// Applies a forced type constraint: the type associated with the supplied
 
    /// expression will be molded into the provided "template". The template may
 
    /// be fully specified (e.g. a bool) or contain "inference" variables (e.g.
 
    /// an array of T)
 
    fn apply_forced_constraint(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> Result<bool, ParseError2> {
 
    ) -> Result<bool, ParseError> {
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id);
 
        let expr_type = self.expr_types.get_mut(&expr_id).unwrap();
 
        match InferenceType::infer_subtree_for_single_type(expr_type, 0, template, 0) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(
 
                self.construct_template_type_error(ctx, expr_id, template)
 
            )
 
        }
 
    }
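
Aside (not part of this changeset): the "template" idea behind apply_forced_constraint, reduced to a standalone sketch. The Ty enum and force function below are hypothetical stand-ins for the crate's InferenceType/InferenceTypePart machinery, shown only to make the molding behaviour concrete.

#[derive(Clone, Debug, PartialEq)]
enum Ty { Unknown, Bool, Array(Box<Ty>) }

// Mold `current` into `template`: unknown parts may be filled in, concrete
// parts must agree. Some(true) means progress, Some(false) means no change,
// None mirrors SingleInferenceResult::Incompatible.
fn force(current: &mut Ty, template: &Ty) -> Option<bool> {
    if let Ty::Unknown = template { return Some(false); }
    match current {
        Ty::Unknown => { *current = template.clone(); Some(true) }
        Ty::Bool => if *template == Ty::Bool { Some(false) } else { None },
        Ty::Array(inner) => match template {
            Ty::Array(t_inner) => force(inner, t_inner),
            _ => None,
        },
    }
}

fn main() {
    // An if-condition forces its expression towards a fully specified bool.
    let mut cond_ty = Ty::Unknown;
    assert_eq!(force(&mut cond_ty, &Ty::Bool), Some(true));
    // A template may itself be partial: "array of (still unknown) T".
    let mut arr_ty = Ty::Unknown;
    assert_eq!(force(&mut arr_ty, &Ty::Array(Box::new(Ty::Unknown))), Some(true));
}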
 

	
 
    fn apply_forced_constraint_types(
 
        to_infer: *mut InferenceType, to_infer_start_idx: usize,
 
        template: &[InferenceTypePart], template_start_idx: usize
 
    ) -> Result<bool, ()> {
 
        match InferenceType::infer_subtree_for_single_type(
 
            unsafe{ &mut *to_infer }, to_infer_start_idx,
 
            template, template_start_idx
 
        ) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(()),
 
        }
 
    }
 

	
 
    /// Applies a type constraint that expects the two provided types to be
 
    /// equal. We attempt to make progress in inferring the types. If the call
 
    /// is successful then the composition of all types is made equal.
 
    /// The "parent" `expr_id` is provided to construct errors.
 
    fn apply_equal2_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg1_start_idx: usize,
 
        arg2_id: ExpressionId, arg2_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError2> {
 
    ) -> Result<(bool, bool), ParseError> {
 
        debug_assert_expr_ids_unique_and_known!(self, arg1_id, arg2_id);
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            arg1_type, arg1_start_idx,
 
            arg2_type, arg2_start_idx
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        Ok((infer_res.modified_lhs(), infer_res.modified_rhs()))
 
    }
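
Aside (not part of this changeset): a minimal sketch of the two-way inference that the equal2 constraint relies on, in which either argument may be progressed by the other. The toy Ty type and infer_both function are hypothetical, not the crate's API; they only mirror the (modified_lhs, modified_rhs) bookkeeping.

#[derive(Clone, Debug, PartialEq)]
enum Ty { Unknown, Bool, Array(Box<Ty>) }

fn infer_both(a: &mut Ty, b: &mut Ty) -> Option<(bool, bool)> {
    match (a.clone(), b.clone()) {
        (Ty::Unknown, Ty::Unknown) => Some((false, false)),
        (Ty::Unknown, known) => { *a = known; Some((true, false)) }
        (known, Ty::Unknown) => { *b = known; Some((false, true)) }
        (Ty::Array(x), Ty::Array(y)) => {
            let (mut xa, mut ya) = (*x, *y);
            let res = infer_both(&mut xa, &mut ya)?;
            *a = Ty::Array(Box::new(xa));
            *b = Ty::Array(Box::new(ya));
            Some(res)
        }
        (x, y) if x == y => Some((false, false)),
        _ => None, // incompatible, e.g. Bool vs Array
    }
}

fn main() {
    // The argument knows it is an array of bool; the expression learns it.
    let mut expr_ty = Ty::Unknown;
    let mut arg_ty = Ty::Array(Box::new(Ty::Bool));
    assert_eq!(infer_both(&mut expr_ty, &mut arg_ty), Some((true, false)));
    assert_eq!(expr_ty, Ty::Array(Box::new(Ty::Bool)));
}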
 

	
 
    /// Applies an equal2 constraint between a signature type (e.g. a function
 
    /// argument or struct field) and an expression whose type should match that
 
    /// signature. If we make progress on the signature, then we try to see if
 
    /// any of the embedded polymorphic types can be progressed.
 
    ///
 
    /// `outer_expr_id` is the main expression we're progressing (e.g. a 
 
    /// function call), while `expr_id` is the embedded expression we're 
 
    /// matching against the signature. `expression_type` and 
 
    /// `expression_start_idx` belong to `expr_id`.
 
    fn apply_equal2_signature_constraint(
 
        ctx: &Ctx, outer_expr_id: ExpressionId, expr_id: Option<ExpressionId>,
 
        polymorph_data: &mut ExtraData, polymorph_progress: &mut HashSet<usize>,
 
        signature_type: *mut InferenceType, signature_start_idx: usize,
 
        expression_type: *mut InferenceType, expression_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError2> {
 
    ) -> Result<(bool, bool), ParseError> {
 
        // Safety: cannot mutually infer the same types
 
        //         polymorph_data containers may not be modified
 
        debug_assert_ptrs_distinct!(signature_type, expression_type);
 

	
 
        // Infer the signature and expression type
 
        let infer_res = unsafe { 
 
            InferenceType::infer_subtrees_for_both_types(
 
                signature_type, signature_start_idx,
 
                expression_type, expression_start_idx
 
            ) 
 
        };
 

	
 
        if infer_res == DualInferenceResult::Incompatible {
 
            // TODO: Check if I still need to use this
 
            let outer_position = ctx.heap[outer_expr_id].position();
 
            let (position_name, position) = match expr_id {
 
                Some(expr_id) => ("argument's", ctx.heap[expr_id].position()),
 
                None => ("type's", outer_position)
 
            };
 
            let (signature_display_type, expression_display_type) = unsafe { (
 
                (&*signature_type).display_name(&ctx.heap),
 
                (&*expression_type).display_name(&ctx.heap)
 
            ) };
 

	
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, outer_position,
 
                "Failed to fully resolve the types of this expression"
 
            ).with_postfixed_info(
 
                &ctx.module.source, position,
 
                &format!(
 
                    "Because the {} signature has been resolved to '{}', but the expression has been resolved to '{}'",
 
                    position_name, signature_display_type, expression_display_type
 
                )
 
            ));
 
        }
 

	
 
        // Try to see if we can progress any of the polymorphic variables
 
        let progress_sig = infer_res.modified_lhs();
 
        let progress_expr = infer_res.modified_rhs();
 

	
 
        if progress_sig {
 
            let signature_type = unsafe{&mut *signature_type};
 
            debug_assert!(
 
                signature_type.has_body_marker, 
 
                "made progress on signature type, but it doesn't have a marker"
 
            );
 
            for (poly_idx, poly_section) in signature_type.body_marker_iter() {
 
                let polymorph_type = &mut polymorph_data.poly_vars[poly_idx];
 
                match Self::apply_forced_constraint_types(
 
@@ -2482,93 +2482,93 @@ impl TypeResolvingVisitor {
 
        // If we made any progress on the signature's type, then we also need to
 
        // apply it to the expression that is supposed to match the signature.
 
        if modified_sig {
 
            match InferenceType::infer_subtree_for_single_type(
 
                expr_type, 0, &signature_type.parts, 0
 
            ) {
 
                SingleInferenceResult::Modified => true,
 
                SingleInferenceResult::Unmodified => false,
 
                SingleInferenceResult::Incompatible =>
 
                    unreachable!("encountered failure while reapplying modified signature to expression after polyvar inference")
 
            }
 
        } else {
 
            false
 
        }
 
    }
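
Aside (not part of this changeset): a standalone sketch of the polymorphic-variable bookkeeping that apply_equal2_signature_constraint and apply_equal2_polyvar_constraint implement on real InferenceType trees. The Slot type, progress_slot function and string-based types below are hypothetical simplifications: progress on a signature slot that names a poly var records a value for that variable, and a second, conflicting value is the situation construct_poly_arg_error reports.

use std::collections::HashMap;

#[derive(Debug, PartialEq)]
enum Slot { PolyVar(&'static str), Known(String) }

fn progress_slot(slot: &mut Slot, expr: &str, poly: &mut HashMap<&'static str, String>) -> Result<bool, String> {
    match slot {
        Slot::Known(t) if t == expr => Ok(false),
        Slot::Known(t) => Err(format!("expected '{}', got '{}'", t, expr)),
        Slot::PolyVar(name) => {
            let recorded = poly.get(*name).cloned();
            match recorded {
                Some(prev) if prev != expr => {
                    Err(format!("conflicting types for polymorphic variable '{}'", *name))
                }
                _ => {
                    poly.insert(*name, expr.to_string());
                    *slot = Slot::Known(expr.to_string());
                    Ok(true)
                }
            }
        }
    }
}

fn main() {
    // fn pick<T>(a: T, b: T) -> T, called with an int and a bool: the first
    // argument fixes T, the second argument then triggers the conflict error.
    let mut poly = HashMap::new();
    let mut args = vec![Slot::PolyVar("T"), Slot::PolyVar("T")];
    assert_eq!(progress_slot(&mut args[0], "int", &mut poly), Ok(true));
    assert!(progress_slot(&mut args[1], "bool", &mut poly).is_err());
}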
 

	
 
    /// Applies a type constraint that expects all three provided types to be
 
    /// equal. In case we can make progress in inferring the types then we
 
    /// attempt to do so. If the call is successful then the composition of all
 
    /// types is made equal.
 
    fn apply_equal3_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId,
 
        start_idx: usize
 
    ) -> Result<(bool, bool, bool), ParseError2> {
 
    ) -> Result<(bool, bool, bool), ParseError> {
 
        // Safety: all expression IDs are always distinct, and we do not modify
 
        //  the container
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id, arg1_id, arg2_id);
 
        let expr_type: *mut _ = self.expr_types.get_mut(&expr_id).unwrap();
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 

	
 
        let expr_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(expr_type, start_idx, arg1_type, start_idx)
 
        };
 
        if expr_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_expr_type_error(ctx, expr_id, arg1_id));
 
        }
 

	
 
        let args_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(arg1_type, start_idx, arg2_type, start_idx) };
 
        if args_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        // If all types are compatible, but the second call caused the arg1_type
 
        // to be expanded, then we must also assign this to expr_type.
 
        let mut progress_expr = expr_res.modified_lhs();
 
        let mut progress_arg1 = expr_res.modified_rhs();
 
        let progress_arg2 = args_res.modified_rhs();
 

	
 
        if args_res.modified_lhs() { 
 
            unsafe {
 
                let end_idx = InferenceType::find_subtree_end_idx(&(*arg2_type).parts, start_idx);
 
                let subtree = &((*arg2_type).parts[start_idx..end_idx]);
 
                (*expr_type).replace_subtree(start_idx, subtree);
 
            }
 
            progress_expr = true;
 
            progress_arg1 = true;
 
        }
 

	
 
        Ok((progress_expr, progress_arg1, progress_arg2))
 
    }
 

	
 
    // TODO: @optimize Since we only deal with a single type this might be done
 
    //  a lot more efficiently, methinks (disregarding the allocations here)
 
    fn apply_equal_n_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId, args: &[ExpressionId],
 
    ) -> Result<Vec<bool>, ParseError2> {
 
    ) -> Result<Vec<bool>, ParseError> {
 
        // Early exit
 
        match args.len() {
 
            0 => return Ok(vec!()),         // nothing to progress
 
            1 => return Ok(vec![false]),    // only one type, so nothing to infer
 
            _ => {}
 
        }
 

	
 
        let mut progress = Vec::new();
 
        progress.resize(args.len(), false);
 

	
 
        // Do pairwise inference, keep track of the last entry we made progress
 
        // on. Once done we need to update everything to the most-inferred type.
 
        let mut arg_iter = args.iter();
 
        let mut last_arg_id = *arg_iter.next().unwrap();
 
        let mut last_lhs_progressed = 0;
 
        let mut lhs_arg_idx = 0;
 

	
 
        while let Some(next_arg_id) = arg_iter.next() {
 
            let arg1_type: *mut _ = self.expr_types.get_mut(&last_arg_id).unwrap();
 
            let arg2_type: *mut _ = self.expr_types.get_mut(next_arg_id).unwrap();
 

	
 
            let res = unsafe {
 
                InferenceType::infer_subtrees_for_both_types(arg1_type, 0, arg2_type, 0)
 
            };
 
@@ -2588,49 +2588,49 @@ impl TypeResolvingVisitor {
 
            last_arg_id = *next_arg_id;
 
            lhs_arg_idx += 1;
 
        }
 

	
 
        // Re-infer everything. Note that we do not need to re-infer the type
 
        // exactly at `last_lhs_progressed`, but only everything up to it.
 
        let last_type: *mut _ = self.expr_types.get_mut(args.last().unwrap()).unwrap();
 
        for arg_idx in 0..last_lhs_progressed {
 
            let arg_type: *mut _ = self.expr_types.get_mut(&args[arg_idx]).unwrap();
 
            unsafe{
 
                (*arg_type).replace_subtree(0, &(*last_type).parts);
 
            }
 
            progress[arg_idx] = true;
 
        }
 

	
 
        Ok(progress)
 
    }
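
Aside (not part of this changeset): a simplified, standalone sketch of the equal-N idea: let information flow pairwise in one pass, then copy the final, most-inferred type back over the earlier entries. It uses plain optional strings where the real code walks InferenceType subtrees, so it illustrates the propagation order rather than the actual algorithm.

fn equalize(types: &mut Vec<Option<String>>) -> Result<Vec<bool>, String> {
    let mut progress = vec![false; types.len()];
    // Forward pass: each pair shares what it knows, left to right.
    for idx in 1..types.len() {
        let (left, right) = (types[idx - 1].clone(), types[idx].clone());
        match (left, right) {
            (Some(l), Some(r)) if l != r => {
                return Err(format!("incompatible types '{}' and '{}'", l, r))
            }
            (Some(l), None) => { types[idx] = Some(l); progress[idx] = true; }
            _ => {}
        }
    }
    // Backward fill: every earlier entry is brought up to the last one.
    if let Some(last) = types.last().cloned().flatten() {
        for idx in 0..types.len() - 1 {
            if types[idx].is_none() {
                types[idx] = Some(last.clone());
                progress[idx] = true;
            }
        }
    }
    Ok(progress)
}

fn main() {
    // Array literal [a, 5, b]: the middle element fixes everything to "int".
    let mut ts = vec![None, Some("int".to_string()), None];
    let progress = equalize(&mut ts).unwrap();
    assert_eq!(ts, vec![Some("int".to_string()); 3]);
    assert_eq!(progress, vec![true, false, true]);
}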
 

	
 
    /// Determines the `InferenceType` for the expression based on the
 
    /// expression parent. Note that if the parent is another expression, we do
 
    /// not take special action; instead we let parent expressions fix the type
 
    /// of subexpressions before they have a chance to call this function.
 
    fn insert_initial_expr_inference_type(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId
 
    ) -> Result<(), ParseError2> {
 
    ) -> Result<(), ParseError> {
 
        use ExpressionParent as EP;
 
        use InferenceTypePart as ITP;
 

	
 
        let expr = &ctx.heap[expr_id];
 
        let inference_type = match expr.parent() {
 
            EP::None =>
 
                // Should have been set by linker
 
                unreachable!(),
 
            EP::ExpressionStmt(_) | EP::Expression(_, _) =>
 
                // Determined during type inference
 
                InferenceType::new(false, false, vec![ITP::Unknown]),
 
            EP::If(_) | EP::While(_) | EP::Assert(_) =>
 
                // Must be a boolean
 
                InferenceType::new(false, true, vec![ITP::Bool]),
 
            EP::Return(_) =>
 
                // Must match the return type of the function
 
                if let DefinitionType::Function(func_id) = self.definition_type {
 
                    let return_parser_type_id = ctx.heap[func_id].return_type;
 
                    self.determine_inference_type_from_parser_type(ctx, return_parser_type_id, true)
 
                } else {
 
                    // Cannot happen: definition always set upon body traversal
 
                    // and "return" calls in components are illegal.
 
                    unreachable!();
 
                },
 
@@ -2964,128 +2964,128 @@ impl TypeResolvingVisitor {
 
                            }
 

	
 
                            infer_type.push(ITP::Instance(*definition_id, symbolic.poly_args2.len()));
 
                            let mut poly_arg_idx = symbolic.poly_args2.len();
 
                            while poly_arg_idx > 0 {
 
                                poly_arg_idx -= 1;
 
                                to_consider.push_front(symbolic.poly_args2[poly_arg_idx]);
 
                            }
 
                        }
 
                    }
 
                }
 
            }
 
        }
 

	
 
        InferenceType::new(has_markers, !has_inferred, infer_type)
 
    }
 

	
 
    /// Construct an error when an expression's type does not match. This
 
    /// happens if we infer the expression type from its arguments (e.g. the
 
    /// expression type of an addition operator is the type of the arguments)
 
    /// But the expression type was already set due to our parent (e.g. an
 
    /// "if statement" or a "logical not" always expecting a boolean)
 
    fn construct_expr_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, arg_id: ExpressionId
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        // TODO: Expand and provide more meaningful information for humans
 
        let expr = &ctx.heap[expr_id];
 
        let arg_expr = &ctx.heap[arg_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 
        let arg_type = self.expr_types.get(&arg_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: this expression expected a '{}'", 
 
                expr_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg_expr.position(),
 
            &format!(
 
                "But this expression yields a '{}'",
 
                arg_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_arg_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let arg1 = &ctx.heap[arg1_id];
 
        let arg2 = &ctx.heap[arg2_id];
 

	
 
        let arg1_type = self.expr_types.get(&arg1_id).unwrap();
 
        let arg2_type = self.expr_types.get(&arg2_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            "Incompatible types: cannot apply this expression"
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg1.position(),
 
            &format!(
 
                "Because this expression has type '{}'",
 
                arg1_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg2.position(),
 
            &format!(
 
                "But this expression has type '{}'",
 
                arg2_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_template_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: got a '{}' but expected a '{}'",
 
                expr_type.display_name(&ctx.heap), 
 
                InferenceType::partial_display_name(&ctx.heap, template)
 
            )
 
        )
 
    }
 

	
 
    /// Constructs a human-interpretable error in the case that type inference
 
    /// of a polymorphic variable for a function call or literal construction
 
    /// failed. This may only be caused by a pair of inference types (which may 
 
    /// come from arguments or the return type) having two different inferred 
 
    /// values for that polymorphic variable.
 
    ///
 
    /// So we find this pair and construct the error using it.
 
    ///
 
    /// We assume that the expression is a function call or a struct literal,
 
    /// and that an actual error has occurred.
 
    fn construct_poly_arg_error(
 
        ctx: &Ctx, poly_data: &ExtraData, expr_id: ExpressionId
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        // Helper function to check for polymorph mismatch between two inference
 
        // types.
 
        fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(usize, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            if !type_a.has_body_marker || !type_b.has_body_marker {
 
                return None
 
            }
 

	
 
            for (marker_a, section_a) in type_a.body_marker_iter() {
 
                for (marker_b, section_b) in type_b.body_marker_iter() {
 
                    if marker_a != marker_b {
 
                        // Not the same polymorphic variable
 
                        continue;
 
                    }
 

	
 
                    if !InferenceType::check_subtrees(section_a, 0, section_b, 0) {
 
                        // Not compatible
 
                        return Some((marker_a, section_a, section_b))
 
                    }
 
                }
 
            }
 

	
 
            None
 
        }
 

	
 
@@ -3105,75 +3105,75 @@ impl TypeResolvingVisitor {
 
                        },
 
                        Definition::Component(definition) => {
 
                            String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string()
 
                        }
 
                    };
 
                    let func_name = String::from_utf8_lossy(&symbolic.identifier.value).to_string();
 
                    (poly_var, func_name)
 
                }
 
            }
 
        }
 

	
 
        fn get_poly_var_and_type_name(ctx: &Ctx, poly_var_idx: usize, definition_id: DefinitionId) -> (String, String) {
 
            let definition = &ctx.heap[definition_id];
 
            match definition {
 
                Definition::Enum(_) | Definition::Function(_) | Definition::Component(_) =>
 
                    unreachable!("get_poly_var_and_type_name called on non-struct value"),
 
                Definition::Struct(definition) => (
 
                    String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string(),
 
                    String::from_utf8_lossy(&definition.identifier.value).to_string()
 
                ),
 
            }
 
        }
 

	
 
        // Helper function to construct initial error
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &Expression) -> ParseError2 {
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &Expression) -> ParseError {
 
            match expr {
 
                Expression::Call(expr) => {
 
                    let (poly_var, func_name) = get_poly_var_and_func_name(ctx, poly_var_idx, expr);
 
                    return ParseError2::new_error(
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' of '{}'",
 
                            poly_var, func_name
 
                        )
 
                    )
 
                },
 
                Expression::Literal(expr) => {
 
                    let lit_struct = expr.value.as_struct();
 
                    let (poly_var, struct_name) = get_poly_var_and_type_name(ctx, poly_var_idx, lit_struct.definition.unwrap());
 
                    return ParseError2::new_error(
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' of instantiation of '{}'",
 
                            poly_var, struct_name
 
                        )
 
                    )
 
                },
 
                Expression::Select(expr) => {
 
                    let field = expr.field.as_symbolic();
 
                    let (poly_var, struct_name) = get_poly_var_and_type_name(ctx, poly_var_idx, field.definition.unwrap());
 
                    return ParseError2::new_error(
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' while accessing field '{}' of '{}'",
 
                            poly_var, &String::from_utf8_lossy(&field.identifier.value), struct_name
 
                        )
 
                    )
 
                }
 
                _ => unreachable!("called construct_poly_arg_error without a call/literal expression")
 
            }
 
        }
 

	
 
        // Actual checking
 
        let expr = &ctx.heap[expr_id];
 
        let (expr_args, expr_return_name) = match expr {
 
            Expression::Call(expr) => 
 
                (
 
                    expr.arguments.clone(),
 
                    "return type"
 
                ),
 
            Expression::Literal(expr) => 
 
                (
 
                    expr.value.as_struct().fields
 
                        .iter()
 
                        .map(|f| f.value)
src/protocol/parser/type_table.rs
Show inline comments
 
@@ -297,49 +297,49 @@ pub(crate) struct TypeTable {
 
}
 

	
 
pub(crate) struct TypeCtx<'a> {
 
    symbols: &'a SymbolTable,
 
    heap: &'a mut Heap,
 
    modules: &'a [LexedModule]
 
}
 

	
 
impl<'a> TypeCtx<'a> {
 
    pub(crate) fn new(symbols: &'a SymbolTable, heap: &'a mut Heap, modules: &'a [LexedModule]) -> Self {
 
        Self{ symbols, heap, modules }
 
    }
 
}
 

	
 
impl TypeTable {
 
    /// Construct a new type table without any resolved types.
 
    pub(crate) fn new() -> Self {
 
        Self{ 
 
            lookup: HashMap::new(), 
 
            iter: TypeIterator::new(), 
 
            parser_type_iter: VecDeque::with_capacity(64), 
 
        }
 
    }
 

	
 
    pub(crate) fn build_base_types(&mut self, ctx: &mut TypeCtx) -> Result<(), ParseError2> {
 
    pub(crate) fn build_base_types(&mut self, ctx: &mut TypeCtx) -> Result<(), ParseError> {
 
        // Make sure we're allowed to cast root_id to index into ctx.modules
 
        debug_assert!(self.lookup.is_empty());
 
        debug_assert!(self.iter.top().is_none());
 
        debug_assert!(self.parser_type_iter.is_empty());
 

	
 
        if cfg!(debug_assertions) {
 
            for (index, module) in ctx.modules.iter().enumerate() {
 
                debug_assert_eq!(index, module.root_id.index as usize);
 
            }
 
        }
 

	
 
        // Use context to guess hashmap size
 
        let reserve_size = ctx.heap.definitions.len();
 
        self.lookup.reserve(reserve_size);
 

	
 
        // TODO: @cleanup Rework this hack
 
        for root_idx in 0..ctx.modules.len() {
 
            let last_definition_idx = ctx.heap[ctx.modules[root_idx].root_id].definitions.len();
 
            for definition_idx in 0..last_definition_idx {
 
                let definition_id = ctx.heap[ctx.modules[root_idx].root_id].definitions[definition_idx];
 
                self.resolve_base_definition(ctx, definition_id)?;
 
            }
 
        }
 

	
 
@@ -359,121 +359,121 @@ impl TypeTable {
 
    /// Instantiates a monomorph for a given base definition.
 
    pub(crate) fn add_monomorph(&mut self, definition_id: &DefinitionId, types: Vec<ConcreteType>) {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to instantiate monomorph of definition unknown to type table"
 
        );
 

	
 
        let definition = self.lookup.get_mut(definition_id).unwrap();
 
        definition.add_monomorph(types);
 
    }
 

	
 
    /// Checks if a given definition already has a specific monomorph
 
    pub(crate) fn has_monomorph(&mut self, definition_id: &DefinitionId, types: &Vec<ConcreteType>) -> bool {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to check monomorph existence of definition unknown to type table"
 
        );
 

	
 
        let definition = self.lookup.get(definition_id).unwrap();
 
        definition.has_monomorph(types)
 
    }
 

	
 
    /// This function will resolve just the basic definition of the type; it
 
    /// will not handle any of the monomorphized instances of the type.
 
    fn resolve_base_definition<'a>(&'a mut self, ctx: &mut TypeCtx, definition_id: DefinitionId) -> Result<(), ParseError2> {
 
    fn resolve_base_definition<'a>(&'a mut self, ctx: &mut TypeCtx, definition_id: DefinitionId) -> Result<(), ParseError> {
 
        // Check if we have already resolved the base definition
 
        if self.lookup.contains_key(&definition_id) { return Ok(()); }
 

	
 
        let root_id = Self::find_root_id(ctx, definition_id);
 
        self.iter.reset(root_id, definition_id);
 

	
 
        while let Some((root_id, definition_id)) = self.iter.top() {
 
            // We have a type to resolve
 
            let definition = &ctx.heap[definition_id];
 

	
 
            let can_pop_breadcrumb = match definition {
 
                // TODO: @cleanup Borrow rules hax
 
                Definition::Enum(_) => self.resolve_base_enum_definition(ctx, root_id, definition_id),
 
                Definition::Struct(_) => self.resolve_base_struct_definition(ctx, root_id, definition_id),
 
                Definition::Component(_) => self.resolve_base_component_definition(ctx, root_id, definition_id),
 
                Definition::Function(_) => self.resolve_base_function_definition(ctx, root_id, definition_id),
 
            }?;
 

	
 
            // Otherwise: `ingest_resolve_result` has pushed a new breadcrumb
 
            // that we must follow before we can resolve the current type
 
            if can_pop_breadcrumb {
 
                self.iter.pop();
 
            }
 
        }
 

	
 
        // We must have resolved the type
 
        debug_assert!(self.lookup.contains_key(&definition_id), "base type not resolved");
 
        Ok(())
 
    }
 

	
 
    /// Resolve the basic enum definition to an entry in the type table. It will
 
    /// not instantiate any monomorphized instances of polymorphic enum
 
    /// definitions. If a subtype has to be resolved first then this function
 
    /// will return `false` after calling `ingest_resolve_result`.
 
    fn resolve_base_enum_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_enum_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_enum());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base enum already resolved");
 
        
 
        let definition = ctx.heap[definition_id].as_enum();
 

	
 
        // Check if the enum should be implemented as a classic enumeration or
 
        // a tagged union. Keep track of variant index for error messages. Make
 
        // sure all embedded types are resolved.
 
        let mut first_tag_value = None;
 
        let mut first_int_value = None;
 
        for variant in &definition.variants {
 
            match &variant.value {
 
                EnumVariantValue::None => {},
 
                EnumVariantValue::Integer(_) => if first_int_value.is_none() {
 
                    first_int_value = Some(variant.position);
 
                },
 
                EnumVariantValue::Type(variant_type_id) => {
 
                    if first_tag_value.is_none() {
 
                        first_tag_value = Some(variant.position);
 
                    }
 

	
 
                    // Check if the embedded type needs to be resolved
 
                    let resolve_result = self.resolve_base_parser_type(ctx, &definition.poly_vars, root_id, *variant_type_id)?;
 
                    if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                        return Ok(false)
 
                    }
 
                }
 
            }
 
        }
 

	
 
        if first_tag_value.is_some() && first_int_value.is_some() {
 
            // Not illegal, but useless and probably a programmer mistake
 
            let module_source = &ctx.modules[root_id.index as usize].source;
 
            let tag_pos = first_tag_value.unwrap();
 
            let int_pos = first_int_value.unwrap();
 
            return Err(
 
                ParseError2::new_error(
 
                ParseError::new_error(
 
                    module_source, definition.position,
 
                    "Illegal combination of enum integer variant(s) and enum union variant(s)"
 
                )
 
                    .with_postfixed_info(module_source, int_pos, "Assigning an integer value here")
 
                    .with_postfixed_info(module_source, tag_pos, "Embedding a type in a union variant here")
 
            );
 
        }
 

	
 
        // Enumeration is legal
 
        if first_tag_value.is_some() {
 
            // Implement as a tagged union
 

	
 
            // Determine the union variants
 
            let mut tag_value = -1;
 
            let mut variants = Vec::with_capacity(definition.variants.len());
 
            for variant in &definition.variants {
 
                tag_value += 1;
 
                let parser_type = match &variant.value {
 
                    EnumVariantValue::None => {
 
                        None
 
                    },
 
                    EnumVariantValue::Type(parser_type_id) => {
 
                        // Type should be resolvable, we checked this above
 
                        Some(*parser_type_id)
 
@@ -557,49 +557,49 @@ impl TypeTable {
 

	
 
            // Note: although we cannot have embedded types dependent on the
 
            // polymorphic variables, they might still be present as tokens
 
            let definition_id = definition.this.upcast();
 
            self.lookup.insert(definition_id, DefinedType {
 
                ast_root: root_id,
 
                ast_definition: definition_id,
 
                definition: DefinedTypeVariant::Enum(EnumType{
 
                    variants,
 
                    representation: Self::enum_tag_type(min_enum_value, max_enum_value)
 
                }),
 
                poly_vars: self.create_initial_poly_vars(&definition.poly_vars),
 
                is_polymorph: false,
 
                is_pointerlike: false,
 
                monomorphs: Vec::new()
 
            });
 
        }
 

	
 
        Ok(true)
 
    }
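
Aside (not part of this changeset): the representation choice made above, as a tiny standalone sketch. Variant values are reduced to a hypothetical three-case enum; the point is only that any type-carrying variant forces a tagged union, while mixing it with integer-valued variants is reported as a probable mistake.

#[derive(Debug, PartialEq)]
enum VariantValue { None, Integer(i64), Type(&'static str) }

#[derive(Debug, PartialEq)]
enum Representation { ClassicEnum, TaggedUnion }

fn pick_representation(variants: &[VariantValue]) -> Result<Representation, String> {
    let has_int = variants.iter().any(|v| matches!(v, VariantValue::Integer(_)));
    let has_type = variants.iter().any(|v| matches!(v, VariantValue::Type(_)));
    match (has_int, has_type) {
        (true, true) => Err("illegal combination of integer and union variants".to_string()),
        (_, true) => Ok(Representation::TaggedUnion),
        _ => Ok(Representation::ClassicEnum),
    }
}

fn main() {
    let union_like = [VariantValue::None, VariantValue::Type("u32")];
    assert_eq!(pick_representation(&union_like), Ok(Representation::TaggedUnion));
    let classic = [VariantValue::Integer(0), VariantValue::Integer(5)];
    assert_eq!(pick_representation(&classic), Ok(Representation::ClassicEnum));
    let mixed = [VariantValue::Integer(0), VariantValue::Type("u32")];
    assert!(pick_representation(&mixed).is_err());
}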
 

	
 
    /// Resolves the basic struct definition to an entry in the type table. It
 
    /// will not instantiate any monomorphized instances of polymorphic struct
 
    /// definitions.
 
    fn resolve_base_struct_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_struct_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_struct());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base struct already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_struct();
 

	
 
        // Make sure all fields point to resolvable types
 
        for field_definition in &definition.fields {
 
            let resolve_result = self.resolve_base_parser_type(ctx, &definition.poly_vars, root_id, field_definition.parser_type)?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 

	
 
        // All fields types are resolved, construct base type
 
        let mut fields = Vec::with_capacity(definition.fields.len());
 
        for field_definition in &definition.fields {
 
            fields.push(StructField{
 
                identifier: field_definition.field.clone(),
 
                parser_type: field_definition.parser_type,
 
            })
 
        }
 

	
 
        // And make sure no conflicts exist in field names and/or polymorphic args
 
        self.check_identifier_collision(
 
@@ -612,49 +612,49 @@ impl TypeTable {
 
        for field in &fields {
 
            self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, field.parser_type)?;
 
        }
 

	
 
        let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 

	
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Struct(StructType{
 
                fields,
 
            }),
 
            poly_vars: poly_args,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic function definition to an entry in the type table. It
 
    /// will not instantiate any monomorphized instances of polymorphic function
 
    /// definitions.
 
    fn resolve_base_function_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_function_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_function());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base function already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_function();
 
        let return_type = definition.return_type;
 

	
 
        // Check the return type
 
        let resolve_result = self.resolve_base_parser_type(
 
            ctx, &definition.poly_vars, root_id, definition.return_type
 
        )?;
 
        if !self.ingest_resolve_result(ctx, resolve_result)? {
 
            return Ok(false)
 
        }
 

	
 
        // Check the argument types
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            let resolve_result = self.resolve_base_parser_type(
 
                ctx, &definition.poly_vars, root_id, param.parser_type
 
            )?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 
@@ -684,49 +684,49 @@ impl TypeTable {
 
        }
 

	
 
        let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 

	
 
        // Construct entry in type table
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Function(FunctionType{
 
                return_type,
 
                arguments,
 
            }),
 
            poly_vars: poly_args,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic component definition to an entry in the type table.
 
    /// It will not instantiate any monomorphized instances of polymorphic
 
    /// component definitions.
 
    fn resolve_base_component_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_component_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_component());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base component already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_component();
 
        let component_variant = definition.variant;
 

	
 
        // Check argument types
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            let resolve_result = self.resolve_base_parser_type(
 
                ctx, &definition.poly_vars, root_id, param.parser_type
 
            )?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 

	
 
        // Construct argument types
 
        let mut arguments = Vec::with_capacity(definition.parameters.len());
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            arguments.push(FunctionArgument{
 
                identifier: param.identifier.clone(),
 
                parser_type: param.parser_type
 
@@ -748,207 +748,207 @@ impl TypeTable {
 
        let is_polymorph = poly_args.iter().any(|v| v.is_in_use);
 

	
 
        // Construct entry in type table
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Component(ComponentType{
 
                variant: component_variant,
 
                arguments,
 
            }),
 
            poly_vars: poly_args,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Takes a ResolveResult and returns `true` if the caller can happily
 
    /// continue resolving its current type, or `false` if the caller must break
 
    /// resolving the current type and exit to the outer resolving loop. In the
 
    /// latter case the `result` value was `ResolveResult::Unresolved`, implying
 
    /// that the type must be resolved first.
 
    fn ingest_resolve_result(&mut self, ctx: &TypeCtx, result: ResolveResult) -> Result<bool, ParseError2> {
 
    fn ingest_resolve_result(&mut self, ctx: &TypeCtx, result: ResolveResult) -> Result<bool, ParseError> {
 
        match result {
 
            ResolveResult::BuiltIn | ResolveResult::PolyArg(_) => Ok(true),
 
            ResolveResult::Resolved(_) => Ok(true),
 
            ResolveResult::Unresolved((root_id, definition_id)) => {
 
                if self.iter.contains(root_id, definition_id) {
 
                    // Cyclic dependency encountered
 
                    // TODO: Allow this
 
                    let mut error = ParseError2::new_error(
 
                    let mut error = ParseError::new_error(
 
                        &ctx.modules[root_id.index as usize].source, ctx.heap[definition_id].position(),
 
                        "Evaluating this type definition results in a cyclic type"
 
                    );
 

	
 
                    for (breadcrumb_idx, (root_id, definition_id)) in self.iter.breadcrumbs.iter().enumerate() {
 
                        let msg = if breadcrumb_idx == 0 {
 
                            "The cycle started with this definition"
 
                        } else {
 
                            "Which depends on this definition"
 
                        };
 

	
 
                        error = error.with_postfixed_info(
 
                            &ctx.modules[root_id.index as usize].source,
 
                            ctx.heap[*definition_id].position(), msg
 
                        );
 
                    }
 

	
 
                    Err(error)
 
                } else {
 
                    // Type is not yet resolved, so push IDs on iterator and
 
                    // continue the resolving loop
 
                    self.iter.push(root_id, definition_id);
 
                    Ok(false)
 
                }
 
            }
 
        }
 
    }
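
Aside (not part of this changeset): a standalone sketch of the breadcrumb loop that resolve_base_definition and ingest_resolve_result implement together: depth-first resolution with an explicit stack, where running into a dependency that is already on the stack is reported as a cyclic type. The map-based toy "type table" below is hypothetical; the real code tracks (RootId, DefinitionId) pairs.

use std::collections::{HashMap, HashSet};

fn resolve(defs: &HashMap<&str, Vec<&str>>, start: &str) -> Result<Vec<String>, String> {
    let mut resolved: Vec<String> = Vec::new();
    let mut done: HashSet<&str> = HashSet::new();
    let mut breadcrumbs: Vec<&str> = vec![start];
    while !breadcrumbs.is_empty() {
        let current = *breadcrumbs.last().unwrap();
        // Find the first dependency that has not been resolved yet.
        let pending = defs[current].iter().find(|d| !done.contains(**d)).copied();
        match pending {
            Some(dep) if breadcrumbs.contains(&dep) => {
                return Err(format!("cyclic type: '{}' depends on '{}'", current, dep));
            }
            // Dependency first: push a breadcrumb, revisit `current` later.
            Some(dep) => breadcrumbs.push(dep),
            None => {
                // All dependencies resolved: this definition is done.
                breadcrumbs.pop();
                if done.insert(current) {
                    resolved.push(current.to_string());
                }
            }
        }
    }
    Ok(resolved)
}

fn main() {
    let mut defs = HashMap::new();
    defs.insert("Foo", vec!["Bar"]);
    defs.insert("Bar", vec![]);
    assert_eq!(resolve(&defs, "Foo").unwrap(), vec!["Bar", "Foo"]);

    defs.insert("Bar", vec!["Foo"]); // introduce a cycle
    assert!(resolve(&defs, "Foo").is_err());
}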
 

	
 
    /// Each type definition may consist of several embedded subtypes. This
 
    /// function checks whether that embedded type is a builtin, a direct
 
    /// reference to a polymorphic argument, or an (un)resolved type definition.
 
    /// If the embedded type's symbol cannot be found then this function returns
 
    /// an error.
 
    ///
 
    /// If the embedded type is resolved, then one always receives the type's
 
    /// (module, definition) tuple. If any of the types in the embedded type's
 
    /// tree is not yet resolved, then one may receive a (module, definition)
 
    /// tuple that does not correspond to the `parser_type_id` passed into this
 
    /// function.
 
    fn resolve_base_parser_type(&mut self, ctx: &TypeCtx, poly_vars: &Vec<Identifier>, root_id: RootId, parser_type_id: ParserTypeId) -> Result<ResolveResult, ParseError2> {
 
    fn resolve_base_parser_type(&mut self, ctx: &TypeCtx, poly_vars: &Vec<Identifier>, root_id: RootId, parser_type_id: ParserTypeId) -> Result<ResolveResult, ParseError> {
 
        use ParserTypeVariant as PTV;
 

	
 
        // Prepping iterator
 
        self.parser_type_iter.clear();
 
        self.parser_type_iter.push_back(parser_type_id);
 

	
 
        // Result for the very first parser type we resolve
 
        let mut resolve_result = None;
 
        let mut set_resolve_result = |v: ResolveResult| {
 
            if resolve_result.is_none() { resolve_result = Some(v); }
 
        };
 

	
 
        'resolve_loop: while let Some(parser_type_id) = self.parser_type_iter.pop_back() {
 
            let parser_type = &ctx.heap[parser_type_id];
 

	
 
            match &parser_type.variant {
 
                // Builtin types. An array is a builtin as it is implemented as a
 
                // couple of pointers, so we do not require the subtype to be fully
 
                // resolved. Similar for input/output ports.
 
                PTV::Array(_) | PTV::Input(_) | PTV::Output(_) | PTV::Message |
 
                PTV::Bool | PTV::Byte | PTV::Short | PTV::Int | PTV::Long |
 
                PTV::String => {
 
                    set_resolve_result(ResolveResult::BuiltIn);
 
                },
 
                // IntegerLiteral types and the inferred marker are not allowed in
 
                // definitions of types
 
                PTV::IntegerLiteral |
 
                PTV::Inferred => {
 
                    debug_assert!(false, "Encountered illegal ParserTypeVariant within type definition");
 
                    unreachable!();
 
                },
 
                // Symbolic type, make sure its base type, and the base types
 
                // of all members of the embedded type tree are resolved. We
 
                // don't care about monomorphs yet.
 
                PTV::Symbolic(symbolic) => {
 
                    // Check if the symbolic type is one of the definition's
 
                    // polymorphic arguments. If so then we can halt the
 
                    // execution
 
                    for (poly_arg_idx, poly_arg) in poly_vars.iter().enumerate() {
 
                        if symbolic.identifier.matches_identifier(poly_arg) {
 
                            set_resolve_result(ResolveResult::PolyArg(poly_arg_idx));
 
                            continue 'resolve_loop;
 
                        }
 
                    }
 

	
 
                    // Lookup the definition in the symbol table
 
                    let (symbol, mut ident_iter) = ctx.symbols.resolve_namespaced_identifier(root_id, &symbolic.identifier);
 
                    if symbol.is_none() {
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            &ctx.modules[root_id.index as usize].source, symbolic.identifier.position,
 
                            "Could not resolve type"
 
                        ))
 
                    }
 

	
 
                    let symbol_value = symbol.unwrap();
 
                    let module_source = &ctx.modules[root_id.index as usize].source;
 

	
 
                    match symbol_value.symbol {
 
                        Symbol::Namespace(_) => {
 
                            // Reference to a namespace instead of a type
 
                            let last_ident = ident_iter.prev();
 
                            return if ident_iter.num_remaining() == 0 {
 
                                // Could also have polymorphic args, but we 
 
                                // don't care, just throw this error: 
 
                                Err(ParseError2::new_error(
 
                                Err(ParseError::new_error(
 
                                    module_source, symbolic.identifier.position,
 
                                    "Expected a type, got a module name"
 
                                ))
 
                            } else if last_ident.is_some() && last_ident.map(|(_, poly_args)| poly_args.is_some()).unwrap() {
 
                                // Halted at a namespace because we encountered
 
                                // polymorphic arguments
 
                                Err(ParseError2::new_error(
 
                                Err(ParseError::new_error(
 
                                    module_source, symbolic.identifier.position,
 
                                    "Illegal specification of polymorphic arguments to a module name"
 
                                ))
 
                            } else {
 
                                // Impossible (with the current implementation 
 
                                // of the symbol table)
 
                                unreachable!(
 
                                    "Got namespace symbol with {} returned symbols from {}",
 
                                    ident_iter.num_returned(),
 
                                    &String::from_utf8_lossy(&symbolic.identifier.value)
 
                                );
 
                            }
 
                        },
 
                        Symbol::Definition((root_id, definition_id)) => {
 
                            let definition = &ctx.heap[definition_id];
 
                            if ident_iter.num_remaining() > 0 {
 
                                // Namespaced identifier is longer than the type
 
                                // we found. Return the appropriate message
 
                                return if definition.is_struct() || definition.is_enum() {
 
                                    Err(ParseError2::new_error(
 
                                    Err(ParseError::new_error(
 
                                        module_source, symbolic.identifier.position,
 
                                        &format!(
 
                                            "Unknown type '{}', did you mean to use '{}'?",
 
                                            String::from_utf8_lossy(&symbolic.identifier.value),
 
                                            String::from_utf8_lossy(&definition.identifier().value)
 
                                        )
 
                                    ))
 
                                } else {
 
                                    Err(ParseError2::new_error(
 
                                    Err(ParseError::new_error(
 
                                        module_source, symbolic.identifier.position,
 
                                        "Unknown datatype"
 
                                    ))
 
                                }
 
                            }
 

	
 
                            // Found a match, make sure it is a datatype
 
                            if !(definition.is_struct() || definition.is_enum()) {
 
                                return Err(ParseError2::new_error(
 
                                return Err(ParseError::new_error(
 
                                    module_source, symbolic.identifier.position,
 
                                    "Embedded types must be datatypes (structs or enums)"
 
                                ))
 
                            }
 

	
 
                            // Found a struct/enum definition
 
                            if !self.lookup.contains_key(&definition_id) {
 
                                // Type is not yet resolved, immediately return
 
                                // this
 
                                return Ok(ResolveResult::Unresolved((root_id, definition_id)));
 
                            }
 

	
 
                            // Type is resolved, so set as result, but continue
 
                            // iterating over the parser types in the embedded
 
                            // type's tree
 
                            set_resolve_result(ResolveResult::Resolved((root_id, definition_id)));
 

	
 
                            // Note: because we're resolving parser types, not
 
                            // embedded types, we're parsing a tree, so we can't
 
                            // get stuck in a cyclic loop.
 
                            let last_ident = ident_iter.prev();
 
                            if let Some((_, Some(poly_args))) = last_ident {
 
                                for poly_arg_type_id in poly_args {
 
                                    self.parser_type_iter.push_back(*poly_arg_type_id);
 
@@ -968,49 +968,49 @@ impl TypeTable {
    fn create_initial_poly_vars(&self, poly_args: &[Identifier]) -> Vec<PolyVar> {
        poly_args
            .iter()
            .map(|v| PolyVar{ identifier: v.clone(), is_in_use: false })
            .collect()
    }

    /// This function modifies the passed `poly_args` by checking the embedded
    /// type tree. This should be called after `resolve_base_parser_type` is
    /// called on each node in this tree: we assume that each symbolic type was
    /// resolved to either a polymorphic arg or a definition.
    ///
    /// This function will also make sure that if the embedded type has
    /// polymorphic variables itself, the number of polymorphic variables
    /// matches the number of arguments in the associated definition.
    ///
    /// Finally, for all embedded types (which includes function/component
    /// arguments and return types) in type definitions we will modify the AST
    /// when the embedded type is a polymorphic variable or points to another
    /// user-defined type.
    fn check_and_resolve_embedded_type_and_modify_poly_args(
        &mut self, ctx: &mut TypeCtx,
        type_definition_id: DefinitionId, poly_args: &mut [PolyVar],
        root_id: RootId, embedded_type_id: ParserTypeId,
    ) -> Result<(), ParseError2> {
    ) -> Result<(), ParseError> {
        use ParserTypeVariant as PTV;

        self.parser_type_iter.clear();
        self.parser_type_iter.push_back(embedded_type_id);

        'type_loop: while let Some(embedded_type_id) = self.parser_type_iter.pop_back() {
            let embedded_type = &mut ctx.heap[embedded_type_id];

            match &mut embedded_type.variant {
                PTV::Message | PTV::Bool |
                PTV::Byte | PTV::Short | PTV::Int | PTV::Long |
                PTV::String => {
                    // Builtins, no modification/iteration required
                },
                PTV::IntegerLiteral | PTV::Inferred => {
                    // TODO: @hack Allowed for now so we can continue testing
                    //  the parser/lexer
                    // debug_assert!(false, "encountered illegal parser type during ParserType/PolyArg modification");
                    // unreachable!();
                },
                PTV::Array(subtype_id) |
                PTV::Input(subtype_id) |
                PTV::Output(subtype_id) => {
                    // Outer type is fixed, but inner type might be symbolic
@@ -1044,120 +1044,120 @@ impl TypeTable {

                    if cfg!(debug_assertions) {
                        // Everything here should already be checked in
                        // `resolve_base_parser_type`.
                        let type_class = defined_type.definition.type_class();
                        debug_assert!(
                            type_class == TypeClass::Struct || type_class == TypeClass::Enum || type_class == TypeClass::Union,
                            "embedded type's class is not struct, enum or union"
                        );
                        debug_assert_eq!(poly_args.len(), symbolic.identifier.poly_args.len());
                    }

                    if poly_args.len() != defined_type.poly_vars.len() {
                        // Mismatch in number of polymorphic arguments. This is
                        // not allowed in type definitions (no inference is
                        // allowed within type definitions, only in bodies of
                        // functions/components).
                        let module_source = &ctx.modules[root_id.index as usize].source;
                        let number_args_msg = if defined_type.poly_vars.is_empty() {
                            String::from("is not polymorphic")
                        } else {
                            format!("accepts {} polymorphic arguments", defined_type.poly_vars.len())
                        };

                        return Err(ParseError2::new_error(
                        return Err(ParseError::new_error(
                            module_source, symbolic.identifier.position,
                            &format!(
                                "The type '{}' {}, but {} polymorphic arguments were provided",
                                String::from_utf8_lossy(&symbolic.identifier.strip_poly_args()),
                                number_args_msg, poly_args.len()
                            )
                        ));
                    }

                    self.parser_type_iter.extend(poly_args);
                    debug_assert!(symbolic.variant.is_none(), "symbolic parser type's variant already resolved");
                    symbolic.variant = Some(SymbolicParserTypeVariant::Definition(definition_id));
                }
            }
        }

        // All nodes in the embedded type tree were valid
        Ok(())
    }
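The loop above drains `parser_type_iter` as a worklist: every node of the embedded type tree is popped once, builtins are skipped, container types push their subtype, and symbolic types push their polymorphic arguments. Below is a stand-alone sketch of that traversal using simplified stand-in types rather than the crate's real parser-type arena; because the input is a tree, each node is pushed exactly once and the loop always terminates.

// Illustrative sketch only: `SimpleParserType` is a stand-in, not a type
// from this crate.
use std::collections::VecDeque;

enum SimpleParserType {
    Builtin,                                       // e.g. bool, byte, string
    Array(Box<SimpleParserType>),                  // container with one subtype
    Symbolic { poly_args: Vec<SimpleParserType> }, // user-defined type with poly args
}

fn count_symbolic_nodes(root: &SimpleParserType) -> usize {
    let mut worklist: VecDeque<&SimpleParserType> = VecDeque::new();
    worklist.push_back(root);

    let mut num_symbolic = 0;
    while let Some(node) = worklist.pop_back() {
        match node {
            SimpleParserType::Builtin => {}, // nothing embedded, nothing to do
            SimpleParserType::Array(subtype) => worklist.push_back(subtype.as_ref()),
            SimpleParserType::Symbolic { poly_args } => {
                num_symbolic += 1;
                for arg in poly_args {
                    worklist.push_back(arg); // visit each polymorphic argument
                }
            }
        }
    }
    num_symbolic
}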
 

	
 
    /// Go through a list of identifiers and ensure that all identifiers have
    /// unique names
    fn check_identifier_collision<T: Sized, F: Fn(&T) -> &Identifier>(
        &self, ctx: &TypeCtx, root_id: RootId, items: &[T], getter: F, item_name: &'static str
    ) -> Result<(), ParseError2> {
    ) -> Result<(), ParseError> {
        for (item_idx, item) in items.iter().enumerate() {
            let item_ident = getter(item);
            for other_item in &items[0..item_idx] {
                let other_item_ident = getter(other_item);
                if item_ident == other_item_ident {
                    let module_source = &ctx.modules[root_id.index as usize].source;
                    return Err(ParseError2::new_error(
                    return Err(ParseError::new_error(
                        module_source, item_ident.position, &format!("This {} is defined more than once", item_name)
                    ).with_postfixed_info(
                        module_source, other_item_ident.position, &format!("The other {} is defined here", item_name)
                    ));
                }
            }
        }

        Ok(())
    }
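The collision check above is a pairwise comparison: each identifier is compared against all identifiers that precede it, and the first duplicate produces a two-part error via `ParseError::new_error(..).with_postfixed_info(..)` that points at both the later and the earlier definition. A minimal stand-alone sketch of the same check over plain strings (illustrative only, not the crate's API):

// Returns the indices of the first duplicate pair: (later occurrence,
// earlier occurrence), mirroring the "defined more than once" /
// "defined here" pair of messages in the error above.
fn first_duplicate(names: &[&str]) -> Option<(usize, usize)> {
    for (idx, name) in names.iter().enumerate() {
        for (other_idx, other_name) in names[..idx].iter().enumerate() {
            if name == other_name {
                return Some((idx, other_idx));
            }
        }
    }
    None
}

// first_duplicate(&["a", "b", "a"]) == Some((2, 0))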
 

	
 
    /// Go through a list of polymorphic arguments and make sure that the
    /// arguments all have unique names, and the arguments do not conflict with
    /// any symbols defined at the module scope.
    fn check_poly_args_collision(
        &self, ctx: &TypeCtx, root_id: RootId, poly_args: &[Identifier]
    ) -> Result<(), ParseError2> {
    ) -> Result<(), ParseError> {
        // Make sure polymorphic arguments are unique and none of the
        // identifiers conflict with any imported scopes
        for (arg_idx, poly_arg) in poly_args.iter().enumerate() {
            for other_poly_arg in &poly_args[..arg_idx] {
                if poly_arg == other_poly_arg {
                    let module_source = &ctx.modules[root_id.index as usize].source;
                    return Err(ParseError2::new_error(
                    return Err(ParseError::new_error(
                        module_source, poly_arg.position,
                        "This polymorphic argument is defined more than once"
                    ).with_postfixed_info(
                        module_source, other_poly_arg.position,
                        "It conflicts with this polymorphic argument"
                    ));
                }
            }

            // Check if identifier conflicts with a symbol defined or imported
            // in the current module
            if let Some(symbol) = ctx.symbols.resolve_symbol(root_id, &poly_arg.value) {
                // We have a conflict
                let module_source = &ctx.modules[root_id.index as usize].source;
                return Err(ParseError2::new_error(
                return Err(ParseError::new_error(
                    module_source, poly_arg.position,
                    "This polymorphic argument conflicts with another symbol"
                ).with_postfixed_info(
                    module_source, symbol.position,
                    "It conflicts due to this symbol"
                ));
            }
        }

        // All arguments are fine
        Ok(())
    }
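This check combines the same pairwise uniqueness test with a symbol-table lookup (`resolve_symbol`), so a polymorphic argument may neither repeat an earlier argument nor shadow a module-level symbol or import. A rough stand-alone sketch of that rule, modelling module symbols as a plain string set (illustrative only):

use std::collections::HashSet;

// Returns the first polymorphic argument name that either repeats an earlier
// argument or collides with a module-level symbol.
fn find_poly_arg_conflict<'a>(
    poly_args: &[&'a str],
    module_symbols: &HashSet<&str>,
) -> Option<&'a str> {
    for (idx, &arg) in poly_args.iter().enumerate() {
        if poly_args[..idx].contains(&arg) || module_symbols.contains(arg) {
            return Some(arg);
        }
    }
    None
}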
 

	
 
    //--------------------------------------------------------------------------
    // Small utilities
    //--------------------------------------------------------------------------

    fn enum_tag_type(min_tag_value: i64, max_tag_value: i64) -> PrimitiveType {
        // TODO: @consistency tag values should be handled correctly
        debug_assert!(min_tag_value < max_tag_value);
        let abs_max_value = min_tag_value.abs().max(max_tag_value.abs());
        if abs_max_value <= u8::max_value() as i64 {
            PrimitiveType::Byte
        } else if abs_max_value <= u16::max_value() as i64 {
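The hunk ends here, so only the first branches of `enum_tag_type` are visible. From what is shown, it selects the smallest primitive able to hold the largest absolute tag value. A hedged stand-alone sketch of that selection; the `Short`/`Int`/`Long` fallbacks and their cut-offs are assumptions, since the remaining branches are not part of this diff:

// Stand-in returning type names instead of PrimitiveType; only the Byte and
// Short branches mirror the code above, the rest is assumed.
fn smallest_tag_type(min_tag_value: i64, max_tag_value: i64) -> &'static str {
    debug_assert!(min_tag_value < max_tag_value);
    let abs_max_value = min_tag_value.abs().max(max_tag_value.abs());
    if abs_max_value <= u8::max_value() as i64 {
        "Byte"
    } else if abs_max_value <= u16::max_value() as i64 {
        "Short"
    } else if abs_max_value <= u32::max_value() as i64 {
        "Int"
    } else {
        "Long"
    }
}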
src/protocol/parser/utils.rs
Show inline comments
use crate::protocol::ast::*;
use crate::protocol::inputsource::*;
use super::symbol_table::*;
use super::type_table::*;

/// Utility result type.
pub(crate) enum FindTypeResult<'t, 'i> {
    // Found the type exactly
    Found((&'t DefinedType, NamespacedIdentifierIter<'i>)),
    // Could not match symbol
    SymbolNotFound{ident_pos: InputPosition},
    // Matched part of the namespaced identifier, but not completely
    SymbolPartial{ident_pos: InputPosition, ident_iter: NamespacedIdentifierIter<'i>},
    // Symbol matched, but points to a namespace/module instead of a type
    SymbolNamespace{ident_pos: InputPosition, symbol_pos: InputPosition},
}

// TODO: @cleanup Find other uses of this pattern
impl<'t, 'i> FindTypeResult<'t, 'i> {
    /// Utility function to transform the `FindTypeResult` into a `Result` where
    /// `Ok` contains the resolved type, and `Err` contains a `ParseError` which
    /// can be readily returned. This is the most common use.
    pub(crate) fn as_parse_error(self, module_source: &InputSource) -> Result<(&'t DefinedType, NamespacedIdentifierIter<'i>), ParseError2> {
    pub(crate) fn as_parse_error(self, module_source: &InputSource) -> Result<(&'t DefinedType, NamespacedIdentifierIter<'i>), ParseError> {
        match self {
            FindTypeResult::Found(defined_type) => Ok(defined_type),
            FindTypeResult::SymbolNotFound{ident_pos} => {
                Err(ParseError2::new_error(
                Err(ParseError::new_error(
                    module_source, ident_pos,
                    "Could not resolve this identifier to a symbol"
                ))
            },
            FindTypeResult::SymbolPartial{ident_pos, ident_iter} => {
                Err(ParseError2::new_error(
                Err(ParseError::new_error(
                    module_source, ident_pos,
                    &format!(
                        "Could not fully resolve this identifier to a symbol, was only able to match '{}'",
                        &String::from_utf8_lossy(ident_iter.returned_section())
                    )
                ))
            },
            FindTypeResult::SymbolNamespace{ident_pos, symbol_pos} => {
                Err(ParseError2::new_error(
                Err(ParseError::new_error(
                    module_source, ident_pos,
                    "This identifier was resolved to a namespace instead of a type"
                ).with_postfixed_info(
                    module_source, symbol_pos,
                    "This is the referenced namespace"
                ))
            }
        }
    }
}
 

	
 
/// Attempt to find the type pointed to by a (root, identifier) combination. The
/// type must match exactly (no parts in the namespace iterator remaining) and
/// must be a type, not a namespace.
pub(crate) fn find_type_definition<'t, 'i>(
    symbols: &SymbolTable, types: &'t TypeTable,
    root_id: RootId, identifier: &'i NamespacedIdentifier
) -> FindTypeResult<'t, 'i> {
    // Lookup symbol
    let (symbol, ident_iter) = symbols.resolve_namespaced_identifier(root_id, identifier);
    if symbol.is_none() {
        return FindTypeResult::SymbolNotFound{ident_pos: identifier.position};
    }

@@ -77,76 +77,76 @@ pub(crate) fn find_type_definition<'t, 'i>(
        Symbol::Namespace(_) => {
            FindTypeResult::SymbolNamespace{
                ident_pos: identifier.position,
                symbol_pos: symbol.position
            }
        },
        Symbol::Definition((_, definition_id)) => {
            // If this function is called correctly, then we should always be
            // able to match the definition's ID to an entry in the type table.
            let definition = types.get_base_definition(&definition_id);
            debug_assert!(definition.is_some());
            FindTypeResult::Found((definition.unwrap(), ident_iter))
        }
    }
}
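A hedged usage sketch of the two helpers above, written against the signatures visible in this file (the wrapper name `lookup_type` is illustrative and not part of the changeset): look up the type behind a namespaced identifier and convert any failure into a ParseError in one step.

fn lookup_type<'t, 'i>(
    symbols: &SymbolTable, types: &'t TypeTable, module_source: &InputSource,
    root_id: RootId, identifier: &'i NamespacedIdentifier,
) -> Result<(&'t DefinedType, NamespacedIdentifierIter<'i>), ParseError> {
    // `as_parse_error` turns the SymbolNotFound/SymbolPartial/SymbolNamespace
    // cases into readily returnable errors, leaving only the Found case.
    find_type_definition(symbols, types, root_id, identifier)
        .as_parse_error(module_source)
}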
 

	
 
pub(crate) enum MatchPolymorphResult<'t> {
    Matching,
    InferAll(usize),
    Mismatch{defined_type: &'t DefinedType, ident_position: InputPosition, num_specified: usize},
    NoneExpected{defined_type: &'t DefinedType, ident_position: InputPosition},
}

impl<'t> MatchPolymorphResult<'t> {
    pub(crate) fn as_parse_error(self, heap: &Heap, module_source: &InputSource) -> Result<usize, ParseError2> {
    pub(crate) fn as_parse_error(self, heap: &Heap, module_source: &InputSource) -> Result<usize, ParseError> {
        match self {
            MatchPolymorphResult::Matching => Ok(0),
            MatchPolymorphResult::InferAll(count) => {
                debug_assert!(count > 0);
                Ok(count)
            },
            MatchPolymorphResult::Mismatch{defined_type, ident_position, num_specified} => {
                let type_identifier = heap[defined_type.ast_definition].identifier();
                let args_name = if defined_type.poly_vars.len() == 1 {
                    "argument"
                } else {
                    "arguments"
                };

                return Err(ParseError2::new_error(
                return Err(ParseError::new_error(
                    module_source, ident_position,
                    &format!(
                        "expected {} polymorphic {} (or none, to infer them) for the type {}, but {} were specified",
                        defined_type.poly_vars.len(), args_name,
                        &String::from_utf8_lossy(&type_identifier.value),
                        num_specified
                    )
                ))
            },
            MatchPolymorphResult::NoneExpected{defined_type, ident_position, ..} => {
                let type_identifier = heap[defined_type.ast_definition].identifier();
                return Err(ParseError2::new_error(
                return Err(ParseError::new_error(
                    module_source, ident_position,
                    &format!(
                        "the type {} is not polymorphic",
                        &String::from_utf8_lossy(&type_identifier.value)
                    )
                ))
            }
        }
    }
}
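As with `FindTypeResult`, the intended pattern (assumed from the signatures in this file; the wrapper name is illustrative) is to pair the free function `match_polymorphic_args_to_vars` defined just below with `as_parse_error`, so a caller either learns how many polymorphic arguments must be inferred (zero if they were all written out) or gets a ready-made ParseError:

fn checked_poly_arg_count<'t>(
    heap: &Heap, module_source: &InputSource,
    defined_type: &'t DefinedType, poly_args: Option<&[ParserTypeId]>,
    ident_position: InputPosition,
) -> Result<usize, ParseError> {
    match_polymorphic_args_to_vars(defined_type, poly_args, ident_position)
        .as_parse_error(heap, module_source)
}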
 

	
 
/// Attempt to match the polymorphic arguments to the number of polymorphic
/// variables in the definition.
pub(crate) fn match_polymorphic_args_to_vars<'t>(
    defined_type: &'t DefinedType, poly_args: Option<&[ParserTypeId]>, ident_position: InputPosition
) -> MatchPolymorphResult<'t> {
    if defined_type.poly_vars.is_empty() {
        // No polymorphic variables on type
        if poly_args.is_some() {
            return MatchPolymorphResult::NoneExpected{
                defined_type,
                ident_position,
            };
        }
src/protocol/parser/visitor.rs
Show inline comments
use crate::protocol::ast::*;
use crate::protocol::inputsource::*;
use crate::protocol::parser::{symbol_table::*, type_table::*, LexedModule};

type Unit = ();
pub(crate) type VisitorResult = Result<Unit, ParseError2>;
pub(crate) type VisitorResult = Result<Unit, ParseError>;

/// Globally configured vector capacity for statement buffers in visitor
/// implementations
pub(crate) const STMT_BUFFER_INIT_CAPACITY: usize = 256;
/// Globally configured vector capacity for expression buffers in visitor
/// implementations
pub(crate) const EXPR_BUFFER_INIT_CAPACITY: usize = 256;
/// Globally configured vector capacity for parser type buffers in visitor
/// implementations
pub(crate) const TYPE_BUFFER_INIT_CAPACITY: usize = 128;

/// General context structure that is used while traversing the AST.
pub(crate) struct Ctx<'p> {
    pub heap: &'p mut Heap,
    pub module: &'p LexedModule,
    pub symbols: &'p mut SymbolTable,
    pub types: &'p mut TypeTable,
}

/// Visitor is a generic trait that will fully walk the AST. The default
/// implementation of the visitors is to not recurse. The exception is the
/// top-level `visit_definition`, `visit_stmt` and `visit_expr` methods, which
/// call the appropriate visitor function.
pub(crate) trait Visitor2 {

Changeset was too big and was cut off...

0 comments (0 inline, 0 general)