Changeset - c87205ed6292
MH <contact@maxhenger.nl> - 2021-04-02 14:59:36
cleanup consume_type function name and rename ParseError
10 files changed with 224 insertions and 230 deletions:
src/protocol/inputsource.rs
 
use std::fmt;
 
use std::fs::File;
 
use std::io;
 
use std::path::Path;
 

	
 
use backtrace::Backtrace;
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct InputSource {
 
    pub(crate) filename: String,
 
    pub(crate) input: Vec<u8>,
 
    line: usize,
 
    column: usize,
 
    offset: usize,
 
}
 

	
 
static STD_LIB_PDL: &'static [u8] = b"
 
primitive forward(in<msg> i, out<msg> o) {
 
    while(true) synchronous put(o, get(i));
 
}
 
primitive sync(in<msg> i, out<msg> o) {
 
    while(true) synchronous if(fires(i)) put(o, get(i));
 
}
 
primitive alternator(in<msg> i, out<msg> l, out<msg> r) {
 
    while(true) {
 
        synchronous if(fires(i)) put(l, get(i));
 
        synchronous if(fires(i)) put(r, get(i));
 
    }
 
}
 
primitive replicator(in<msg> i, out<msg> l, out<msg> r) {
 
    while(true) synchronous {
 
        if(fires(i)) {
 
            msg m = get(i);
 
            put(l, m);
 
            put(r, m);
 
        }
 
    }
 
}
 
primitive merger(in<msg> l, in<msg> r, out<msg> o) {
 
    while(true) synchronous {
 
        if(fires(l))      put(o, get(l));
 
        else if(fires(r)) put(o, get(r));
 
    }
 
}
 
";
 

	
 
impl InputSource {
 
    // Constructors
 
    pub fn new<R: io::Read, S: ToString>(filename: S, reader: &mut R) -> io::Result<InputSource> {
 
        let mut vec = Vec::new();
 
        reader.read_to_end(&mut vec)?;
 
        vec.extend(STD_LIB_PDL.to_vec());
 
        Ok(InputSource {
 
            filename: filename.to_string(),
 
            input: vec,
 
            line: 1,
 
            column: 1,
 
            offset: 0,
 
        })
 
    }
 
    // Constructor helpers
 
    pub fn from_file(path: &Path) -> io::Result<InputSource> {
 
        let filename = path.file_name();
 
        match filename {
 
            Some(filename) => {
 
                let mut f = File::open(path)?;
 
                InputSource::new(filename.to_string_lossy(), &mut f)
 
            }
 
            None => Err(io::Error::new(io::ErrorKind::NotFound, "Invalid path")),
 
        }
 
    }
 
    pub fn from_string(string: &str) -> io::Result<InputSource> {
 
        let buffer = Box::new(string);
 
        let mut bytes = buffer.as_bytes();
 
        InputSource::new(String::new(), &mut bytes)
 
    }
 
    pub fn from_buffer(buffer: &[u8]) -> io::Result<InputSource> {
 
        InputSource::new(String::new(), &mut Box::new(buffer))
 
    }
 
    // Internal methods
 
    pub fn pos(&self) -> InputPosition {
 
        InputPosition { line: self.line, column: self.column, offset: self.offset }
 
    }
 
    pub fn seek(&mut self, pos: InputPosition) {
 
        debug_assert!(pos.offset < self.input.len());
 
        self.line = pos.line;
 
        self.column = pos.column;
 
        self.offset = pos.offset;
 
    }
 
    // pub fn error<S: ToString>(&self, message: S) -> ParseError {
 
    //     self.pos().parse_error(message)
 
    // }
 
    pub fn is_eof(&self) -> bool {
 
        self.next() == None
 
    }
 

	
 
    pub fn next(&self) -> Option<u8> {
 
        if self.offset < self.input.len() {
 
            Some(self.input[self.offset])
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    pub fn lookahead(&self, pos: usize) -> Option<u8> {
 
        let offset_pos = self.offset + pos;
 
        if offset_pos < self.input.len() {
 
            Some(self.input[offset_pos])
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    pub fn has(&self, to_compare: &[u8]) -> bool {
 
        if self.offset + to_compare.len() <= self.input.len() {
 
            for idx in 0..to_compare.len() {
 
                if to_compare[idx] != self.input[self.offset + idx] {
 
                    return false;
 
                }
 
            }
 

	
 
            true
 
        } else {
 
            false
 
        }
 
    }
 

	
 
    pub fn consume(&mut self) {
 
        match self.next() {
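            // A lone '\r' (not followed by '\n'), or a '\n', ends the current line.
            // For a "\r\n" pair the '\r' falls through to the arm below and the
            // subsequent '\n' performs the line increment, so CRLF is counted once.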
 
            Some(x) if x == b'\r' && self.lookahead(1) != Some(b'\n') || x == b'\n' => {
 
                self.line += 1;
 
                self.offset += 1;
 
                self.column = 1;
 
            }
 
            Some(_) => {
 
                self.offset += 1;
 
                self.column += 1;
 
            }
 
            None => {}
 
        }
 
    }
 
}
 

	
 
impl fmt::Display for InputSource {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        self.pos().fmt(f)
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
 
pub struct InputPosition {
 
    line: usize,
 
    column: usize,
 
    pub(crate) offset: usize,
 
}
 

	
 
impl InputPosition {
 
    fn context<'a>(&self, source: &'a InputSource) -> &'a [u8] {
 
        let start = self.offset - (self.column - 1);
 
        let mut end = self.offset;
 
        while end < source.input.len() {
 
            let cur = (*source.input)[end];
 
            if cur == b'\n' || cur == b'\r' {
 
                break;
 
            }
 
            end += 1;
 
        }
 
        &source.input[start..end]
 
    }
 
    // fn parse_error<S: ToString>(&self, message: S) -> ParseError {
 
    //     ParseError { position: *self, message: message.to_string(), backtrace: Backtrace::new() }
 
    // }
 
    fn eval_error<S: ToString>(&self, message: S) -> EvalError {
 
        EvalError { position: *self, message: message.to_string(), backtrace: Backtrace::new() }
 
    }
 

	
 
    pub(crate) fn col(&self) -> usize { self.column }
 
}
 

	
 
impl Default for InputPosition {
 
    fn default() -> Self {
 
        Self{ line: 1, column: 1, offset: 0 }
 
    }
 
}
 

	
 
impl fmt::Display for InputPosition {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}:{}", self.line, self.column)
 
    }
 
}
 

	
 
pub trait SyntaxElement {
 
    fn position(&self) -> InputPosition;
 
    fn error<S: ToString>(&self, message: S) -> EvalError {
 
        self.position().eval_error(message)
 
    }
 
}
 

	
 
#[derive(Debug)]
 
pub enum ParseErrorType {
 
    Info,
 
    Error
 
}
 

	
 
#[derive(Debug)]
 
pub struct ParseErrorStatement {
 
    pub(crate) error_type: ParseErrorType,
 
    pub(crate) position: InputPosition,
 
    pub(crate) filename: String,
 
    pub(crate) context: String,
 
    pub(crate) message: String,
 
}
 

	
 
impl ParseErrorStatement {
 
    fn from_source(error_type: ParseErrorType, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        // Seek line start and end
 
        let line_start = position.offset - (position.column - 1);
 
        let mut line_end = position.offset;
 
        while line_end < source.input.len() && source.input[line_end] != b'\n' {
 
            line_end += 1;
 
        }
 

	
 
        // Compensate for '\r\n'
 
        if line_end > line_start && source.input[line_end - 1] == b'\r' {
 
            line_end -= 1;
 
        }
 

	
 
        Self{
 
            error_type,
 
            position,
 
            filename: source.filename.clone(),
 
            context: String::from_utf8_lossy(&source.input[line_start..line_end]).to_string(),
 
            message: msg.to_string()
 
        }
 
    }
 
}
 

	
 
impl fmt::Display for ParseErrorStatement {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        // Write message
 
        match self.error_type {
 
            ParseErrorType::Info => write!(f, " INFO: ")?,
 
            ParseErrorType::Error => write!(f, "ERROR: ")?,
 
        }
 
        writeln!(f, "{}", &self.message)?;
 

	
 
        // Write originating file/line/column
 
        if self.filename.is_empty() {
 
            writeln!(f, " +- at {}:{}", self.position.line, self.position.column)?;
 
        } else {
 
            writeln!(f, " +- at {}:{}:{}", self.filename, self.position.line, self.position.column)?;
 
        }
 

	
 
        // Write source context
 
        writeln!(f, " | ")?;
 
        writeln!(f, " | {}", self.context)?;
 

	
 
        // Write underline indicating where the error occurred
 
        debug_assert!(self.position.column <= self.context.chars().count());
 
        let mut arrow = String::with_capacity(self.context.len() + 3);
 
        arrow.push_str(" | ");
 
        let mut char_col = 1;
 
        for char in self.context.chars() {
 
            if char_col == self.position.column { break; }
 
            if char == '\t' {
 
                arrow.push('\t');
 
            } else {
 
                arrow.push(' ');
 
            }
 

	
 
            char_col += 1;
 
        }
 
        arrow.push('^');
 
        writeln!(f, "{}", arrow)?;
 

	
 
        Ok(())
 
    }
 
}
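// For illustration (a sketch, not output captured from a test in this changeset):
// a statement with message "Expected identifier" at line 2, column 5 of a
// hypothetical file "foo.pdl" would render roughly as:
//
//   ERROR: Expected identifier
//    +- at foo.pdl:2:5
//    |
//    | int 3abc = 5;
//    |     ^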
 

	
 
#[derive(Debug)]
 
-pub struct ParseError2 {
+pub struct ParseError {
 
    pub(crate) statements: Vec<ParseErrorStatement>
 
}
 

	
 
-impl fmt::Display for ParseError2 {
+impl fmt::Display for ParseError {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        if self.statements.is_empty() {
 
            return Ok(())
 
        }
 

	
 
        self.statements[0].fmt(f)?;
 
        for statement in self.statements.iter().skip(1) {
 
            writeln!(f)?;
 
            statement.fmt(f)?;
 
        }
 

	
 
        Ok(())
 
    }
 
}
 

	
 
-impl ParseError2 {
+impl ParseError {
 
    pub fn empty() -> Self {
 
        Self{ statements: Vec::new() }
 
    }
 

	
 
    pub fn new_error(source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source(ParseErrorType::Error, source, position, msg))}
 
    }
 

	
 
    pub fn with_prefixed(mut self, error_type: ParseErrorType, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        self.statements.insert(0, ParseErrorStatement::from_source(error_type, source, position, msg));
 
        self
 
    }
 

	
 
    pub fn with_postfixed(mut self, error_type: ParseErrorType, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        self.statements.push(ParseErrorStatement::from_source(error_type, source, position, msg));
 
        self
 
    }
 

	
 
    pub fn with_postfixed_info(self, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        self.with_postfixed(ParseErrorType::Info, source, position, msg)
 
    }
 
}
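// Hypothetical usage sketch (not part of this changeset), showing how the
// builder methods above compose a two-statement error:
//
//   fn duplicate_definition(
//       source: &InputSource, new_pos: InputPosition, old_pos: InputPosition
//   ) -> ParseError {
//       ParseError::new_error(source, new_pos, "This symbol is defined twice")
//           .with_postfixed_info(source, old_pos, "Previous definition was here")
//   }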
 

	
 
#[derive(Debug, Clone)]
 
pub struct EvalError {
 
    position: InputPosition,
 
    message: String,
 
    backtrace: Backtrace,
 
}
 

	
 
impl EvalError {
 
    pub fn new<S: ToString>(position: InputPosition, message: S) -> EvalError {
 
        EvalError { position, message: message.to_string(), backtrace: Backtrace::new() }
 
    }
 
    // Diagnostic methods
 
    pub fn write<A: io::Write>(&self, source: &InputSource, writer: &mut A) -> io::Result<()> {
 
        if !source.filename.is_empty() {
 
            writeln!(
 
                writer,
 
                "Evaluation error at {}:{}: {}",
 
                source.filename, self.position, self.message
 
            )?;
 
        } else {
 
            writeln!(writer, "Evaluation error at {}: {}", self.position, self.message)?;
 
        }
 
        let line = self.position.context(source);
 
        writeln!(writer, "{}", String::from_utf8_lossy(line))?;
 
        let mut arrow: Vec<u8> = Vec::new();
 
        for pos in 1..self.position.column {
 
            let c = line[pos - 1];
 
            if c == b'\t' {
 
                arrow.push(b'\t')
 
            } else {
 
                arrow.push(b' ')
 
            }
 
        }
 
        arrow.push(b'^');
 
        writeln!(writer, "{}", String::from_utf8_lossy(&arrow))
 
    }
 
    pub fn print(&self, source: &InputSource) {
 
        self.write(source, &mut std::io::stdout()).unwrap()
 
    }
 
    pub fn display<'a>(&'a self, source: &'a InputSource) -> DisplayEvalError<'a> {
 
        DisplayEvalError::new(self, source)
 
    }
 
}
 

	
 
impl From<EvalError> for io::Error {
 
    fn from(_: EvalError) -> io::Error {
 
        io::Error::new(io::ErrorKind::InvalidInput, "eval error")
 
    }
 
}
 

	
 
#[derive(Clone, Copy)]
 
pub struct DisplayEvalError<'a> {
 
    error: &'a EvalError,
 
    source: &'a InputSource,
 
}
 

	
 
impl DisplayEvalError<'_> {
 
    fn new<'a>(error: &'a EvalError, source: &'a InputSource) -> DisplayEvalError<'a> {
 
        DisplayEvalError { error, source }
 
    }
 
}
 

	
 
impl fmt::Display for DisplayEvalError<'_> {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        let mut vec: Vec<u8> = Vec::new();
 
        match self.error.write(self.source, &mut vec) {
 
            Err(_) => {
 
                return fmt::Result::Err(fmt::Error);
 
            }
 
            Ok(_) => {}
 
        }
 
        write!(f, "{}", String::from_utf8_lossy(&vec))
 
    }
 
}
 

	
 
// #[cfg(test)]
 
// mod tests {
 
//     use super::*;
 

	
 
//     #[test]
 
//     fn test_from_string() {
 
//         let mut is = InputSource::from_string("#version 100\n").unwrap();
 
//         assert!(is.input.len() == 13);
 
//         assert!(is.line == 1);
 
//         assert!(is.column == 1);
 
//         assert!(is.offset == 0);
 
//         let ps = is.pos();
 
//         assert!(ps.line == 1);
 
//         assert!(ps.column == 1);
 
//         assert!(ps.offset == 0);
 
//         assert!(is.next() == Some(b'#'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'v'));
 
//         assert!(is.lookahead(1) == Some(b'e'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'e'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'r'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b's'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'i'));
 
//         is.consume();
 
//         {
 
//             let ps = is.pos();
 
//             assert_eq!(b"#version 100", ps.context(&is));
 
//             let er = is.error("hello world!");
 
//             let mut vec: Vec<u8> = Vec::new();
 
//             er.write(&is, &mut vec).unwrap();
 
//             assert_eq!(
 
//                 "Parse error at 1:7: hello world!\n#version 100\n      ^\n",
 
//                 String::from_utf8_lossy(&vec)
 
//             );
 
//         }
 
//         assert!(is.next() == Some(b'o'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'n'));
 
//         is.consume();
 
//         assert!(is.input.len() == 13);
 
//         assert!(is.line == 1);
 
//         assert!(is.column == 9);
 
//         assert!(is.offset == 8);
 
//         assert!(is.next() == Some(b' '));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'1'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'0'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'0'));
 
//         is.consume();
 
//         assert!(is.input.len() == 13);
 
//         assert!(is.line == 1);
 
//         assert!(is.column == 13);
 
//         assert!(is.offset == 12);
 
//         assert!(is.next() == Some(b'\n'));
 
//         is.consume();
 
//         assert!(is.input.len() == 13);
 
//         assert!(is.line == 2);
 
//         assert!(is.column == 1);
 
//         assert!(is.offset == 13);
 
//         {
 
//             let ps = is.pos();
 
//             assert_eq!(b"", ps.context(&is));
 
//         }
 
//         assert!(is.next() == None);
 
//         is.consume();
 
//         assert!(is.next() == None);
 
//     }
 

	
 
//     #[test]
 
//     fn test_split() {
 
//         let mut is = InputSource::from_string("#version 100\n").unwrap();
 
//         let backup = is.clone();
 
//         assert!(is.next() == Some(b'#'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'v'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'e'));
 
//         is.consume();
 
//         is = backup;
 
//         assert!(is.next() == Some(b'#'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'v'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'e'));
 
//         is.consume();
 
//     }
 
// }
src/protocol/lexer.rs
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 

	
 
const MAX_LEVEL: usize = 128;
 
const MAX_NAMESPACES: u8 = 8; // only three levels are supported at the moment
 

	
 
macro_rules! debug_log {
 
    ($format:literal) => {
 
        enabled_debug_print!(true, "lexer", $format);
 
    };
 
    ($format:literal, $($args:expr),*) => {
 
        enabled_debug_print!(true, "lexer", $format, $($args),*);
 
    };
 
}
 

	
 
macro_rules! debug_line {
 
    ($source:expr) => {
 
        {
 
            let mut buffer = String::with_capacity(128);
 
            for idx in 0..buffer.capacity() {
 
                let next = $source.lookahead(idx);
 
                if next.is_none() || Some(b'\n') == next { break; }
 
                buffer.push(next.unwrap() as char);
 
            }
 
            buffer
 
        }
 
    };
 
}
 
fn is_vchar(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= 0x21 && c <= 0x7E
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_wsp(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c == b' ' || c == b'\t'
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_ident_start(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= b'A' && c <= b'Z' || c >= b'a' && c <= b'z'
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_ident_rest(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= b'A' && c <= b'Z' || c >= b'a' && c <= b'z' || c >= b'0' && c <= b'9' || c == b'_'
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_constant(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= b'0' && c <= b'9' || c == b'\''
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_integer_start(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= b'0' && c <= b'9'
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_integer_rest(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= b'0' && c <= b'9'
 
            || c >= b'a' && c <= b'f'
 
            || c >= b'A' && c <= b'F'
 
            || c == b'x'
 
            || c == b'o'
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn lowercase(x: u8) -> u8 {
 
    if x >= b'A' && x <= b'Z' {
 
        x - b'A' + b'a'
 
    } else {
 
        x
 
    }
 
}
 

	
 
fn identifier_as_namespaced(identifier: Identifier) -> NamespacedIdentifier {
 
    let identifier_len = identifier.value.len();
 
    debug_assert!(identifier_len < u16::max_value() as usize);
 
    NamespacedIdentifier{
 
        position: identifier.position,
 
        value: identifier.value,
 
        poly_args: Vec::new(),
 
        parts: vec![
 
            NamespacedIdentifierPart::Identifier{start: 0, end: identifier_len as u16}
 
        ],
 
    }
 
}
 

	
 
pub struct Lexer<'a> {
 
    source: &'a mut InputSource,
 
    level: usize,
 
}
 

	
 
impl Lexer<'_> {
 
    pub fn new(source: &mut InputSource) -> Lexer {
 
        Lexer { source, level: 0 }
 
    }
 
-    fn error_at_pos(&self, msg: &str) -> ParseError2 {
-        ParseError2::new_error(self.source, self.source.pos(), msg)
+    fn error_at_pos(&self, msg: &str) -> ParseError {
+        ParseError::new_error(self.source, self.source.pos(), msg)
 
    }
 
-    fn consume_line(&mut self) -> Result<Vec<u8>, ParseError2> {
+    fn consume_line(&mut self) -> Result<Vec<u8>, ParseError> {
 
        let mut result: Vec<u8> = Vec::new();
 
        let mut next = self.source.next();
 
        while next.is_some() && next != Some(b'\n') && next != Some(b'\r') {
 
            if !(is_vchar(next) || is_wsp(next)) {
 
                return Err(self.error_at_pos("Expected visible character or whitespace"));
 
            }
 
            result.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 
        if next.is_some() {
 
            self.source.consume();
 
        }
 
        if next == Some(b'\r') && self.source.next() == Some(b'\n') {
 
            self.source.consume();
 
        }
 
        Ok(result)
 
    }
 
-    fn consume_whitespace(&mut self, expected: bool) -> Result<(), ParseError2> {
+    fn consume_whitespace(&mut self, expected: bool) -> Result<(), ParseError> {
 
        let mut found = false;
 
        let mut next = self.source.next();
 
        while next.is_some() {
 
            if next == Some(b' ')
 
                || next == Some(b'\t')
 
                || next == Some(b'\r')
 
                || next == Some(b'\n')
 
            {
 
                self.source.consume();
 
                next = self.source.next();
 
                found = true;
 
                continue;
 
            }
 
            if next == Some(b'/') {
 
                next = self.source.lookahead(1);
 
                if next == Some(b'/') {
 
                    self.source.consume(); // slash
 
                    self.source.consume(); // slash
 
                    self.consume_line()?;
 
                    next = self.source.next();
 
                    found = true;
 
                    continue;
 
                }
 
                if next == Some(b'*') {
 
                    self.source.consume(); // slash
 
                    self.source.consume(); // star
 
                    next = self.source.next();
 
                    while next.is_some() {
 
                        if next == Some(b'*') {
 
                            next = self.source.lookahead(1);
 
                            if next == Some(b'/') {
 
                                self.source.consume(); // star
 
                                self.source.consume(); // slash
 
                                break;
 
                            }
 
                        }
 
                        self.source.consume();
 
                        next = self.source.next();
 
                    }
 
                    next = self.source.next();
 
                    found = true;
 
                    continue;
 
                }
 
            }
 
            break;
 
        }
 
        if expected && !found {
 
            Err(self.error_at_pos("Expected whitespace"))
 
        } else {
 
            Ok(())
 
        }
 
    }
 
    fn consume_any_chars(&mut self) {
 
        if !is_ident_start(self.source.next()) { return }
 
        self.source.consume();
 
        while is_ident_rest(self.source.next()) {
 
            self.source.consume()
 
        }
 
    }
 
    fn has_keyword(&self, keyword: &[u8]) -> bool {
 
        if !self.source.has(keyword) {
 
            return false;
 
        }
 

	
 
        // Word boundary
 
        let next = self.source.lookahead(keyword.len());
 
        if next.is_none() { return true; }
 
        return !is_ident_rest(next);
 
    }
 
-    fn consume_keyword(&mut self, keyword: &[u8]) -> Result<(), ParseError2> {
+    fn consume_keyword(&mut self, keyword: &[u8]) -> Result<(), ParseError> {
 
        let len = keyword.len();
 
        for i in 0..len {
 
            let expected = Some(lowercase(keyword[i]));
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected keyword '{}'", String::from_utf8_lossy(keyword))));
 
            }
 
            self.source.consume();
 
        }
 
        if let Some(next) = self.source.next() {
 
            if next >= b'A' && next <= b'Z' || next >= b'a' && next <= b'z' || next >= b'0' && next <= b'9' {
 
                return Err(self.error_at_pos(&format!("Expected word boundary after '{}'", String::from_utf8_lossy(keyword))));
 
            }
 
        }
 
        Ok(())
 
    }
 
    fn has_string(&self, string: &[u8]) -> bool {
 
        self.source.has(string)
 
    }
 
-    fn consume_string(&mut self, string: &[u8]) -> Result<(), ParseError2> {
+    fn consume_string(&mut self, string: &[u8]) -> Result<(), ParseError> {
 
        let len = string.len();
 
        for i in 0..len {
 
            let expected = Some(string[i]);
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected {}", String::from_utf8_lossy(string))));
 
            }
 
            self.source.consume();
 
        }
 
        Ok(())
 
    }
 
    /// Generic comma-separated consumer. If opening delimiter is not found then
 
    /// `Ok(None)` will be returned. Otherwise will consume the comma separated
 
    /// values, allowing a trailing comma. If no comma is found and the closing
 
    /// delimiter is not found, then a parse error with `expected_end_msg` is
 
    /// returned.
 
    fn consume_comma_separated<T, F>(
 
        &mut self, h: &mut Heap, open: u8, close: u8, expected_end_msg: &str, func: F
 
-    ) -> Result<Option<Vec<T>>, ParseError2>
-        where F: Fn(&mut Lexer, &mut Heap) -> Result<T, ParseError2>
+    ) -> Result<Option<Vec<T>>, ParseError>
+        where F: Fn(&mut Lexer, &mut Heap) -> Result<T, ParseError>
 
    {
 
        if Some(open) != self.source.next() {
 
            return Ok(None)
 
        }
 

	
 
        self.source.consume();
 
        self.consume_whitespace(false)?;
 
        let mut elements = Vec::new();
 
        let mut had_comma = true;
 

	
 
        loop {
 
            if Some(close) == self.source.next() {
 
                self.source.consume();
 
                break;
 
            } else if !had_comma {
 
-                return Err(ParseError2::new_error(
+                return Err(ParseError::new_error(
 
                    &self.source, self.source.pos(), expected_end_msg
 
                ));
 
            }
 

	
 
            elements.push(func(self, h)?);
 
            self.consume_whitespace(false)?;
 

	
 
            had_comma = self.source.next() == Some(b',');
 
            if had_comma {
 
                self.source.consume();
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 

	
 
        Ok(Some(elements))
 
    }
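    // Hypothetical usage sketch (not part of this changeset): parsing a
    // parenthesized, comma-separated list of identifiers such as "(a, b, c,)",
    // where a trailing comma is allowed and a missing '(' yields Ok(None):
    //
    //   let idents = self.consume_comma_separated(
    //       h, b'(', b')', "Expected the end of the identifier list",
    //       |lexer, _heap| lexer.consume_identifier()
    //   )?;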
 
    /// Essentially the same as `consume_comma_separated`, but will not allocate
 
    /// memory. Will return `Ok(true)` and leave the input position at the end
 
    /// of the comma-separated list if well formed and `Ok(false)` if the list is
 
    /// not present. Otherwise returns `Err(())` and leaves the input position 
 
    /// at a "random" position.
 
    fn consume_comma_separated_spilled_without_pos_recovery<F: Fn(&mut Lexer) -> bool>(
 
        &mut self, open: u8, close: u8, func: F
 
    ) -> Result<bool, ()> {
 
        if Some(open) != self.source.next() {
 
            return Ok(false);
 
        }
 

	
 
        self.source.consume();
 
        if self.consume_whitespace(false).is_err() { return Err(()) };
 
        let mut had_comma = true;
 
        loop {
 
            if Some(close) == self.source.next() {
 
                self.source.consume();
 
                return Ok(true);
 
            } else if !had_comma {
 
                return Err(());
 
            }
 

	
 
            if !func(self) { return Err(()); }
 
            if self.consume_whitespace(false).is_err() { return Err(()) };
 

	
 
            had_comma = self.source.next() == Some(b',');
 
            if had_comma {
 
                self.source.consume();
 
                if self.consume_whitespace(false).is_err() { return Err(()); }
 
            }
 
        }
 
    }
 
-    fn consume_ident(&mut self) -> Result<Vec<u8>, ParseError2> {
+    fn consume_ident(&mut self) -> Result<Vec<u8>, ParseError> {
 
        if !self.has_identifier() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let mut result = Vec::new();
 
        let mut next = self.source.next();
 
        result.push(next.unwrap());
 
        self.source.consume();
 
        next = self.source.next();
 
        while is_ident_rest(next) {
 
            result.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 
        Ok(result)
 
    }
 
    fn has_integer(&mut self) -> bool {
 
        is_integer_start(self.source.next())
 
    }
 
-    fn consume_integer(&mut self) -> Result<i64, ParseError2> {
+    fn consume_integer(&mut self) -> Result<i64, ParseError> {
 
        let position = self.source.pos();
 
        let mut data = Vec::new();
 
        let mut next = self.source.next();
 
        while is_integer_rest(next) {
 
            data.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 

	
 
        let data_len = data.len();
 
        debug_assert_ne!(data_len, 0);
 
        if data_len == 1 {
 
            debug_assert!(data[0] >= b'0' && data[0] <= b'9');
 
            return Ok((data[0] - b'0') as i64);
 
        } else {
 
            // TODO: Fix, u64 should be supported as well
 
            let parsed = if data[1] == b'b' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 2)
 
            } else if data[1] == b'o' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 8)
 
            } else if data[1] == b'x' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 16)
 
            } else {
 
                // Assume decimal
 
                let data = String::from_utf8_lossy(&data);
 
                i64::from_str_radix(&data, 10)
 
            };
 

	
 
            if let Err(_err) = parsed {
 
-                return Err(ParseError2::new_error(&self.source, position, "Invalid integer constant"));
+                return Err(ParseError::new_error(&self.source, position, "Invalid integer constant"));
 
            }
 

	
 
            Ok(parsed.unwrap())
 
        }
 
    }
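    // Accepted forms, per the radix dispatch above: plain decimal ("100"),
    // binary with a "0b" prefix, octal with "0o", and hexadecimal with "0x";
    // anything that fails to parse reports "Invalid integer constant".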
 

	
 
    // Statement keywords
 
    // TODO: Clean up these functions
 
    fn has_statement_keyword(&self) -> bool {
 
        self.has_keyword(b"channel")
 
            || self.has_keyword(b"skip")
 
            || self.has_keyword(b"if")
 
            || self.has_keyword(b"while")
 
            || self.has_keyword(b"break")
 
            || self.has_keyword(b"continue")
 
            || self.has_keyword(b"synchronous")
 
            || self.has_keyword(b"return")
 
            || self.has_keyword(b"assert")
 
            || self.has_keyword(b"goto")
 
            || self.has_keyword(b"new")
 
    }
 
    fn has_type_keyword(&self) -> bool {
 
        self.has_keyword(b"in")
 
            || self.has_keyword(b"out")
 
            || self.has_keyword(b"msg")
 
            || self.has_keyword(b"boolean")
 
            || self.has_keyword(b"byte")
 
            || self.has_keyword(b"short")
 
            || self.has_keyword(b"int")
 
            || self.has_keyword(b"long")
 
            || self.has_keyword(b"auto")
 
    }
 
    fn has_builtin_keyword(&self) -> bool {
 
        self.has_keyword(b"get")
 
            || self.has_keyword(b"fires")
 
            || self.has_keyword(b"create")
 
            || self.has_keyword(b"length")
 
    }
 
    fn has_reserved(&self) -> bool {
 
        self.has_statement_keyword()
 
            || self.has_type_keyword()
 
            || self.has_builtin_keyword()
 
            || self.has_keyword(b"let")
 
            || self.has_keyword(b"struct")
 
            || self.has_keyword(b"enum")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
            || self.has_keyword(b"null")
 
    }
 

	
 
    // Identifiers
 

	
 
    fn has_identifier(&self) -> bool {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return false;
 
        }
 
        let next = self.source.next();
 
        is_ident_start(next)
 
    }
 
-    fn consume_identifier(&mut self) -> Result<Identifier, ParseError2> {
+    fn consume_identifier(&mut self) -> Result<Identifier, ParseError> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let position = self.source.pos();
 
        let value = self.consume_ident()?;
 
        Ok(Identifier{ position, value })
 
    }
 
-    fn consume_identifier_spilled(&mut self) -> Result<(), ParseError2> {
+    fn consume_identifier_spilled(&mut self) -> Result<(), ParseError> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        self.consume_ident()?;
 
        Ok(())
 
    }
 

	
 
-    fn consume_namespaced_identifier(&mut self, h: &mut Heap) -> Result<NamespacedIdentifier, ParseError2> {
+    fn consume_namespaced_identifier(&mut self, h: &mut Heap) -> Result<NamespacedIdentifier, ParseError> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        // Consumes a part of the namespaced identifier, returns a boolean
 
        // indicating whether polymorphic arguments were specified.
 
        // TODO: Continue here: if we fail to properly parse the polymorphic
 
        //  arguments, assume we have reached the end of the namespaced 
 
        //  identifier and are instead dealing with a less-than operator. Ugly?
 
        //  Yes. Needs tokenizer? Yes. 
 
        fn consume_part(
 
            l: &mut Lexer, h: &mut Heap, ident: &mut NamespacedIdentifier,
 
            backup_pos: &mut InputPosition
 
-        ) -> Result<(), ParseError2> {
+        ) -> Result<(), ParseError> {
 
            // Consume identifier
 
            if !ident.value.is_empty() {
 
                ident.value.extend(b"::");
 
            }
 
            let ident_start = ident.value.len();
 
            ident.value.extend(l.consume_ident()?);
 
            ident.parts.push(NamespacedIdentifierPart::Identifier{
 
                start: ident_start as u16,
 
                end: ident.value.len() as u16
 
            });
 

	
 
            // Maybe consume polymorphic args.
 
            *backup_pos = l.source.pos();
 
            l.consume_whitespace(false)?;
 
            match l.consume_polymorphic_args(h, true)? {
 
                Some(args) => {
 
                    let poly_start = ident.poly_args.len();
 
                    ident.poly_args.extend(args);
 

	
 
                    ident.parts.push(NamespacedIdentifierPart::PolyArgs{
 
                        start: poly_start as u16,
 
                        end: ident.poly_args.len() as u16,
 
                    });
 

	
 
                    *backup_pos = l.source.pos();
 
                },
 
                None => {}
 
            };
 

	
 
            Ok(())
 
        }
 

	
 
        let mut ident = NamespacedIdentifier{
 
            position: self.source.pos(),
 
            value: Vec::new(),
 
            poly_args: Vec::new(),
 
            parts: Vec::new(),
 
        };
 

	
 
        // Keep consuming parts separated by "::". We don't consume the trailing
 
        // whitespace, hence we keep a backup position at the end of the last
 
        // valid part of the namespaced identifier (i.e. the last ident, or the
 
        // last encountered polymorphic arguments).
 
        let mut backup_pos = self.source.pos();
 
        consume_part(self, h, &mut ident, &mut backup_pos)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 
            consume_part(self, h, &mut ident, &mut backup_pos)?;
 
            self.consume_whitespace(false)?;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(ident)
 
    }
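    // For illustration: parsing "module::Type<byte>" would, under the
    // representation above, yield value b"module::Type", one Identifier part per
    // "::"-separated segment, and a PolyArgs part covering the single "byte"
    // argument (stored as a ParserTypeId in the heap).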
 

	
 
-    fn consume_namespaced_identifier_spilled(&mut self) -> Result<(), ParseError2> {
+    fn consume_namespaced_identifier_spilled(&mut self) -> Result<(), ParseError> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        debug_log!("consume_nsident2_spilled: {}", debug_line!(self.source));
 

	
 
-        fn consume_part_spilled(l: &mut Lexer, backup_pos: &mut InputPosition) -> Result<(), ParseError2> {
+        fn consume_part_spilled(l: &mut Lexer, backup_pos: &mut InputPosition) -> Result<(), ParseError> {
 
            l.consume_ident()?;
 
            *backup_pos = l.source.pos();
 
            l.consume_whitespace(false)?;
 
            match l.maybe_consume_poly_args_spilled_without_pos_recovery() {
 
                Ok(true) => { *backup_pos = l.source.pos(); },
 
                Ok(false) => {},
 
                Err(_) => { return Err(l.error_at_pos("Failed to parse poly args (spilled)")) },
 
            }
 
            Ok(())
 
        }
 

	
 
        let mut backup_pos = self.source.pos();
 
        consume_part_spilled(self, &mut backup_pos)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 
            consume_part_spilled(self, &mut backup_pos)?;
 
            self.consume_whitespace(false)?;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(())
 
    }
 

	
 
    // Types and type annotations
 

	
 
    /// Consumes a type definition. When called the input position should be at
 
    /// the type specification. When done the input position will be at the end
 
    /// of the type specifications (hence may be at whitespace).
 
-    fn consume_type2(&mut self, h: &mut Heap, allow_inference: bool) -> Result<ParserTypeId, ParseError2> {
+    fn consume_type(&mut self, h: &mut Heap, allow_inference: bool) -> Result<ParserTypeId, ParseError> {
 
        // Small helper function to convert in/out polymorphic arguments. Not
 
        // pretty, but return boolean is true if the error is due to inference
 
        // not being allowed
 
        let reduce_port_poly_args = |
 
            heap: &mut Heap,
 
            port_pos: &InputPosition,
 
            args: Vec<ParserTypeId>,
 
        | -> Result<ParserTypeId, bool> {
 
            match args.len() {
 
                0 => if allow_inference {  
 
                    Ok(heap.alloc_parser_type(|this| ParserType{
 
                        this,
 
                        pos: port_pos.clone(),
 
                        variant: ParserTypeVariant::Inferred
 
                    }))
 
                } else {
 
                    Err(true)
 
                },
 
                1 => Ok(args[0]),
 
                _ => Err(false)
 
            }
 
        };
 

	
 
        // Consume the type
 
        debug_log!("consume_type2: {}", debug_line!(self.source));
 
        debug_log!("consume_type: {}", debug_line!(self.source));
 
        let pos = self.source.pos();
 
        let parser_type_variant = if self.has_keyword(b"msg") {
 
            self.consume_keyword(b"msg")?;
 
            ParserTypeVariant::Message
 
        } else if self.has_keyword(b"boolean") {
 
            self.consume_keyword(b"boolean")?;
 
            ParserTypeVariant::Bool
 
        } else if self.has_keyword(b"byte") {
 
            self.consume_keyword(b"byte")?;
 
            ParserTypeVariant::Byte
 
        } else if self.has_keyword(b"short") {
 
            self.consume_keyword(b"short")?;
 
            ParserTypeVariant::Short
 
        } else if self.has_keyword(b"int") {
 
            self.consume_keyword(b"int")?;
 
            ParserTypeVariant::Int
 
        } else if self.has_keyword(b"long") {
 
            self.consume_keyword(b"long")?;
 
            ParserTypeVariant::Long
 
        } else if self.has_keyword(b"str") {
 
            self.consume_keyword(b"str")?;
 
            ParserTypeVariant::String
 
        } else if self.has_keyword(b"auto") {
 
            if !allow_inference {
 
-                return Err(ParseError2::new_error(
+                return Err(ParseError::new_error(
 
                        &self.source, pos,
 
                        "Type inference is not allowed here"
 
                ));
 
            }
 

	
 
            self.consume_keyword(b"auto")?;
 
            ParserTypeVariant::Inferred
 
        } else if self.has_keyword(b"in") {
 
            // TODO: @cleanup: not particularly neat to have this special case
 
            //  where we enforce polyargs in the parser-phase
 
            self.consume_keyword(b"in")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?.unwrap_or_default();
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error|  {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'in' only allows for 1 polymorphic argument"
 
                    };
 
-                    ParseError2::new_error(&self.source, pos, msg)
+                    ParseError::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Input(poly_arg)
 
        } else if self.has_keyword(b"out") {
 
            self.consume_keyword(b"out")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?.unwrap_or_default();
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error| {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'out' only allows for 1 polymorphic argument, but {} were specified"
 
                    };
 
-                    ParseError2::new_error(&self.source, pos, msg)
+                    ParseError::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Output(poly_arg)
 
        } else {
 
            // Must be a symbolic type
 
            let identifier = self.consume_namespaced_identifier(h)?;
 
            ParserTypeVariant::Symbolic(SymbolicParserType{identifier, variant: None, poly_args2: Vec::new()})
 
        };
 

	
 
        // If the type was a basic type (not supporting polymorphic type
 
        // arguments), then we make sure the user did not specify any of them.
 
        let mut backup_pos = self.source.pos();
 
        if !parser_type_variant.supports_polymorphic_args() {
 
            self.consume_whitespace(false)?;
 
            if let Some(b'<') = self.source.next() {
 
-                return Err(ParseError2::new_error(
+                return Err(ParseError::new_error(
 
                    &self.source, self.source.pos(),
 
                    "This type does not allow polymorphic arguments"
 
                ));
 
            }
 

	
 
            self.source.seek(backup_pos);
 
        }
 

	
 
        let mut parser_type_id = h.alloc_parser_type(|this| ParserType{
 
            this, pos, variant: parser_type_variant
 
        });
 

	
 
        // If we're dealing with arrays, then we need to wrap the currently
 
        // parsed type in array types
 
        self.consume_whitespace(false)?;
 
        while let Some(b'[') = self.source.next() {
 
            let pos = self.source.pos();
 
            self.source.consume();
 
            self.consume_whitespace(false)?;
 
            if let Some(b']') = self.source.next() {
 
                // Type is wrapped in an array
 
                self.source.consume();
 
                parser_type_id = h.alloc_parser_type(|this| ParserType{
 
                    this, pos, variant: ParserTypeVariant::Array(parser_type_id)
 
                });
 
                backup_pos = self.source.pos();
 

	
 
                // In case we're dealing with another array
 
                self.consume_whitespace(false)?;
 
            } else {
 
-                return Err(ParseError2::new_error(
+                return Err(ParseError::new_error(
 
                    &self.source, pos,
 
                    "Expected a closing ']'"
 
                ));
 
            }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(parser_type_id)
 
    }
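    // For illustration, the forms accepted above include primitive types
    // ("byte", "msg"), port types with one polymorphic argument ("in<msg>",
    // "out<byte>"), symbolic types ("Module::Thing<byte>"), and array-wrapped
    // variants of any of these ("int[]", "msg[][]").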
 

	
 
    /// Attempts to consume a type without returning it. If it doesn't encounter
 
    /// a well-formed type, then the input position is left at a "random"
 
    /// position.
 
    fn maybe_consume_type_spilled_without_pos_recovery(&mut self) -> bool {
 
        // Consume type identifier
 
        debug_log!("maybe_consume_type_spilled_...: {}", debug_line!(self.source));
 
        if self.has_type_keyword() {
 
            self.consume_any_chars();
 
        } else {
 
            let ident = self.consume_namespaced_identifier_spilled();
 
            if ident.is_err() { return false; }
 
        }
 

	
 
        // Consume any polymorphic arguments that follow the type identifier
 
        let mut backup_pos = self.source.pos();
 
        if self.consume_whitespace(false).is_err() { return false; }
 
        
 
        // Consume any array specifiers. Make sure we always leave the input
 
        // position at the end of the last array specifier if we do find a
 
        // valid type
 
        if self.consume_whitespace(false).is_err() { return false; }
 
        while let Some(b'[') = self.source.next() {
 
            self.source.consume();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
            if self.source.next() != Some(b']') { return false; }
 
            self.source.consume();
 
            backup_pos = self.source.pos();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return true;
 
    }
 

	
 
    fn maybe_consume_type_spilled(&mut self) -> bool {
 
        let backup_pos = self.source.pos();
 
        if !self.maybe_consume_type_spilled_without_pos_recovery() {
 
            self.source.seek(backup_pos);
 
            return false;
 
        }
 

	
 
        return true;
 
    }
 

	
 
    /// Attempts to consume polymorphic arguments without returning them. If it
 
    /// doesn't encounter well-formed polymorphic arguments, then the input
 
    /// position is left at a "random" position. Returns a boolean indicating if
 
    /// the poly_args list was present.
 
    fn maybe_consume_poly_args_spilled_without_pos_recovery(&mut self) -> Result<bool, ()> {
 
        debug_log!("maybe_consume_poly_args_spilled_...: {}", debug_line!(self.source));
 
        self.consume_comma_separated_spilled_without_pos_recovery(
 
            b'<', b'>', |lexer| {
 
                lexer.maybe_consume_type_spilled_without_pos_recovery()
 
            })
 
    }
 

	
 
    /// Consumes polymorphic arguments and its delimiters if specified. If
 
    /// polyargs are present then the args are consumed and the input position
 
    /// will be placed after the polyarg list. If polyargs are not present then
 
    /// the input position will remain unmodified and an empty vector will be
 
    /// returned.
 
    ///
 
    /// Polymorphic arguments represent the specification of the parametric
 
    /// types of a polymorphic type: they specify the value of the polymorphic
 
    /// type's polymorphic variables.
 
-    fn consume_polymorphic_args(&mut self, h: &mut Heap, allow_inference: bool) -> Result<Option<Vec<ParserTypeId>>, ParseError2> {
+    fn consume_polymorphic_args(&mut self, h: &mut Heap, allow_inference: bool) -> Result<Option<Vec<ParserTypeId>>, ParseError> {
 
        self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic argument list",
 
-            |lexer, heap| lexer.consume_type2(heap, allow_inference)
+            |lexer, heap| lexer.consume_type(heap, allow_inference)
 
        )
 
    }
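    // For illustration: given "<byte, msg[]>" this returns Ok(Some(..)) holding
    // two ParserTypeIds; when the next character is not '<' it returns Ok(None)
    // without consuming anything.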
 

	
 
    /// Consumes polymorphic variables. These are identifiers that are used
 
    /// within polymorphic types. The input position may be at whitespace. If
 
    /// polymorphic variables are present then the whitespace, wrapping
 
    /// delimiters and the polymorphic variables are consumed. Otherwise the
 
    /// input position will stay where it is. If no polymorphic variables are
 
    /// present then an empty vector will be returned.
 
-    fn consume_polymorphic_vars(&mut self, h: &mut Heap) -> Result<Vec<Identifier>, ParseError2> {
+    fn consume_polymorphic_vars(&mut self, h: &mut Heap) -> Result<Vec<Identifier>, ParseError> {
 
        let backup_pos = self.source.pos();
 
        match self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic variable list",
 
            |lexer, _heap| lexer.consume_identifier()
 
        )? {
 
            Some(poly_vars) => Ok(poly_vars),
 
            None => {
 
                self.source.seek(backup_pos);
 
                Ok(vec!())
 
            }
 
        }
 
    }
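    // For illustration: in a definition such as "struct Pair<T1, T2> { ... }"
    // the polymorphic variables are the identifiers T1 and T2; without a '<'
    // the original input position is restored and an empty vector is returned.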
 

	
 
    // Parameters
 

	
 
-    fn consume_parameter(&mut self, h: &mut Heap) -> Result<ParameterId, ParseError2> {
-        let parser_type = self.consume_type2(h, false)?;
+    fn consume_parameter(&mut self, h: &mut Heap) -> Result<ParameterId, ParseError> {
+        let parser_type = self.consume_type(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let position = self.source.pos();
 
        let identifier = self.consume_identifier()?;
 
        let id =
 
            h.alloc_parameter(|this| Parameter { this, position, parser_type, identifier });
 
        Ok(id)
 
    }
 
-    fn consume_parameters(&mut self, h: &mut Heap) -> Result<Vec<ParameterId>, ParseError2> {
+    fn consume_parameters(&mut self, h: &mut Heap) -> Result<Vec<ParameterId>, ParseError> {
 
        match self.consume_comma_separated(
 
            h, b'(', b')', "Expected the end of the parameter list",
 
            |lexer, heap| lexer.consume_parameter(heap)
 
        )? {
 
            Some(params) => Ok(params),
 
            None => {
 
-                Err(ParseError2::new_error(
+                Err(ParseError::new_error(
 
                    &self.source, self.source.pos(),
 
                    "Expected a parameter list"
 
                ))
 
            }
 
        }
 
    }
 

	
 
    // ====================
 
    // Expressions
 
    // ====================
 

	
 
-    fn consume_paren_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
+    fn consume_paren_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        let result = self.consume_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b")")?;
 
        Ok(result)
 
    }
 
-    fn consume_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
+    fn consume_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested expression"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_assignment_expression(h);
 
        self.level -= 1;
 
        result
 
    }
 
-    fn consume_assignment_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
+    fn consume_assignment_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let result = self.consume_conditional_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        if self.has_assignment_operator() {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation = self.consume_assignment_operator()?;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_expression(h)?;
 
            Ok(h.alloc_assignment_expression(|this| AssignmentExpression {
 
                this,
 
                position,
 
                left,
 
                operation,
 
                right,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            })
 
            .upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 
    fn has_assignment_operator(&self) -> bool {
 
        self.has_string(b"=")
 
            || self.has_string(b"*=")
 
            || self.has_string(b"/=")
 
            || self.has_string(b"%=")
 
            || self.has_string(b"+=")
 
            || self.has_string(b"-=")
 
            || self.has_string(b"<<=")
 
            || self.has_string(b">>=")
 
            || self.has_string(b"&=")
 
            || self.has_string(b"^=")
 
            || self.has_string(b"|=")
 
    }
 
-    fn consume_assignment_operator(&mut self) -> Result<AssignmentOperator, ParseError2> {
+    fn consume_assignment_operator(&mut self) -> Result<AssignmentOperator, ParseError> {
 
        if self.has_string(b"=") {
 
            self.consume_string(b"=")?;
 
            Ok(AssignmentOperator::Set)
 
        } else if self.has_string(b"*=") {
 
            self.consume_string(b"*=")?;
 
            Ok(AssignmentOperator::Multiplied)
 
        } else if self.has_string(b"/=") {
 
            self.consume_string(b"/=")?;
 
            Ok(AssignmentOperator::Divided)
 
        } else if self.has_string(b"%=") {
 
            self.consume_string(b"%=")?;
 
            Ok(AssignmentOperator::Remained)
 
        } else if self.has_string(b"+=") {
 
            self.consume_string(b"+=")?;
 
            Ok(AssignmentOperator::Added)
 
        } else if self.has_string(b"-=") {
 
            self.consume_string(b"-=")?;
 
            Ok(AssignmentOperator::Subtracted)
 
        } else if self.has_string(b"<<=") {
 
            self.consume_string(b"<<=")?;
 
            Ok(AssignmentOperator::ShiftedLeft)
 
        } else if self.has_string(b">>=") {
 
            self.consume_string(b">>=")?;
 
            Ok(AssignmentOperator::ShiftedRight)
 
        } else if self.has_string(b"&=") {
 
            self.consume_string(b"&=")?;
 
            Ok(AssignmentOperator::BitwiseAnded)
 
        } else if self.has_string(b"^=") {
 
            self.consume_string(b"^=")?;
 
            Ok(AssignmentOperator::BitwiseXored)
 
        } else if self.has_string(b"|=") {
 
            self.consume_string(b"|=")?;
 
            Ok(AssignmentOperator::BitwiseOred)
 
        } else {
 
            Err(self.error_at_pos("Expected assignment operator"))
 
        }
 
    }
 
-    fn consume_conditional_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
+    fn consume_conditional_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let result = self.consume_concat_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        if self.has_string(b"?") {
 
            let position = self.source.pos();
 
            let test = result;
 
            self.consume_string(b"?")?;
 
            self.consume_whitespace(false)?;
 
            let true_expression = self.consume_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            self.consume_string(b":")?;
 
            self.consume_whitespace(false)?;
 
            let false_expression = self.consume_expression(h)?;
 
            Ok(h.alloc_conditional_expression(|this| ConditionalExpression {
 
                this,
 
                position,
 
                test,
 
                true_expression,
 
                false_expression,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            })
 
            .upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 
-    fn consume_concat_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
+    fn consume_concat_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_lor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"@") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"@")?;
 
            let operation = BinaryOperator::Concatenate;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_lor_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_lor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_lor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_land_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"||") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"||")?;
 
            let operation = BinaryOperator::LogicalOr;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_land_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_land_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_land_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_bor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"&&") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"&&")?;
 
            let operation = BinaryOperator::LogicalAnd;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_bor_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_bor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_bor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_xor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"|") && !self.has_string(b"||") && !self.has_string(b"|=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"|")?;
 
            let operation = BinaryOperator::BitwiseOr;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_xor_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_xor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_xor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_band_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"^") && !self.has_string(b"^=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"^")?;
 
            let operation = BinaryOperator::BitwiseXor;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_band_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_band_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_band_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_eq_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"&") && !self.has_string(b"&&") && !self.has_string(b"&=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"&")?;
 
            let operation = BinaryOperator::BitwiseAnd;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_eq_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_eq_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_eq_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_rel_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"==") || self.has_string(b"!=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"==") {
 
                self.consume_string(b"==")?;
 
                operation = BinaryOperator::Equality;
 
            } else {
 
                self.consume_string(b"!=")?;
 
                operation = BinaryOperator::Inequality;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_rel_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_rel_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_rel_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_shift_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"<=")
 
            || self.has_string(b">=")
 
            || self.has_string(b"<") && !self.has_string(b"<<=")
 
            || self.has_string(b">") && !self.has_string(b">>=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"<=") {
 
                self.consume_string(b"<=")?;
 
                operation = BinaryOperator::LessThanEqual;
 
            } else if self.has_string(b">=") {
 
                self.consume_string(b">=")?;
 
                operation = BinaryOperator::GreaterThanEqual;
 
            } else if self.has_string(b"<") {
 
                self.consume_string(b"<")?;
 
                operation = BinaryOperator::LessThan;
 
            } else {
 
                self.consume_string(b">")?;
 
                operation = BinaryOperator::GreaterThan;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_shift_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_shift_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_shift_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_add_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"<<") && !self.has_string(b"<<=")
 
            || self.has_string(b">>") && !self.has_string(b">>=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"<<") {
 
                self.consume_string(b"<<")?;
 
                operation = BinaryOperator::ShiftLeft;
 
            } else {
 
                self.consume_string(b">>")?;
 
                operation = BinaryOperator::ShiftRight;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_add_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_add_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_add_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_mul_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"+") && !self.has_string(b"+=")
 
            || self.has_string(b"-") && !self.has_string(b"-=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"+") {
 
                self.consume_string(b"+")?;
 
                operation = BinaryOperator::Add;
 
            } else {
 
                self.consume_string(b"-")?;
 
                operation = BinaryOperator::Subtract;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_mul_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_mul_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_mul_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_prefix_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"*") && !self.has_string(b"*=")
 
            || self.has_string(b"/") && !self.has_string(b"/=")
 
            || self.has_string(b"%") && !self.has_string(b"%=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"*") {
 
                self.consume_string(b"*")?;
 
                operation = BinaryOperator::Multiply;
 
            } else if self.has_string(b"/") {
 
                self.consume_string(b"/")?;
 
                operation = BinaryOperator::Divide;
 
            } else {
 
                self.consume_string(b"%")?;
 
                operation = BinaryOperator::Remainder;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_prefix_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
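 
    // Prefix expressions: unary '+', '-', '~' and '!', where '++' and '--' are
 
    // parsed as pre-increment and pre-decrement. Recursion depth is bounded by
 
    // MAX_LEVEL to guard against overly deep nesting.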
 
    fn consume_prefix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_prefix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.has_string(b"+")
 
            || self.has_string(b"-")
 
            || self.has_string(b"~")
 
            || self.has_string(b"!")
 
        {
 
            let position = self.source.pos();
 
            let operation;
 
            if self.has_string(b"+") {
 
                self.consume_string(b"+")?;
 
                if self.has_string(b"+") {
 
                    self.consume_string(b"+")?;
 
                    operation = UnaryOperation::PreIncrement;
 
                } else {
 
                    operation = UnaryOperation::Positive;
 
                }
 
            } else if self.has_string(b"-") {
 
                self.consume_string(b"-")?;
 
                if self.has_string(b"-") {
 
                    self.consume_string(b"-")?;
 
                    operation = UnaryOperation::PreDecrement;
 
                } else {
 
                    operation = UnaryOperation::Negative;
 
                }
 
            } else if self.has_string(b"~") {
 
                self.consume_string(b"~")?;
 
                operation = UnaryOperation::BitwiseNot;
 
            } else {
 
                self.consume_string(b"!")?;
 
                operation = UnaryOperation::LogicalNot;
 
            }
 
            self.consume_whitespace(false)?;
 
            if self.level >= MAX_LEVEL {
 
                return Err(self.error_at_pos("Too deeply nested expression"));
 
            }
 
            self.level += 1;
 
            let result = self.consume_prefix_expression(h);
 
            self.level -= 1;
 
            let expression = result?;
 
            return Ok(h
 
                .alloc_unary_expression(|this| UnaryExpression {
 
                    this,
 
                    position,
 
                    operation,
 
                    expression,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast());
 
        }
 
        self.consume_postfix_expression(h)
 
    }
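 
    // Postfix expressions: post-increment/decrement ('++'/'--'), indexing
 
    // ('subject[index]'), slicing ('subject[from..to]' or 'subject[from:to]')
 
    // and field selection ('subject.field', with 'length' as a builtin field).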
 
    fn consume_postfix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_postfix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        let mut result = self.consume_primary_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"++")
 
            || self.has_string(b"--")
 
            || self.has_string(b"[")
 
            || (self.has_string(b".") && !self.has_string(b".."))
 
        {
 
            let mut position = self.source.pos();
 
            if self.has_string(b"++") {
 
                self.consume_string(b"++")?;
 
                let operation = UnaryOperation::PostIncrement;
 
                let expression = result;
 
                self.consume_whitespace(false)?;
 
                result = h
 
                    .alloc_unary_expression(|this| UnaryExpression {
 
                        this,
 
                        position,
 
                        operation,
 
                        expression,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            } else if self.has_string(b"--") {
 
                self.consume_string(b"--")?;
 
                let operation = UnaryOperation::PostDecrement;
 
                let expression = result;
 
                self.consume_whitespace(false)?;
 
                result = h
 
                    .alloc_unary_expression(|this| UnaryExpression {
 
                        this,
 
                        position,
 
                        operation,
 
                        expression,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            } else if self.has_string(b"[") {
 
                self.consume_string(b"[")?;
 
                self.consume_whitespace(false)?;
 
                let subject = result;
 
                let index = self.consume_expression(h)?;
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b"..") || self.has_string(b":") {
 
                    position = self.source.pos();
 
                    if self.has_string(b"..") {
 
                        self.consume_string(b"..")?;
 
                    } else {
 
                        self.consume_string(b":")?;
 
                    }
 
                    self.consume_whitespace(false)?;
 
                    let to_index = self.consume_expression(h)?;
 
                    self.consume_whitespace(false)?;
 
                    result = h
 
                        .alloc_slicing_expression(|this| SlicingExpression {
 
                            this,
 
                            position,
 
                            subject,
 
                            from_index: index,
 
                            to_index,
 
                            parent: ExpressionParent::None,
 
                            concrete_type: ConcreteType::default(),
 
                        })
 
                        .upcast();
 
                } else {
 
                    result = h
 
                        .alloc_indexing_expression(|this| IndexingExpression {
 
                            this,
 
                            position,
 
                            subject,
 
                            index,
 
                            parent: ExpressionParent::None,
 
                            concrete_type: ConcreteType::default(),
 
                        })
 
                        .upcast();
 
                }
 
                self.consume_string(b"]")?;
 
                self.consume_whitespace(false)?;
 
            } else {
 
                assert!(self.has_string(b"."));
 
                self.consume_string(b".")?;
 
                self.consume_whitespace(false)?;
 
                let subject = result;
 
                let field;
 
                if self.has_keyword(b"length") {
 
                    self.consume_keyword(b"length")?;
 
                    field = Field::Length;
 
                } else {
 
                    let identifier = self.consume_identifier()?;
 
                    field = Field::Symbolic(FieldSymbolic{
 
                        identifier,
 
                        definition: None,
 
                        field_idx: 0,
 
                    });
 
                }
 
                result = h
 
                    .alloc_select_expression(|this| SelectExpression {
 
                        this,
 
                        position,
 
                        subject,
 
                        field,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            }
 
        }
 
        Ok(result)
 
    }
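 
    // Primary expressions: parenthesized expressions, array literals, builtin
 
    // literals, struct literals, call expressions, or plain variables.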
 
    fn consume_primary_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
    fn consume_primary_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError> {
 
        if self.has_string(b"(") {
 
            return self.consume_paren_expression(h);
 
        }
 
        if self.has_string(b"{") {
 
            return Ok(self.consume_array_expression(h)?.upcast());
 
        }
 
        if self.has_builtin_literal() {
 
            return Ok(self.consume_builtin_literal_expression(h)?.upcast());
 
        }
 
        if self.has_struct_literal() {
 
            return Ok(self.consume_struct_literal_expression(h)?.upcast());
 
        }
 
        if self.has_call_expression() {
 
            return Ok(self.consume_call_expression(h)?.upcast());
 
        }
 
        Ok(self.consume_variable_expression(h)?.upcast())
 
    }
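 
    // An array literal is a (possibly empty) comma-separated list of
 
    // expressions between braces, e.g. "{ 1, 2, 3 }".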
 
    fn consume_array_expression(&mut self, h: &mut Heap) -> Result<ArrayExpressionId, ParseError2> {
 
    fn consume_array_expression(&mut self, h: &mut Heap) -> Result<ArrayExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        let mut elements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b"}") {
 
            while self.source.next().is_some() {
 
                elements.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b"}") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 
        self.consume_string(b"}")?;
 
        Ok(h.alloc_array_expression(|this| ArrayExpression {
 
            this,
 
            position,
 
            elements,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn has_builtin_literal(&self) -> bool {
 
        is_constant(self.source.next())
 
            || self.has_keyword(b"null")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
    }
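 
    // Builtin literals: 'null', 'true', 'false', character constants between
 
    // single quotes, and integer constants.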
 
    fn consume_builtin_literal_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<LiteralExpressionId, ParseError2> {
 
    ) -> Result<LiteralExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        let value;
 
        if self.has_keyword(b"null") {
 
            self.consume_keyword(b"null")?;
 
            value = Literal::Null;
 
        } else if self.has_keyword(b"true") {
 
            self.consume_keyword(b"true")?;
 
            value = Literal::True;
 
        } else if self.has_keyword(b"false") {
 
            self.consume_keyword(b"false")?;
 
            value = Literal::False;
 
        } else if self.source.next() == Some(b'\'') {
 
            self.source.consume();
 
            let mut data = Vec::new();
 
            let mut next = self.source.next();
 
            while next != Some(b'\'') && (is_vchar(next) || next == Some(b' ')) {
 
                data.push(next.unwrap());
 
                self.source.consume();
 
                next = self.source.next();
 
            }
 
            if next != Some(b'\'') || data.is_empty() {
 
                return Err(self.error_at_pos("Expected character constant"));
 
            }
 
            self.source.consume();
 
            value = Literal::Character(data);
 
        } else {
 
            if !self.has_integer() {
 
                return Err(self.error_at_pos("Expected integer constant"));
 
            }
 

	
 
            value = Literal::Integer(self.consume_integer()?);
 
        }
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression {
 
            this,
 
            position,
 
            value,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    fn has_struct_literal(&mut self) -> bool {
 
        // A struct literal is written as:
 
        //      namespace::StructName<maybe_one_of_these, auto>{ field: expr }
 
        // We will parse up until the opening brace to see if we're dealing with
 
        // a struct literal.
 
        let backup_pos = self.source.pos();
 
        let result = self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'{');
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 

	
 
    fn consume_struct_literal_expression(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError2> {
 
    fn consume_struct_literal_expression(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError> {
 
        // Consume identifier and polymorphic arguments
 
        debug_log!("consume_struct_literal_expression: {}", debug_line!(self.source));
 
        let position = self.source.pos();
 
        let identifier = self.consume_namespaced_identifier(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume fields
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let identifier = lexer.consume_identifier()?;
 
                lexer.consume_whitespace(false)?;
 
                lexer.consume_string(b":")?;
 
                lexer.consume_whitespace(false)?;
 
                let value = lexer.consume_expression(heap)?;
 

	
 
                Ok(LiteralStructField{ identifier, value, field_idx: 0 })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError2::new_error(
 
            None => return Err(ParseError::new_error(
 
                self.source, self.source.pos(),
 
                "A struct literal must be followed by its field values"
 
            ))
 
        };
 

	
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression{
 
            this,
 
            position,
 
            value: Literal::Struct(LiteralStruct{
 
                identifier,
 
                fields,
 
                poly_args2: Vec::new(),
 
                definition: None,
 
            }),
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        }))
 
    }
 

	
 
    fn has_call_expression(&mut self) -> bool {
 
        // We need to prevent ambiguity with various operators (because we may
 
        // be specifying polymorphic arguments) and with plain variable expressions.
 
        if self.has_builtin_keyword() {
 
            return true;
 
        }
 

	
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 

	
 
        if self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'(') {
 
            // Seems like we have a function call or an enum literal
 
            result = true;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
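 
    // A call is either one of the builtin methods ('get', 'put', 'fires',
 
    // 'create', which may carry explicit polymorphic arguments) or a
 
    // user-defined function/component referenced by a namespaced identifier,
 
    // followed by a parenthesized, comma-separated argument list.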
 
    fn consume_call_expression(&mut self, h: &mut Heap) -> Result<CallExpressionId, ParseError2> {
 
    fn consume_call_expression(&mut self, h: &mut Heap) -> Result<CallExpressionId, ParseError> {
 
        let position = self.source.pos();
 

	
 
        // Consume method identifier
 
        // TODO: @token Replace this conditional polymorphic arg parsing once we have a tokenizer.
 
        debug_log!("consume_call_expression: {}", debug_line!(self.source));
 
        let method;
 
        let mut consume_poly_args_explicitly = true;
 
        if self.has_keyword(b"get") {
 
            self.consume_keyword(b"get")?;
 
            method = Method::Get;
 
        } else if self.has_keyword(b"put") {
 
            self.consume_keyword(b"put")?;
 
            method = Method::Put;
 
        } else if self.has_keyword(b"fires") {
 
            self.consume_keyword(b"fires")?;
 
            method = Method::Fires;
 
        } else if self.has_keyword(b"create") {
 
            self.consume_keyword(b"create")?;
 
            method = Method::Create;
 
        } else {
 
            let identifier = self.consume_namespaced_identifier(h)?;
 
            method = Method::Symbolic(MethodSymbolic{
 
                identifier,
 
                definition: None
 
            });
 
            consume_poly_args_explicitly = false;
 
        };
 

	
 
        // Consume polymorphic arguments
 
        let poly_args = if consume_poly_args_explicitly {
 
            self.consume_whitespace(false)?;
 
            self.consume_polymorphic_args(h, true)?.unwrap_or_default()
 
        } else {
 
            Vec::new()
 
        };
 

	
 
        // Consume arguments to call
 
        self.consume_whitespace(false)?;
 
        let mut arguments = Vec::new();
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b")") {
 
            // TODO: allow trailing comma
 
            while self.source.next().is_some() {
 
                arguments.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b")") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?
 
            }
 
        }
 
        self.consume_string(b")")?;
 
        Ok(h.alloc_call_expression(|this| CallExpression {
 
            this,
 
            position,
 
            method,
 
            arguments,
 
            poly_args,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn consume_variable_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<VariableExpressionId, ParseError2> {
 
    ) -> Result<VariableExpressionId, ParseError> {
 
        let position = self.source.pos();
 
        debug_log!("consume_variable_expression: {}", debug_line!(self.source));
 

	
 
        // TODO: @token Reimplement when tokenizer is implemented, prevent ambiguities
 
        let identifier = identifier_as_namespaced(self.consume_identifier()?);
 

	
 
        Ok(h.alloc_variable_expression(|this| VariableExpression {
 
            this,
 
            position,
 
            identifier,
 
            declaration: None,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    // ====================
 
    // Statements
 
    // ====================
 

	
 
    /// Consumes any kind of statement from the source, returning an error if
 
    /// it does not encounter a statement or if the statement is nested too
 
    /// deeply.
 
    ///
 
    /// `wrap_in_block` may be set to true to ensure that the parsed statement
 
    /// will be wrapped in a block statement if it is not already a block
 
    /// statement. This is used to ensure that all `if`, `while` and `sync`
 
    /// statements have a block statement as body.
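 
    /// For example, a lone statement such as "x = 5;" used as the body of an
 
    /// `if` is wrapped so that it behaves as "{ x = 5; }".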
 
    fn consume_statement(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError2> {
 
    fn consume_statement(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested statement"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_statement_impl(h, wrap_in_block);
 
        self.level -= 1;
 
        result
 
    }
 
    fn has_label(&mut self) -> bool {
 
        // To prevent ambiguity with expression statements consisting only of an
 
        // identifier or a namespaced identifier, we look ahead and match on the
 
        // *single* colon that signals a labeled statement.
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 
        if self.consume_identifier_spilled().is_ok() {
 
            // next character is ':', second character is NOT ':'
 
            result = Some(b':') == self.source.next() && Some(b':') != self.source.lookahead(1)
 
        }
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_statement_impl(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError2> {
 
    fn consume_statement_impl(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError> {
 
        // Parse and allocate statement
 
        let mut must_wrap = true;
 
        let mut stmt_id = if self.has_string(b"{") {
 
            must_wrap = false;
 
            self.consume_block_statement(h)?
 
        } else if self.has_keyword(b"skip") {
 
            must_wrap = false;
 
            self.consume_skip_statement(h)?.upcast()
 
        } else if self.has_keyword(b"if") {
 
            self.consume_if_statement(h)?.upcast()
 
        } else if self.has_keyword(b"while") {
 
            self.consume_while_statement(h)?.upcast()
 
        } else if self.has_keyword(b"break") {
 
            self.consume_break_statement(h)?.upcast()
 
        } else if self.has_keyword(b"continue") {
 
            self.consume_continue_statement(h)?.upcast()
 
        } else if self.has_keyword(b"synchronous") {
 
            self.consume_synchronous_statement(h)?.upcast()
 
        } else if self.has_keyword(b"return") {
 
            self.consume_return_statement(h)?.upcast()
 
        } else if self.has_keyword(b"assert") {
 
            self.consume_assert_statement(h)?.upcast()
 
        } else if self.has_keyword(b"goto") {
 
            self.consume_goto_statement(h)?.upcast()
 
        } else if self.has_keyword(b"new") {
 
            self.consume_new_statement(h)?.upcast()
 
        } else if self.has_label() {
 
            self.consume_labeled_statement(h)?.upcast()
 
        } else {
 
            self.consume_expression_statement(h)?.upcast()
 
        };
 

	
 
        // Wrap if desired and if needed
 
        if must_wrap && wrap_in_block {
 
            let position = h[stmt_id].position();
 
            let block_wrapper = h.alloc_block_statement(|this| BlockStatement{
 
                this,
 
                position,
 
                statements: vec![stmt_id],
 
                parent_scope: None,
 
                relative_pos_in_parent: 0,
 
                locals: Vec::new(),
 
                labels: Vec::new()
 
            });
 

	
 
            stmt_id = block_wrapper.upcast();
 
        }
 

	
 
        Ok(stmt_id)
 
    }
 
    fn has_local_statement(&mut self) -> bool {
 
        /* To avoid ambiguity, we look ahead to find either the
 
        channel keyword that signals a variable declaration, or
 
        a type annotation followed by another identifier.
 
        Example:
 
          my_type[] x = {5}; // memory statement
 
          my_var[5] = x; // assignment expression, expression statement
 
        Note how both the local and the assignment
 
        start with arbitrary identifier followed by [. */
 
        if self.has_keyword(b"channel") {
 
            return true;
 
        }
 
        if self.has_statement_keyword() {
 
            return false;
 
        }
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 
        if self.maybe_consume_type_spilled_without_pos_recovery() {
 
            // We seem to have a valid type, do we now have an identifier?
 
            if self.consume_whitespace(true).is_ok() {
 
                result = self.has_identifier();
 
            }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
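 
    // Note that local declarations (channel and memory statements) may only
 
    // appear at the start of a block; an empty block is represented by a
 
    // 'skip' statement.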
 
    fn consume_block_statement(&mut self, h: &mut Heap) -> Result<StatementId, ParseError2> {
 
    fn consume_block_statement(&mut self, h: &mut Heap) -> Result<StatementId, ParseError> {
 
        let position = self.source.pos();
 
        let mut statements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        while self.has_local_statement() {
 
            let (local_id, stmt_id) = self.consume_local_statement(h)?;
 
            statements.push(local_id.upcast());
 
            if let Some(stmt_id) = stmt_id {
 
                statements.push(stmt_id.upcast());
 
            }
 
            self.consume_whitespace(false)?;
 
        }
 
        while !self.has_string(b"}") {
 
            statements.push(self.consume_statement(h, false)?);
 
            self.consume_whitespace(false)?;
 
        }
 
        self.consume_string(b"}")?;
 
        if statements.is_empty() {
 
            Ok(h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }).upcast())
 
        } else {
 
            Ok(h.alloc_block_statement(|this| BlockStatement {
 
                this,
 
                position,
 
                statements,
 
                parent_scope: None,
 
                relative_pos_in_parent: 0,
 
                locals: Vec::new(),
 
                labels: Vec::new(),
 
            })
 
            .upcast())
 
        }
 
    }
 
    fn consume_local_statement(&mut self, h: &mut Heap) -> Result<(LocalStatementId, Option<ExpressionStatementId>), ParseError2> {
 
    fn consume_local_statement(&mut self, h: &mut Heap) -> Result<(LocalStatementId, Option<ExpressionStatementId>), ParseError> {
 
        if self.has_keyword(b"channel") {
 
            let local_id = self.consume_channel_statement(h)?.upcast();
 
            Ok((local_id, None))
 
        } else {
 
            let (memory_id, stmt_id) = self.consume_memory_statement(h)?;
 
            Ok((memory_id.upcast(), Some(stmt_id)))
 
        }
 
    }
 
    fn consume_channel_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ChannelStatementId, ParseError2> {
 
    ) -> Result<ChannelStatementId, ParseError> {
 
        // Consume channel statement and polymorphic argument if specified.
 
        // Needs a tiny bit of special parsing to ensure the right amount of
 
        // whitespace is present.
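 
        // For example: "channel a -> b;" declares an output port 'a' connected
 
        // to an input port 'b'; "channel<msg> a -> b;" gives the payload type.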
 
        let position = self.source.pos();
 
        self.consume_keyword(b"channel")?;
 

	
 
        let expect_whitespace = self.source.next() != Some(b'<');
 
        self.consume_whitespace(expect_whitespace)?;
 
        let poly_args = self.consume_polymorphic_args(h, true)?.unwrap_or_default();
 
        let poly_arg_id = match poly_args.len() {
 
            0 => h.alloc_parser_type(|this| ParserType{
 
                this, pos: position.clone(), variant: ParserTypeVariant::Inferred,
 
            }),
 
            1 => poly_args[0],
 
            _ => return Err(ParseError2::new_error(
 
            _ => return Err(ParseError::new_error(
 
                &self.source, self.source.pos(),
 
                "port construction using 'channel' accepts up to 1 polymorphic argument"
 
            ))
 
        };
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume the output port
 
        let out_parser_type = h.alloc_parser_type(|this| ParserType{
 
            this, pos: position.clone(), variant: ParserTypeVariant::Output(poly_arg_id)
 
        });
 
        let out_identifier = self.consume_identifier()?;
 

	
 
        // Consume the "->" syntax
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b"->")?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume the input port
 
        let in_parser_type = h.alloc_parser_type(|this| ParserType{
 
            this, pos: position.clone(), variant: ParserTypeVariant::Input(poly_arg_id)
 
        });
 
        let in_identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        let out_port = h.alloc_local(|this| Local {
 
            this,
 
            position,
 
            parser_type: out_parser_type,
 
            identifier: out_identifier,
 
            relative_pos_in_block: 0
 
        });
 
        let in_port = h.alloc_local(|this| Local {
 
            this,
 
            position,
 
            parser_type: in_parser_type,
 
            identifier: in_identifier,
 
            relative_pos_in_block: 0
 
        });
 
        Ok(h.alloc_channel_statement(|this| ChannelStatement {
 
            this,
 
            position,
 
            from: out_port,
 
            to: in_port,
 
            relative_pos_in_block: 0,
 
            next: None,
 
        }))
 
    }
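 
    // A memory statement "T x = expr;" is desugared into a local variable
 
    // declaration plus a separate expression statement for the assignment
 
    // "x = expr;".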
 
    fn consume_memory_statement(&mut self, h: &mut Heap) -> Result<(MemoryStatementId, ExpressionStatementId), ParseError2> {
 
    fn consume_memory_statement(&mut self, h: &mut Heap) -> Result<(MemoryStatementId, ExpressionStatementId), ParseError> {
 
        let position = self.source.pos();
 
        let parser_type = self.consume_type2(h, true)?;
 
        let parser_type = self.consume_type(h, true)?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let assignment_position = self.source.pos();
 
        self.consume_string(b"=")?;
 
        self.consume_whitespace(false)?;
 
        let initial = self.consume_expression(h)?;
 
        let variable = h.alloc_local(|this| Local {
 
            this,
 
            position,
 
            parser_type,
 
            identifier: identifier.clone(),
 
            relative_pos_in_block: 0
 
        });
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 

	
 
        // Transform into the variable declaration, followed by an assignment
 
        let memory_stmt_id = h.alloc_memory_statement(|this| MemoryStatement {
 
            this,
 
            position,
 
            variable,
 
            next: None,
 
        });
 
        let variable_expr_id = h.alloc_variable_expression(|this| VariableExpression{
 
            this,
 
            position: identifier.position.clone(),
 
            identifier: identifier_as_namespaced(identifier),
 
            declaration: None,
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        });
 
        let assignment_expr_id = h.alloc_assignment_expression(|this| AssignmentExpression{
 
            this,
 
            position: assignment_position,
 
            left: variable_expr_id.upcast(),
 
            operation: AssignmentOperator::Set,
 
            right: initial,
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        });
 
        let assignment_stmt_id = h.alloc_expression_statement(|this| ExpressionStatement{
 
            this,
 
            position,
 
            expression: assignment_expr_id.upcast(),
 
            next: None
 
        });
 
        Ok((memory_stmt_id, assignment_stmt_id))
 
    }
 
    fn consume_labeled_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<LabeledStatementId, ParseError2> {
 
    ) -> Result<LabeledStatementId, ParseError> {
 
        let position = self.source.pos();
 
        let label = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b":")?;
 
        self.consume_whitespace(false)?;
 
        let body = self.consume_statement(h, false)?;
 
        Ok(h.alloc_labeled_statement(|this| LabeledStatement {
 
            this,
 
            position,
 
            label,
 
            body,
 
            relative_pos_in_block: 0,
 
            in_sync: None,
 
        }))
 
    }
 
    fn consume_skip_statement(&mut self, h: &mut Heap) -> Result<SkipStatementId, ParseError2> {
 
    fn consume_skip_statement(&mut self, h: &mut Heap) -> Result<SkipStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"skip")?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }))
 
    }
 
    fn consume_if_statement(&mut self, h: &mut Heap) -> Result<IfStatementId, ParseError2> {
 
    fn consume_if_statement(&mut self, h: &mut Heap) -> Result<IfStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"if")?;
 
        self.consume_whitespace(false)?;
 
        let test = self.consume_paren_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        let true_body = self.consume_statement(h, true)?;
 
        self.consume_whitespace(false)?;
 
        let false_body = if self.has_keyword(b"else") {
 
            self.consume_keyword(b"else")?;
 
            self.consume_whitespace(false)?;
 
            self.consume_statement(h, true)?
 
        } else {
 
            h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }).upcast()
 
        };
 
        Ok(h.alloc_if_statement(|this| IfStatement { this, position, test, true_body, false_body, end_if: None }))
 
    }
 
    fn consume_while_statement(&mut self, h: &mut Heap) -> Result<WhileStatementId, ParseError2> {
 
    fn consume_while_statement(&mut self, h: &mut Heap) -> Result<WhileStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"while")?;
 
        self.consume_whitespace(false)?;
 
        let test = self.consume_paren_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        let body = self.consume_statement(h, true)?;
 
        Ok(h.alloc_while_statement(|this| WhileStatement {
 
            this,
 
            position,
 
            test,
 
            body,
 
            end_while: None,
 
            in_sync: None,
 
        }))
 
    }
 
    fn consume_break_statement(&mut self, h: &mut Heap) -> Result<BreakStatementId, ParseError2> {
 
    fn consume_break_statement(&mut self, h: &mut Heap) -> Result<BreakStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"break")?;
 
        self.consume_whitespace(false)?;
 
        let label;
 
        if self.has_identifier() {
 
            label = Some(self.consume_identifier()?);
 
            self.consume_whitespace(false)?;
 
        } else {
 
            label = None;
 
        }
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_break_statement(|this| BreakStatement { this, position, label, target: None }))
 
    }
 
    fn consume_continue_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ContinueStatementId, ParseError2> {
 
    ) -> Result<ContinueStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"continue")?;
 
        self.consume_whitespace(false)?;
 
        let label;
 
        if self.has_identifier() {
 
            label = Some(self.consume_identifier()?);
 
            self.consume_whitespace(false)?;
 
        } else {
 
            label = None;
 
        }
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_continue_statement(|this| ContinueStatement {
 
            this,
 
            position,
 
            label,
 
            target: None,
 
        }))
 
    }
 
    fn consume_synchronous_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<SynchronousStatementId, ParseError2> {
 
    ) -> Result<SynchronousStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"synchronous")?;
 
        self.consume_whitespace(false)?;
 
        // TODO: What was the purpose of this? Seems superfluous and confusing?
 
        // let mut parameters = Vec::new();
 
        // if self.has_string(b"(") {
 
        //     self.consume_parameters(h, &mut parameters)?;
 
        //     self.consume_whitespace(false)?;
 
        // } else if !self.has_keyword(b"skip") && !self.has_string(b"{") {
 
        //     return Err(self.error_at_pos("Expected block statement"));
 
        // }
 
        let body = self.consume_statement(h, true)?;
 
        Ok(h.alloc_synchronous_statement(|this| SynchronousStatement {
 
            this,
 
            position,
 
            body,
 
            end_sync: None,
 
            parent_scope: None,
 
        }))
 
    }
 
    fn consume_return_statement(&mut self, h: &mut Heap) -> Result<ReturnStatementId, ParseError2> {
 
    fn consume_return_statement(&mut self, h: &mut Heap) -> Result<ReturnStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"return")?;
 
        self.consume_whitespace(false)?;
 
        let expression = if self.has_string(b"(") {
 
            self.consume_paren_expression(h)
 
        } else {
 
            self.consume_expression(h)
 
        }?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_return_statement(|this| ReturnStatement { this, position, expression }))
 
    }
 
    fn consume_assert_statement(&mut self, h: &mut Heap) -> Result<AssertStatementId, ParseError2> {
 
    fn consume_assert_statement(&mut self, h: &mut Heap) -> Result<AssertStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"assert")?;
 
        self.consume_whitespace(false)?;
 
        let expression = if self.has_string(b"(") {
 
            self.consume_paren_expression(h)
 
        } else {
 
            self.consume_expression(h)
 
        }?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_assert_statement(|this| AssertStatement {
 
            this,
 
            position,
 
            expression,
 
            next: None,
 
        }))
 
    }
 
    fn consume_goto_statement(&mut self, h: &mut Heap) -> Result<GotoStatementId, ParseError2> {
 
    fn consume_goto_statement(&mut self, h: &mut Heap) -> Result<GotoStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"goto")?;
 
        self.consume_whitespace(false)?;
 
        let label = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_goto_statement(|this| GotoStatement { this, position, label, target: None }))
 
    }
 
    fn consume_new_statement(&mut self, h: &mut Heap) -> Result<NewStatementId, ParseError2> {
 
    fn consume_new_statement(&mut self, h: &mut Heap) -> Result<NewStatementId, ParseError> {
 
        let position = self.source.pos();
 
        self.consume_keyword(b"new")?;
 
        self.consume_whitespace(false)?;
 
        let expression = self.consume_call_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_new_statement(|this| NewStatement { this, position, expression, next: None }))
 
    }
 
    fn consume_expression_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ExpressionStatementId, ParseError2> {
 
    ) -> Result<ExpressionStatementId, ParseError> {
 
        let position = self.source.pos();
 
        let expression = self.consume_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(h.alloc_expression_statement(|this| ExpressionStatement {
 
            this,
 
            position,
 
            expression,
 
            next: None,
 
        }))
 
    }
 

	
 
    // ====================
 
    // Symbol definitions
 
    // ====================
 

	
 
    fn has_symbol_definition(&self) -> bool {
 
        self.has_keyword(b"composite")
 
            || self.has_keyword(b"primitive")
 
            || self.has_type_keyword()
 
            || self.has_identifier()
 
    }
 
    fn consume_symbol_definition(&mut self, h: &mut Heap) -> Result<DefinitionId, ParseError2> {
 
    fn consume_symbol_definition(&mut self, h: &mut Heap) -> Result<DefinitionId, ParseError> {
 
        if self.has_keyword(b"struct") {
 
            Ok(self.consume_struct_definition(h)?.upcast())
 
        } else if self.has_keyword(b"enum") {
 
            Ok(self.consume_enum_definition(h)?.upcast())
 
        } else if self.has_keyword(b"composite") || self.has_keyword(b"primitive") {
 
            Ok(self.consume_component_definition(h)?.upcast())
 
        } else {
 
            Ok(self.consume_function_definition(h)?.upcast())
 
        }
 
    }
 
    fn consume_struct_definition(&mut self, h: &mut Heap) -> Result<StructId, ParseError2> {
 
    fn consume_struct_definition(&mut self, h: &mut Heap) -> Result<StructId, ParseError> {
 
        // Parse "struct" keyword, optional polyvars and its identifier
 
        let struct_pos = self.source.pos();
 
        self.consume_keyword(b"struct")?;
 
        self.consume_whitespace(true)?;
 
        let struct_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Parse struct fields
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let position = lexer.source.pos();
 
                let parser_type = lexer.consume_type2(heap, false)?;
 
                let parser_type = lexer.consume_type(heap, false)?;
 
                lexer.consume_whitespace(true)?;
 
                let field = lexer.consume_identifier()?;
 

	
 
                Ok(StructFieldDefinition{ position, field, parser_type })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError2::new_error(
 
            None => return Err(ParseError::new_error(
 
                self.source, struct_pos,
 
                "A struct definition must be followed by its fields"
 
            )),
 
        };
 

	
 
        // Valid struct definition
 
        Ok(h.alloc_struct_definition(|this| StructDefinition{
 
            this,
 
            position: struct_pos,
 
            identifier: struct_ident,
 
            poly_vars,
 
            fields,
 
        }))
 
    }
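 
    // An enum definition lists comma-separated variants; a variant is either a
 
    // bare identifier, an identifier with an integer value ("Tag = 5"), or an
 
    // identifier with an embedded type ("Some(T)").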
 
    fn consume_enum_definition(&mut self, h: &mut Heap) -> Result<EnumId, ParseError2> {
 
    fn consume_enum_definition(&mut self, h: &mut Heap) -> Result<EnumId, ParseError> {
 
        // Parse "enum" keyword, optional polyvars and its identifier
 
        let enum_pos = self.source.pos();
 
        self.consume_keyword(b"enum")?;
 
        self.consume_whitespace(true)?;
 
        let enum_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        let variants = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected end of enum variant list",
 
            |lexer, heap| {
 
                // Variant identifier
 
                let position = lexer.source.pos();
 
                let identifier = lexer.consume_identifier()?;
 
                lexer.consume_whitespace(false)?;
 

	
 
                // Optional variant value/type
 
                let next = lexer.source.next();
 
                let value = match next {
 
                    Some(b',') => {
 
                        // Do not consume, let `consume_comma_separated` handle
 
                        // the next item
 
                        EnumVariantValue::None
 
                    },
 
                    Some(b'=') => {
 
                        // Integer value
 
                        lexer.source.consume();
 
                        lexer.consume_whitespace(false)?;
 
                        if !lexer.has_integer() {
 
                            return Err(lexer.error_at_pos("Expected integer"))
 
                        }
 
                        let value = lexer.consume_integer()?;
 
                        EnumVariantValue::Integer(value)
 
                    },
 
                    Some(b'(') => {
 
                        // Embedded type
 
                        lexer.source.consume();
 
                        lexer.consume_whitespace(false)?;
 
                        let embedded_type = lexer.consume_type2(heap, false)?;
 
                        let embedded_type = lexer.consume_type(heap, false)?;
 
                        lexer.consume_whitespace(false)?;
 
                        lexer.consume_string(b")")?;
 
                        EnumVariantValue::Type(embedded_type)
 
                    },
 
                    _ => {
 
                        return Err(lexer.error_at_pos("Expected ',', '=', or '('"));
 
                    }
 
                };
 

	
 
                Ok(EnumVariantDefinition{ position, identifier, value })
 
            }
 
        )? {
 
            Some(variants) => variants,
 
            None => return Err(ParseError2::new_error(
 
            None => return Err(ParseError::new_error(
 
                self.source, enum_pos,
 
                "An enum definition must be followed by its variants"
 
            )),
 
        };
 

	
 
        Ok(h.alloc_enum_definition(|this| EnumDefinition{
 
            this,
 
            position: enum_pos,
 
            identifier: enum_ident,
 
            poly_vars,
 
            variants,
 
        }))
 
    }
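As a rough sketch of what the variant grammar above accepts, the helper below lexes an enum that uses all three variant forms: a bare variant, an integer-valued variant, and a type-embedding variant. It is only an illustration: it assumes the crate-internal InputSource, Heap, Lexer and ParseError are in scope, and that "msg" is a valid type in this dialect.

    fn lex_enum_example() -> Result<(), ParseError> {
        let mut source = InputSource::from_string(
            "enum Example { Bare, WithValue = 5, WithType(msg) }"
        ).expect("in-memory sources are always readable");
        let mut heap = Heap::new();
        let mut lexer = Lexer::new(&mut source);
        // The enum becomes a definition in the module root, with variant values
        // EnumVariantValue::None, ::Integer(5) and ::Type(..) respectively.
        let _root_id = lexer.consume_protocol_description(&mut heap)?;
        Ok(())
    }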
 
    fn consume_component_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError2> {
 
    fn consume_component_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // TODO: Cleanup
 
        if self.has_keyword(b"composite") {
 
            Ok(self.consume_composite_definition(h)?)
 
        } else {
 
            Ok(self.consume_primitive_definition(h)?)
 
        }
 
    }
 
    fn consume_composite_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError2> {
 
    fn consume_composite_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // Parse keyword, optional polyvars and the identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"composite")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Parse body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_component(|this| Component { 
 
            this,
 
            variant: ComponentVariant::Composite,
 
            position,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body
 
        }))
 
    }
 
    fn consume_primitive_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError2> {
 
    fn consume_primitive_definition(&mut self, h: &mut Heap) -> Result<ComponentId, ParseError> {
 
        // Consume keyword, optional polyvars and identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"primitive")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_component(|this| Component { 
 
            this,
 
            variant: ComponentVariant::Primitive,
 
            position,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body
 
        }))
 
    }
 
    fn consume_function_definition(&mut self, h: &mut Heap) -> Result<FunctionId, ParseError2> {
 
    fn consume_function_definition(&mut self, h: &mut Heap) -> Result<FunctionId, ParseError> {
 
        // Consume return type, optional polyvars and identifier
 
        let position = self.source.pos();
 
        let return_type = self.consume_type2(h, false)?;
 
        let return_type = self.consume_type(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_function(|this| Function {
 
            this,
 
            position,
 
            return_type,
 
            identifier,
 
            poly_vars,
 
            parameters,
 
            body,
 
        }))
 
    }
 
    fn has_pragma(&self) -> bool {
 
        if let Some(c) = self.source.next() {
 
            c == b'#'
 
        } else {
 
            false
 
        }
 
    }
 
    fn consume_pragma(&mut self, h: &mut Heap) -> Result<PragmaId, ParseError2> {
 
    fn consume_pragma(&mut self, h: &mut Heap) -> Result<PragmaId, ParseError> {
 
        let position = self.source.pos();
 
        let next = self.source.next();
 
        if next != Some(b'#') {
 
            return Err(self.error_at_pos("Expected pragma"));
 
        }
 
        self.source.consume();
 
        if !is_vchar(self.source.next()) {
 
            return Err(self.error_at_pos("Expected pragma"));
 
        }
 
        if self.has_string(b"version") {
 
            self.consume_string(b"version")?;
 
            self.consume_whitespace(true)?;
 
            if !self.has_integer() {
 
                return Err(self.error_at_pos("Expected integer constant"));
 
            }
 
            let version = self.consume_integer()?;
 
            debug_assert!(version >= 0);
 
            return Ok(h.alloc_pragma(|this| Pragma::Version(PragmaVersion{
 
                this, position, version: version as u64
 
            })))
 
        } else if self.has_string(b"module") {
 
            self.consume_string(b"module")?;
 
            self.consume_whitespace(true)?;
 
            if !self.has_identifier() {
 
                return Err(self.error_at_pos("Expected identifier"));
 
            }
 
            let mut value = Vec::new();
 
            let mut ident = self.consume_ident()?;
 
            value.append(&mut ident);
 
            while self.has_string(b".") {
 
                self.consume_string(b".")?;
 
                value.push(b'.');
 
                ident = self.consume_ident()?;
 
                value.append(&mut ident);
 
            }
 
            return Ok(h.alloc_pragma(|this| Pragma::Module(PragmaModule{
 
                this, position, value
 
            })));
 
        } else {
 
            return Err(self.error_at_pos("Unknown pragma"));
 
        }
 
    }
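The two pragmas accepted above look like this in source (the values are hypothetical):

    #version 1
    #module some.module.name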
 

	
 
    fn has_import(&self) -> bool {
 
        self.has_keyword(b"import")
 
    }
 
    fn consume_import(&mut self, h: &mut Heap) -> Result<ImportId, ParseError2> {
 
    fn consume_import(&mut self, h: &mut Heap) -> Result<ImportId, ParseError> {
 
        // Parse the word "import" and the name of the module
 
        let position = self.source.pos();
 
        self.consume_keyword(b"import")?;
 
        self.consume_whitespace(true)?;
 
        let mut value = Vec::new();
 
        let mut last_ident_pos = self.source.pos();
 
        let mut ident = self.consume_ident()?;
 
        value.append(&mut ident);
 
        let mut last_ident_start = 0;
 

	
 
        while self.has_string(b".") {
 
            self.consume_string(b".")?;
 
            value.push(b'.');
 
            last_ident_pos = self.source.pos();
 
            ident = self.consume_ident()?;
 
            last_ident_start = value.len();
 
            value.append(&mut ident);
 
        }
 

	
 

	
 
        self.consume_whitespace(false)?;
 

	
 
        // Check for an explicit module alias or for specific symbol imports
 
        let import = if self.has_string(b"as") {
 
            self.consume_string(b"as")?;
 
            self.consume_whitespace(true)?;
 
            let alias = self.consume_identifier()?;
 

	
 
            h.alloc_import(|this| Import::Module(ImportModule{
 
                this,
 
                position,
 
                module_name: value,
 
                alias,
 
                module_id: None,
 
            }))
 
        } else if self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 

	
 
            let next = self.source.next();
 
            if Some(b'{') == next {
 
                let symbols = match self.consume_comma_separated(
 
                    h, b'{', b'}', "Expected end of import list",
 
                    |lexer, _heap| {
 
                        // Symbol name
 
                        let position = lexer.source.pos();
 
                        let name = lexer.consume_identifier()?;
 
                        lexer.consume_whitespace(false)?;
 

	
 
                        // Symbol alias
 
                        if lexer.has_string(b"as") {
 
                            // With alias
 
                            lexer.consume_string(b"as")?;
 
                            lexer.consume_whitespace(true)?;
 
                            let alias = lexer.consume_identifier()?;
 

	
 
                            Ok(AliasedSymbol{
 
                                position,
 
                                name,
 
                                alias,
 
                                definition_id: None
 
                            })
 
                        } else {
 
                            // Without alias
 
                            Ok(AliasedSymbol{
 
                                position,
 
                                name: name.clone(),
 
                                alias: name,
 
                                definition_id: None
 
                            })
 
                        }
 
                    }
 
                )? {
 
                    Some(symbols) => symbols,
 
                    None => unreachable!(), // because we checked for opening '{'
 
                };
 

	
 
                h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    position,
 
                    module_name: value,
 
                    module_id: None,
 
                    symbols,
 
                }))
 
            } else if Some(b'*') == next {
 
                self.source.consume();
 
                h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    position,
 
                    module_name: value,
 
                    module_id: None,
 
                    symbols: Vec::new()
 
                }))
 
            } else if self.has_identifier() {
 
                let position = self.source.pos();
 
                let name = self.consume_identifier()?;
 
                self.consume_whitespace(false)?;
 
                let alias = if self.has_string(b"as") {
 
                    self.consume_string(b"as")?;
 
                    self.consume_whitespace(true)?;
 
                    self.consume_identifier()?
 
                } else {
 
                    name.clone()
 
                };
 

	
 
                h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    position,
 
                    module_name: value,
 
                    module_id: None,
 
                    symbols: vec![AliasedSymbol{
 
                        position,
 
                        name,
 
                        alias,
 
                        definition_id: None
 
                    }]
 
                }))
 
            } else {
 
                return Err(self.error_at_pos("Expected '*', '{' or a symbol name"));
 
            }
 
        } else {
 
            // No explicit alias or subimports, so implicit alias
 
            let alias_value = Vec::from(&value[last_ident_start..]);
 
            h.alloc_import(|this| Import::Module(ImportModule{
 
                this,
 
                position,
 
                module_name: value,
 
                alias: Identifier{
 
                    position: last_ident_pos,
 
                    value: alias_value,
 
                },
 
                module_id: None,
 
            }))
 
        };
 

	
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(import)
 
    }
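Summarizing the branches above, the import statement comes in these forms (module and symbol names are hypothetical):

    import std.compute;                    (implicit alias "compute")
    import std.compute as comp;            (explicit module alias)
    import std.compute::*;                 (import all symbols)
    import std.compute::{sum, max as m};   (specific symbols, optionally aliased)
    import std.compute::sum as s;          (a single symbol with an alias)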
 
    pub fn consume_protocol_description(&mut self, h: &mut Heap) -> Result<RootId, ParseError2> {
 
    pub fn consume_protocol_description(&mut self, h: &mut Heap) -> Result<RootId, ParseError> {
 
        let position = self.source.pos();
 
        let mut pragmas = Vec::new();
 
        let mut imports = Vec::new();
 
        let mut definitions = Vec::new();
 
        self.consume_whitespace(false)?;
 
        while self.has_pragma() {
 
            let pragma = self.consume_pragma(h)?;
 
            pragmas.push(pragma);
 
            self.consume_whitespace(false)?;
 
        }
 
        while self.has_import() {
 
            let import = self.consume_import(h)?;
 
            imports.push(import);
 
            self.consume_whitespace(false)?;
 
        }
 
        while self.has_symbol_definition() {
 
            let def = self.consume_symbol_definition(h)?;
 
            definitions.push(def);
 
            self.consume_whitespace(false)?;
 
        }
 
        // end of file
 
        if !self.source.is_eof() {
 
            return Err(self.error_at_pos("Expected end of file"));
 
        }
 
        Ok(h.alloc_protocol_description(|this| Root {
 
            this,
 
            position,
 
            pragmas,
 
            imports,
 
            definitions,
 
        }))
 
    }
 
}
src/protocol/parser/mod.rs
Show inline comments
 
mod depth_visitor;
 
pub(crate) mod symbol_table;
 
pub(crate) mod type_table;
 
mod type_resolver;
 
mod visitor;
 
mod visitor_linker;
 
mod utils;
 

	
 
use depth_visitor::*;
 
use symbol_table::SymbolTable;
 
use visitor::Visitor2;
 
use visitor_linker::ValidityAndLinkerVisitor;
 
use type_resolver::{TypeResolvingVisitor, ResolveQueue};
 
use type_table::{TypeTable, TypeCtx};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::lexer::*;
 

	
 
use std::collections::HashMap;
 
use crate::protocol::ast_printer::ASTWriter;
 

	
 
// TODO: @fixme, pub qualifier
 
pub(crate) struct LexedModule {
 
    pub(crate) source: InputSource,
 
    module_name: Vec<u8>,
 
    version: Option<u64>,
 
    pub(crate) root_id: RootId,
 
}
 

	
 
pub struct Parser {
 
    pub(crate) heap: Heap,
 
    pub(crate) modules: Vec<LexedModule>,
 
    pub(crate) module_lookup: HashMap<Vec<u8>, usize>, // from (optional) module name to `modules` idx
 
    pub(crate) symbol_table: SymbolTable,
 
    pub(crate) type_table: TypeTable,
 
}
 

	
 
impl Parser {
 
    pub fn new() -> Self {
 
        Parser{
 
            heap: Heap::new(),
 
            modules: Vec::new(),
 
            module_lookup: HashMap::new(),
 
            symbol_table: SymbolTable::new(),
 
            type_table: TypeTable::new(),
 
        }
 
    }
 

	
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<RootId, ParseError2> {
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<RootId, ParseError> {
 
        // Lex the input source
 
        let mut lex = Lexer::new(&mut source);
 
        let pd = lex.consume_protocol_description(&mut self.heap)?;
 

	
 
        // Seek the module name and version
 
        let root = &self.heap[pd];
 
        let mut module_name_pos = InputPosition::default();
 
        let mut module_name = Vec::new();
 
        let mut module_version_pos = InputPosition::default();
 
        let mut module_version = None;
 

	
 
        for pragma in &root.pragmas {
 
            match &self.heap[*pragma] {
 
                Pragma::Module(module) => {
 
                    if !module_name.is_empty() {
 
                        return Err(
 
                            ParseError2::new_error(&source, module.position, "Double definition of module name in the same file")
 
                            ParseError::new_error(&source, module.position, "Double definition of module name in the same file")
 
                                .with_postfixed_info(&source, module_name_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_name_pos = module.position.clone();
 
                    module_name = module.value.clone();
 
                },
 
                Pragma::Version(version) => {
 
                    if module_version.is_some() {
 
                        return Err(
 
                            ParseError2::new_error(&source, version.position, "Double definition of module version")
 
                            ParseError::new_error(&source, version.position, "Double definition of module version")
 
                                .with_postfixed_info(&source, module_version_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_version_pos = version.position.clone();
 
                    module_version = Some(version.version);
 
                },
 
            }
 
        }
 

	
 
        // Add module to list of modules and prevent naming conflicts
 
        let cur_module_idx = self.modules.len();
 
        if let Some(prev_module_idx) = self.module_lookup.get(&module_name) {
 
            // Find `#module` statement in other module again
 
            let prev_module = &self.modules[*prev_module_idx];
 
            let prev_module_pos = self.heap[prev_module.root_id].pragmas
 
                .iter()
 
                .find_map(|p| {
 
                    match &self.heap[*p] {
 
                        Pragma::Module(module) => Some(module.position.clone()),
 
                        _ => None
 
                    }
 
                })
 
                .unwrap_or(InputPosition::default());
 

	
 
            let module_name_msg = if module_name.is_empty() {
 
                format!("a nameless module")
 
            } else {
 
                format!("module '{}'", String::from_utf8_lossy(&module_name))
 
            };
 

	
 
            return Err(
 
                ParseError2::new_error(&source, module_name_pos, &format!("Double definition of {} across files", module_name_msg))
 
                ParseError::new_error(&source, module_name_pos, &format!("Double definition of {} across files", module_name_msg))
 
                    .with_postfixed_info(&prev_module.source, prev_module_pos, "Other definition was here")
 
            );
 
        }
 

	
 
        self.modules.push(LexedModule{
 
            source,
 
            module_name: module_name.clone(),
 
            version: module_version,
 
            root_id: pd
 
        });
 
        self.module_lookup.insert(module_name, cur_module_idx);
 
        Ok(pd)
 
    }
 

	
 
    fn resolve_symbols_and_types(&mut self) -> Result<(), ParseError2> {
 
    fn resolve_symbols_and_types(&mut self) -> Result<(), ParseError> {
 
        // Construct the symbol table to resolve any imports and/or definitions,
 
        // then use the symbol table to actually annotate all of the imports.
 
        // If the type table is constructed correctly then all imports MUST be
 
        // resolvable.
 
        self.symbol_table.build(&self.heap, &self.modules)?;
 

	
 
        // Not pretty, but we need to work around Rust's borrowing rules; it is
 
        // totally safe to mutate the contents of an AST element that we are
 
        // not borrowing anywhere else.
 
        let mut module_index = 0;
 
        let mut import_index = 0;
 
        loop {
 
            if module_index >= self.modules.len() {
 
                break;
 
            }
 

	
 
            let module_root_id = self.modules[module_index].root_id;
 
            let import_id = {
 
                let root = &self.heap[module_root_id];
 
                if import_index >= root.imports.len() {
 
                    module_index += 1;
 
                    import_index = 0;
 
                    continue
 
                }
 
                root.imports[import_index]
 
            };
 

	
 
            let import = &mut self.heap[import_id];
 
            match import {
 
                Import::Module(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module import already resolved");
 
                    let target_module_id = self.symbol_table.resolve_module(&import.module_name)
 
                        .expect("module import is resolved by symbol table");
 
                    import.module_id = Some(target_module_id)
 
                },
 
                Import::Symbols(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module of symbol import already resolved");
 
                    let target_module_id = self.symbol_table.resolve_module(&import.module_name)
 
                        .expect("symbol import's module is resolved by symbol table");
 
                    import.module_id = Some(target_module_id);
 

	
 
                    for symbol in &mut import.symbols {
 
                        debug_assert!(symbol.definition_id.is_none(), "symbol import already resolved");
 
                        let (_, target_definition_id) = self.symbol_table.resolve_identifier(module_root_id, &symbol.alias)
 
                            .expect("symbol import is resolved by symbol table")
 
                            .as_definition()
 
                            .expect("symbol import does not resolve to namespace symbol");
 
                        symbol.definition_id = Some(target_definition_id);
 
                    }
 
                }
 
            }
 

	
 
            import_index += 1;
 
        }
 

	
 
        // All imports in the AST are now annotated. We now use the symbol table
 
        // to construct the type table.
 
        let mut type_ctx = TypeCtx::new(&self.symbol_table, &mut self.heap, &self.modules);
 
        self.type_table.build_base_types(&mut type_ctx)?;
 

	
 
        Ok(())
 
    }
 

	
 
    pub fn parse(&mut self) -> Result<(), ParseError2> {
 
    pub fn parse(&mut self) -> Result<(), ParseError> {
 
        self.resolve_symbols_and_types()?;
 

	
 
        // Validate and link all modules
 
        let mut visit = ValidityAndLinkerVisitor::new();
 
        for module in &self.modules {
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.visit_module(&mut ctx)?;
 
        }
 

	
 
        // Perform typechecking on all modules
 
        let mut visit = TypeResolvingVisitor::new();
 
        let mut queue = ResolveQueue::new();
 
        for module in &self.modules {
 
            let ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            TypeResolvingVisitor::queue_module_definitions(&ctx, &mut queue);   
 
        };
 
        while !queue.is_empty() {
 
            let top = queue.pop().unwrap();
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module: &self.modules[top.root_id.index as usize],
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.handle_module_definition(&mut ctx, &mut queue, top)?;
 
        }
 

	
 
        // Perform remaining steps
 
        // TODO: Phase out at some point
 
        for module in &self.modules {
 
            let root_id = module.root_id;
 
            if let Err((position, message)) = Self::parse_inner(&mut self.heap, root_id) {
 
                return Err(ParseError2::new_error(&self.modules[0].source, position, &message))
 
                return Err(ParseError::new_error(&self.modules[0].source, position, &message))
 
            }
 
        }
 

	
 
        // let mut writer = ASTWriter::new();
 
        // let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap();
 
        // writer.write_ast(&mut file, &self.heap);
 

	
 
        Ok(())
 
    }
 

	
 
    pub fn parse_inner(h: &mut Heap, pd: RootId) -> VisitorResult {
 
        // TODO: @cleanup, slowly phasing out old compiler
 
        // NestedSynchronousStatements::new().visit_protocol_description(h, pd)?;
 
        // ChannelStatementOccurrences::new().visit_protocol_description(h, pd)?;
 
        // FunctionStatementReturns::new().visit_protocol_description(h, pd)?;
 
        // ComponentStatementReturnNew::new().visit_protocol_description(h, pd)?;
 
        // CheckBuiltinOccurrences::new().visit_protocol_description(h, pd)?;
 
        // BuildSymbolDeclarations::new().visit_protocol_description(h, pd)?;
 
        // LinkCallExpressions::new().visit_protocol_description(h, pd)?;
 
        // BuildScope::new().visit_protocol_description(h, pd)?;
 
        // ResolveVariables::new().visit_protocol_description(h, pd)?;
 
        LinkStatements::new().visit_protocol_description(h, pd)?;
 
        // BuildLabels::new().visit_protocol_description(h, pd)?;
 
        // ResolveLabels::new().visit_protocol_description(h, pd)?;
 
        AssignableExpressions::new().visit_protocol_description(h, pd)?;
 
        IndexableExpressions::new().visit_protocol_description(h, pd)?;
 
        SelectableExpressions::new().visit_protocol_description(h, pd)?;
 

	
 
        Ok(())
 
    }
 
}
 
\ No newline at end of file
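A minimal sketch of driving the parser above from inside the crate (the source string is a placeholder and the types are crate-internal, so this is illustrative rather than a public API):

    fn compile_single_module(pdl_source: &str) -> Result<Heap, ParseError> {
        let source = InputSource::from_string(pdl_source)
            .expect("in-memory sources are always readable");
        let mut parser = Parser::new();
        let _root_id = parser.feed(source)?; // lex the module, record its name/version
        parser.parse()?;                     // resolve symbols/types, run validity passes
        Ok(parser.heap)
    }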
src/protocol/parser/symbol_table.rs
Show inline comments
 
// TODO: Maybe allow namespaced-aliased imports. It is currently not possible
 
//  to express the following:
 
//      import Module.Submodule as SubMod
 
//      import SubMod::{Symbol}
 
//  And it is especially not possible to express the following:
 
//      import SubMod::{Symbol}
 
//      import Module.Submodule as SubMod
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 

	
 
use std::collections::{HashMap, hash_map::Entry};
 
use crate::protocol::parser::LexedModule;
 

	
 
#[derive(PartialEq, Eq, Hash)]
 
struct SymbolKey {
 
    module_id: RootId,
 
    symbol_name: Vec<u8>,
 
}
 

	
 
impl SymbolKey {
 
    fn from_identifier(module_id: RootId, symbol: &Identifier) -> Self {
 
        Self{ module_id, symbol_name: symbol.value.clone() }
 
    }
 

	
 
    fn from_namespaced_identifier(module_id: RootId, symbol: &NamespacedIdentifier) -> Self {
 
        Self{ module_id, symbol_name: symbol.strip_poly_args() }
 
    }
 
}
 

	
 
pub(crate) enum Symbol {
 
    Namespace(RootId),
 
    Definition((RootId, DefinitionId)),
 
}
 

	
 
pub(crate) struct SymbolValue {
 
    // Position is the place where the symbol is introduced to a module (this
 
    // position always corresponds to the module whose RootId is stored in the
 
    // `SymbolKey` associated with this `SymbolValue`). For a definition this
 
    // is the position where the symbol is defined, for an import this is the
 
    // position of the import statement.
 
    pub(crate) position: InputPosition,
 
    pub(crate) symbol: Symbol,
 
}
 

	
 
impl SymbolValue {
 
    pub(crate) fn is_namespace(&self) -> bool {
 
        match &self.symbol {
 
            Symbol::Namespace(_) => true,
 
            _ => false
 
        }
 
    }
 
    pub(crate) fn as_namespace(&self) -> Option<RootId> {
 
        match &self.symbol {
 
            Symbol::Namespace(root_id) => Some(*root_id),
 
            _ => None,
 
        }
 
    }
 

	
 
    pub(crate) fn as_definition(&self) -> Option<(RootId, DefinitionId)> {
 
        match &self.symbol {
 
            Symbol::Definition((root_id, definition_id)) => Some((*root_id, *definition_id)),
 
            _ => None,
 
        }
 
    }
 
}
 
/// `SymbolTable` is responsible for two parts of the parsing process: firstly
 
/// it ensures that there are no clashing symbol definitions within each file,
 
/// and secondly it will resolve all symbols within a module to their
 
/// appropriate definitions (in case of enums, functions, etc.) and namespaces
 
/// (currently only external modules can act as namespaces). If a symbol clashes
 
/// or if a symbol cannot be resolved this will be an error.
 
///
 
/// Within the compilation process the symbol table is responsible for resolving
 
/// namespaced identifiers (e.g. Module::Enum::EnumVariant) to the appropriate
 
/// definition (i.e. not namespaces; as the language has no way to use
 
/// namespaces except for using them in namespaced identifiers).
 
pub(crate) struct SymbolTable {
 
    // Lookup from module name (not any aliases) to the root id
 
    module_lookup: HashMap<Vec<u8>, RootId>,
 
    // Lookup from within a module, to a particular imported (potentially
 
    // aliased) or defined symbol. Basically speaking: if the source code of a
 
    // module contains correctly imported/defined symbols, then this lookup
 
    // will always return the corresponding definition
 
    symbol_lookup: HashMap<SymbolKey, SymbolValue>,
 
}
 

	
 
impl SymbolTable {
 
    pub(crate) fn new() -> Self {
 
        Self{ module_lookup: HashMap::new(), symbol_lookup: HashMap::new() }
 
    }
 

	
 
    pub(crate) fn build(&mut self, heap: &Heap, modules: &[LexedModule]) -> Result<(), ParseError2> {
 
    pub(crate) fn build(&mut self, heap: &Heap, modules: &[LexedModule]) -> Result<(), ParseError> {
 
        // Sanity checks
 
        debug_assert!(self.module_lookup.is_empty());
 
        debug_assert!(self.symbol_lookup.is_empty());
 
        if cfg!(debug_assertions) {
 
            for (index, module) in modules.iter().enumerate() {
 
                debug_assert_eq!(
 
                    index, module.root_id.index as usize,
 
                    "module RootId does not correspond to LexedModule index"
 
                )
 
            }
 
        }
 

	
 
        // Preparation: create a lookup from module name to root id. This does
 
        // not take aliasing into account.
 
        self.module_lookup.reserve(modules.len());
 
        for module in modules {
 
            // TODO: Maybe put duplicate module name checking here?
 
            // TODO: @string
 
            self.module_lookup.insert(module.module_name.clone(), module.root_id);
 
        }
 

	
 
        // Preparation: determine total number of imports we will be inserting
 
        // into the lookup table. We could just iterate over the arena, but then
 
        // we don't know the source file the import belongs to.
 
        let mut lookup_reserve_size = 0;
 
        for module in modules {
 
            let module_root = &heap[module.root_id];
 
            for import_id in &module_root.imports {
 
                match &heap[*import_id] {
 
                    Import::Module(_) => lookup_reserve_size += 1,
 
                    Import::Symbols(import) => {
 
                        if import.symbols.is_empty() {
 
                            // Add all symbols from the other module
 
                            match self.module_lookup.get(&import.module_name) {
 
                                Some(target_module_id) => {
 
                                    lookup_reserve_size += heap[*target_module_id].definitions.len()
 
                                },
 
                                None => {
 
                                    return Err(
 
                                        ParseError2::new_error(&module.source, import.position, "Cannot resolve module")
 
                                        ParseError::new_error(&module.source, import.position, "Cannot resolve module")
 
                                    );
 
                                }
 
                            }
 
                        } else {
 
                            lookup_reserve_size += import.symbols.len();
 
                        }
 
                    }
 
                }
 
            }
 

	
 
            lookup_reserve_size += module_root.definitions.len();
 
        }
 

	
 
        self.symbol_lookup.reserve(lookup_reserve_size);
 

	
 
        // First pass: we go through all of the modules and add lookups to
 
        // symbols that are defined within that module. Cross-module imports are
 
        // not yet resolved
 
        for module in modules {
 
            let root = &heap[module.root_id];
 
            for definition_id in &root.definitions {
 
                let definition = &heap[*definition_id];
 
                let identifier = definition.identifier();
 
                if let Err(previous_position) = self.add_definition_symbol(
 
                    identifier.position, SymbolKey::from_identifier(module.root_id, &identifier),
 
                    module.root_id, *definition_id
 
                ) {
 
                    return Err(
 
                        ParseError2::new_error(&module.source, definition.position(), "Symbol is multiply defined")
 
                        ParseError::new_error(&module.source, definition.position(), "Symbol is multiply defined")
 
                            .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                    )
 
                }
 
            }
 
        }
 

	
 
        // Second pass: now that we can find symbols in modules, we can resolve
 
        // all imports (if they're correct, that is)
 
        for module in modules {
 
            let root = &heap[module.root_id];
 
            for import_id in &root.imports {
 
                let import = &heap[*import_id];
 
                match import {
 
                    Import::Module(import) => {
 
                        // Find the module using its name
 
                        let target_root_id = self.resolve_module(&import.module_name);
 
                        if target_root_id.is_none() {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Could not resolve module"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Could not resolve module"));
 
                        }
 
                        let target_root_id = target_root_id.unwrap();
 
                        if target_root_id == module.root_id {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Illegal import of self"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Illegal import of self"));
 
                        }
 

	
 
                        // Add the target module under its alias
 
                        if let Err(previous_position) = self.add_namespace_symbol(
 
                            import.position, SymbolKey::from_identifier(module.root_id, &import.alias),
 
                            target_root_id
 
                        ) {
 
                            return Err(
 
                                ParseError2::new_error(&module.source, import.position, "Symbol is multiply defined")
 
                                ParseError::new_error(&module.source, import.position, "Symbol is multiply defined")
 
                                    .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                            );
 
                        }
 
                    },
 
                    Import::Symbols(import) => {
 
                        // Find the target module using its name
 
                        let target_root_id = self.resolve_module(&import.module_name);
 
                        if target_root_id.is_none() {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Could not resolve module of symbol imports"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Could not resolve module of symbol imports"));
 
                        }
 
                        let target_root_id = target_root_id.unwrap();
 
                        if target_root_id == module.root_id {
 
                            return Err(ParseError2::new_error(&module.source, import.position, "Illegal import of self"));
 
                            return Err(ParseError::new_error(&module.source, import.position, "Illegal import of self"));
 
                        }
 

	
 
                        // Determine which symbols to import
 
                        if import.symbols.is_empty() {
 
                            // Import of all symbols, not using any aliases
 
                            for definition_id in &heap[target_root_id].definitions {
 
                                let definition = &heap[*definition_id];
 
                                let identifier = definition.identifier();
 
                                if let Err(previous_position) = self.add_definition_symbol(
 
                                    import.position, SymbolKey::from_identifier(module.root_id, identifier),
 
                                    target_root_id, *definition_id
 
                                ) {
 
                                    return Err(
 
                                        ParseError2::new_error(
 
                                        ParseError::new_error(
 
                                            &module.source, import.position,
 
                                            &format!("Imported symbol '{}' is already defined", String::from_utf8_lossy(&identifier.value))
 
                                        )
 
                                        .with_postfixed_info(
 
                                            &modules[target_root_id.index as usize].source,
 
                                            definition.position(),
 
                                            "The imported symbol is defined here"
 
                                        )
 
                                        .with_postfixed_info(
 
                                            &module.source, previous_position, "And is previously defined here"
 
                                        )
 
                                    )
 
                                }
 
                            }
 
                        } else {
 
                            // Import of specific symbols, optionally using aliases
 
                            for symbol in &import.symbols {
 
                                // Because we have already added per-module definitions, we can use
 
                                // the table to lookup this particular symbol. Note: within a single
 
                                // module a namespace-import and a symbol-import may not collide.
 
                                // Hence per-module symbols are unique.
 
                                // However: if we import a symbol from another module, we don't want
 
                                // to "import a module's imported symbol". And so if we do find
 
                                // a symbol match, we need to make sure it is a definition from
 
                                // within that module by checking `source_root_id == target_root_id`
 
                                let key = SymbolKey::from_identifier(target_root_id, &symbol.name);
 
                                let target_symbol = self.symbol_lookup.get(&key);
 
                                let symbol_definition_id = match target_symbol {
 
                                    Some(target_symbol) => {
 
                                        match target_symbol.symbol {
 
                                            Symbol::Definition((symbol_root_id, symbol_definition_id)) => {
 
                                                if symbol_root_id == target_root_id {
 
                                                    Some(symbol_definition_id)
 
                                                } else {
 
                                                    // This is imported within the target module, and not
 
                                                    // defined within the target module
 
                                                    None
 
                                                }
 
                                            },
 
                                            Symbol::Namespace(_) => {
 
                                                // We don't import a module's "module import"
 
                                                None
 
                                            }
 
                                        }
 
                                    },
 
                                    None => None
 
                                };
 

	
 
                                if symbol_definition_id.is_none() {
 
                                    return Err(
 
                                        ParseError2::new_error(&module.source, symbol.position, "Could not resolve symbol")
 
                                        ParseError::new_error(&module.source, symbol.position, "Could not resolve symbol")
 
                                    )
 
                                }
 
                                let symbol_definition_id = symbol_definition_id.unwrap();
 

	
 
                                if let Err(previous_position) = self.add_definition_symbol(
 
                                    symbol.position, SymbolKey::from_identifier(module.root_id, &symbol.alias),
 
                                    target_root_id, symbol_definition_id
 
                                ) {
 
                                    return Err(
 
                                        ParseError2::new_error(&module.source, symbol.position, "Symbol is multiply defined")
 
                                        ParseError::new_error(&module.source, symbol.position, "Symbol is multiply defined")
 
                                            .with_postfixed_info(&module.source, previous_position, "Previous definition was here")
 
                                    )
 
                                }
 
                            }
 
                        }
 
                    }
 
                }
 
            }
 
        }
 
        fn find_name(heap: &Heap, root_id: RootId) -> String {
 
            let root = &heap[root_id];
 
            for pragma_id in &root.pragmas {
 
                match &heap[*pragma_id] {
 
                    Pragma::Module(module) => {
 
                        return String::from_utf8_lossy(&module.value).to_string()
 
                    },
 
                    _ => {},
 
                }
 
            }
 

	
 
            return String::from("Unknown")
 
        }
 

	
 
        debug_assert_eq!(
 
            self.symbol_lookup.len(), lookup_reserve_size,
 
            "miscalculated reserved size for symbol lookup table"
 
        );
 
        Ok(())
 
    }
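To make the second pass concrete with hypothetical names: if a module "app" contains "import utils::max as largest", the pass above inserts

    SymbolKey { module_id: <app RootId>, symbol_name: b"largest" }
        -> Symbol::Definition((<utils RootId>, <DefinitionId of max>))

so that later lookups from within "app" resolve "largest" directly to the definition in "utils".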
 

	
 
    /// Resolves a module by its defined name
 
    pub(crate) fn resolve_module(&self, identifier: &Vec<u8>) -> Option<RootId> {
 
        self.module_lookup.get(identifier).map(|v| *v)
 
    }
 

	
 
    pub(crate) fn resolve_symbol<'t>(
 
        &'t self, root_module_id: RootId, identifier: &[u8]
 
    ) -> Option<&'t SymbolValue> {
 
        let lookup_key = SymbolKey{ module_id: root_module_id, symbol_name: Vec::from(identifier) };
 
        self.symbol_lookup.get(&lookup_key)
 
    }
 

	
 
    pub(crate) fn resolve_identifier<'t>(
 
        &'t self, root_module_id: RootId, identifier: &Identifier
 
    ) -> Option<&'t SymbolValue> {
 
        let lookup_key = SymbolKey::from_identifier(root_module_id, identifier);
 
        self.symbol_lookup.get(&lookup_key)
 
    }
 

	
 
    /// Resolves a namespaced symbol. This method will go as far as possible in
 
    /// going to the right symbol. It will halt the search when:
 
    /// 1. Polymorphic arguments are encountered on the identifier.
 
    /// 2. A non-namespace symbol is encountered.
 
    /// 3. A part of the identifier couldn't be resolved to anything
 
    /// The returned iterator will always point to the next symbol (even if 
 
    /// nothing was found)
 
    pub(crate) fn resolve_namespaced_identifier<'t, 'i>(
 
        &'t self, root_module_id: RootId, identifier: &'i NamespacedIdentifier
 
    ) -> (Option<&'t SymbolValue>, NamespacedIdentifierIter<'i>) {
 
        let mut iter = identifier.iter();
 
        let mut symbol: Option<&SymbolValue> = None;
 
        let mut within_module_id = root_module_id;
 

	
 
        while let Some((partial, poly_args)) = iter.next() {
 
            // Lookup the symbol within the currently iterated upon module
 
            let lookup_key = SymbolKey{ module_id: within_module_id, symbol_name: Vec::from(partial) };
 
            let new_symbol = self.symbol_lookup.get(&lookup_key);
 
            
 
            match new_symbol {
 
                None => {
 
                    // Can't find anything
 
                    symbol = None;
 
                    break;
 
                },
 
                Some(new_symbol) => {
 
                    // Found something, but if we already moved to another
 
                    // module then we don't want to keep jumping across modules,
 
                    // we're only interested in symbols defined within that
 
                    // module.
 
                    match &new_symbol.symbol {
 
                        Symbol::Namespace(new_root_id) => {
 
                            if root_module_id != within_module_id {
 
                                // This new symbol is imported by a foreign
 
                                // module, so this is an error
 
                                debug_assert!(symbol.is_some());
 
                                debug_assert!(symbol.unwrap().is_namespace());
 
                                debug_assert!(iter.num_returned() > 1);
 
                                symbol = None;
 
                                break;
 
                            }
 
                            within_module_id = *new_root_id;
 
                            symbol = Some(new_symbol);
 
                        },
 
                        Symbol::Definition((definition_root_id, _)) => {
 
                            // Found a definition, but if we already jumped
 
                            // modules, then this must be defined within that
 
                            // module.
 
                            if root_module_id != within_module_id && within_module_id != *definition_root_id {
 
                                // The definition is merely imported by the target
                                // module, not defined inside it, so the lookup fails
 
                                debug_assert!(symbol.is_some());
 
                                debug_assert!(symbol.unwrap().is_namespace());
 
                                debug_assert!(iter.num_returned() > 1);
 
                                symbol = None;
 
                                break;
 
                            }
 
                            symbol = Some(new_symbol);
 
                            break;
 
                        }
 
                    }
 
                }
 
            }
 

	
 
            if poly_args.is_some() {
 
                // Polymorphic argument specification should also be a fully 
 
                // resolved result.
 
                break;
 
            }
 
        }
 

	
 
        match symbol {
 
            None => (None, iter),
 
            Some(symbol) => (Some(symbol), iter)
 
        }
 
    }
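A hypothetical walk of the loop above: resolving "utils::Shape::Circle" from module "app" (which imports "utils" as a namespace) first finds the namespace symbol for "utils" and switches within_module_id to utils' RootId, then finds the "Shape" definition there and stops; the returned iterator still has "Circle" pending for the caller to interpret (e.g. as an enum variant).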
 

	
 
    /// Attempts to add a namespace symbol. Returns `Ok` if the symbol was
 
    /// inserted. If the symbol already exists then `Err` will be returned
 
    /// together with the previous definition's source position (in the origin
 
    /// module's source file).
 
    // Note: I would love to return a reference to the value, but Rust is
 
    // preventing me from doing so... That, or I'm not smart enough...
 
    fn add_namespace_symbol(
 
        &mut self, origin_position: InputPosition, key: SymbolKey, target_module_id: RootId
 
    ) -> Result<(), InputPosition> {
 
        match self.symbol_lookup.entry(key) {
 
            Entry::Occupied(o) => Err(o.get().position),
 
            Entry::Vacant(v) => {
 
                v.insert(SymbolValue{
 
                    position: origin_position,
 
                    symbol: Symbol::Namespace(target_module_id)
 
                });
 
                Ok(())
 
            }
 
        }
 
    }
 

	
 
    /// Attempts to add a definition symbol. Returns `Ok` if the symbol was
 
    /// inserted. If the symbol already exists then `Err` will be returned
 
    /// together with the previous definition's source position (in the origin
 
    /// module's source file).
 
    fn add_definition_symbol(
 
        &mut self, origin_position: InputPosition, key: SymbolKey,
 
        target_module_id: RootId, target_definition_id: DefinitionId,
 
    ) -> Result<(), InputPosition> {
 
        match self.symbol_lookup.entry(key) {
 
            Entry::Occupied(o) => Err(o.get().position),
 
            Entry::Vacant(v) => {
 
                v.insert(SymbolValue {
 
                    position: origin_position,
 
                    symbol: Symbol::Definition((target_module_id, target_definition_id))
 
                });
 
                Ok(())
 
            }
 
        }
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/type_resolver.rs
Show inline comments
 
@@ -910,2434 +910,2434 @@ impl TypeResolvingVisitor {
 
                Definition::Enum(_) | Definition::Struct(_) => {},
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn handle_module_definition(
 
        &mut self, ctx: &mut Ctx, queue: &mut ResolveQueue, element: ResolveQueueElement
 
    ) -> VisitorResult {
 
        // Visit the definition
 
        debug_assert_eq!(ctx.module.root_id, element.root_id);
 
        self.reset();
 
        self.poly_vars.clear();
 
        self.poly_vars.extend(element.monomorph_types.iter().cloned());
 
        self.visit_definition(ctx, element.definition_id)?;
 

	
 
        // Keep resolving types
 
        self.resolve_types(ctx, queue)?;
 
        Ok(())
 
    }
 

	
 
    fn reset(&mut self) {
 
        self.definition_type = DefinitionType::None;
 
        self.poly_vars.clear();
 
        self.stmt_buffer.clear();
 
        self.expr_buffer.clear();
 
        self.var_types.clear();
 
        self.expr_types.clear();
 
        self.extra_data.clear();
 
        self.expr_queued.clear();
 
    }
 
}
 

	
 
impl Visitor2 for TypeResolvingVisitor {
 
    // Definitions
 

	
 
    fn visit_component_definition(&mut self, ctx: &mut Ctx, id: ComponentId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Component(id);
 

	
 
        let comp_def = &ctx.heap[id];
 
        debug_assert_eq!(comp_def.poly_vars.len(), self.poly_vars.len(), "component polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting component '{}': {}", &String::from_utf8_lossy(&comp_def.identifier.value), id.0.index);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        for param_id in comp_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, param.parser_type, true);
 
            debug_assert!(var_type.is_done, "expected component arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData::new_local(var_type));
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Function(id);
 

	
 
        let func_def = &ctx.heap[id];
 
        debug_assert_eq!(func_def.poly_vars.len(), self.poly_vars.len(), "function polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting function '{}': {}", &String::from_utf8_lossy(&func_def.identifier.value), id.0.index);
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        for param_id in func_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, param.parser_type, true);
 
            debug_assert!(var_type.is_done, "expected function arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData::new_local(var_type));
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    // Statements
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        // Transfer statements for traversal
 
        let block = &ctx.heap[id];
 

	
 
        for stmt_id in block.statements.clone() {
 
            self.visit_stmt(ctx, stmt_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, ctx: &mut Ctx, id: MemoryStatementId) -> VisitorResult {
 
        let memory_stmt = &ctx.heap[id];
 

	
 
        let local = &ctx.heap[memory_stmt.variable];
 
        let var_type = self.determine_inference_type_from_parser_type(ctx, local.parser_type, true);
 
        self.var_types.insert(memory_stmt.variable.upcast(), VarData::new_local(var_type));
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, ctx: &mut Ctx, id: ChannelStatementId) -> VisitorResult {
 
        let channel_stmt = &ctx.heap[id];
 

	
 
        let from_local = &ctx.heap[channel_stmt.from];
 
        let from_var_type = self.determine_inference_type_from_parser_type(ctx, from_local.parser_type, true);
 
        self.var_types.insert(from_local.this.upcast(), VarData::new_channel(from_var_type, channel_stmt.to.upcast()));
 

	
 
        let to_local = &ctx.heap[channel_stmt.to];
 
        let to_var_type = self.determine_inference_type_from_parser_type(ctx, to_local.parser_type, true);
 
        self.var_types.insert(to_local.this.upcast(), VarData::new_channel(to_var_type, channel_stmt.from.upcast()));
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        let labeled_stmt = &ctx.heap[id];
 
        let substmt_id = labeled_stmt.body;
 
        self.visit_stmt(ctx, substmt_id)
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        let if_stmt = &ctx.heap[id];
 

	
 
        let true_body_id = if_stmt.true_body;
 
        let false_body_id = if_stmt.false_body;
 
        let test_expr_id = if_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_stmt(ctx, true_body_id)?;
 
        self.visit_stmt(ctx, false_body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_while_stmt(&mut self, ctx: &mut Ctx, id: WhileStatementId) -> VisitorResult {
 
        let while_stmt = &ctx.heap[id];
 

	
 
        let body_id = while_stmt.body;
 
        let test_expr_id = while_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        let sync_stmt = &ctx.heap[id];
 
        let body_id = sync_stmt.body;
 

	
 
        self.visit_stmt(ctx, body_id)
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        let return_stmt = &ctx.heap[id];
 
        let expr_id = return_stmt.expression;
 

	
 
        self.visit_expr(ctx, expr_id)
 
    }
 

	
 
    fn visit_assert_stmt(&mut self, ctx: &mut Ctx, id: AssertStatementId) -> VisitorResult {
 
        let assert_stmt = &ctx.heap[id];
 
        let test_expr_id = assert_stmt.expression;
 

	
 
        self.visit_expr(ctx, test_expr_id)
 
    }
 

	
 
    fn visit_new_stmt(&mut self, ctx: &mut Ctx, id: NewStatementId) -> VisitorResult {
 
        let new_stmt = &ctx.heap[id];
 
        let call_expr_id = new_stmt.expression;
 

	
 
        self.visit_call_expr(ctx, call_expr_id)
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        let expr_stmt = &ctx.heap[id];
 
        let subexpr_id = expr_stmt.expression;
 

	
 
        self.visit_expr(ctx, subexpr_id)
 
    }
 

	
 
    // Expressions
 

	
 
    fn visit_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let assign_expr = &ctx.heap[id];
 
        let left_expr_id = assign_expr.left;
 
        let right_expr_id = assign_expr.right;
 

	
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.visit_expr(ctx, right_expr_id)?;
 

	
 
        self.progress_assignment_expr(ctx, id)
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let conditional_expr = &ctx.heap[id];
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        self.expr_types.insert(test_expr_id, InferenceType::new(false, true, vec![InferenceTypePart::Bool]));
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_expr(ctx, true_expr_id)?;
 
        self.visit_expr(ctx, false_expr_id)?;
 

	
 
        self.progress_conditional_expr(ctx, id)
 
    }
 

	
 
    fn visit_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let binary_expr = &ctx.heap[id];
 
        let lhs_expr_id = binary_expr.left;
 
        let rhs_expr_id = binary_expr.right;
 

	
 
        self.visit_expr(ctx, lhs_expr_id)?;
 
        self.visit_expr(ctx, rhs_expr_id)?;
 

	
 
        self.progress_binary_expr(ctx, id)
 
    }
 

	
 
    fn visit_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let unary_expr = &ctx.heap[id];
 
        let arg_expr_id = unary_expr.expression;
 

	
 
        self.visit_expr(ctx, arg_expr_id)?;
 

	
 
        self.progress_unary_expr(ctx, id)
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let indexing_expr = &ctx.heap[id];
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, index_expr_id)?;
 

	
 
        self.progress_indexing_expr(ctx, id)
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let slicing_expr = &ctx.heap[id];
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, from_expr_id)?;
 
        self.visit_expr(ctx, to_expr_id)?;
 

	
 
        self.progress_slicing_expr(ctx, id)
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let select_expr = &ctx.heap[id];
 
        let subject_expr_id = select_expr.subject;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        self.progress_select_expr(ctx, id)
 
    }
 

	
 
    fn visit_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let array_expr = &ctx.heap[id];
 
        // TODO: @performance
 
        for element_id in array_expr.elements.clone().into_iter() {
 
            self.visit_expr(ctx, element_id)?;
 
        }
 

	
 
        self.progress_array_expr(ctx, id)
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let literal_expr = &ctx.heap[id];
 
        match &literal_expr.value {
 
            Literal::Null | Literal::False | Literal::True |
 
            Literal::Integer(_) | Literal::Character(_) => {
 
                // No subexpressions
 
            },
 
            Literal::Struct(literal) => {
 
                // TODO: @performance
 
                let expr_ids: Vec<_> = literal.fields
 
                    .iter()
 
                    .map(|f| f.value)
 
                    .collect();
 

	
 
                self.insert_initial_struct_polymorph_data(ctx, id);
 

	
 
                for expr_id in expr_ids {
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 
            }
 
        }
 

	
 
        self.progress_literal_expr(ctx, id)
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        self.insert_initial_call_polymorph_data(ctx, id);
 

	
 
        // TODO: @performance
 
        let call_expr = &ctx.heap[id];
 
        for arg_expr_id in call_expr.arguments.clone() {
 
            self.visit_expr(ctx, arg_expr_id)?;
 
        }
 

	
 
        self.progress_call_expr(ctx, id)
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let var_expr = &ctx.heap[id];
 
        debug_assert!(var_expr.declaration.is_some());
 
        let var_data = self.var_types.get_mut(var_expr.declaration.as_ref().unwrap()).unwrap();
 
        var_data.used_at.push(upcast_id);
 

	
 
        self.progress_variable_expr(ctx, id)
 
    }
 
}
 

	
 
macro_rules! debug_assert_expr_ids_unique_and_known {
 
    // Base case for a single expression ID
 
    ($resolver:ident, $id:ident) => {
 
        if cfg!(debug_assertions) {
 
            debug_assert!($resolver.expr_types.contains_key(&$id));
 
        }
 
    };
 
    // Base case for two expression IDs
 
    ($resolver:ident, $id1:ident, $id2:ident) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2);
 
    };
 
    // Generic case
 
    ($resolver:ident, $id1:ident, $id2:ident, $($tail:ident),+) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2, $($tail),+);
 
    };
 
}
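// Example use (hypothetical call site), only active when debug assertions are
// enabled: debug_assert_expr_ids_unique_and_known!(self, upcast_id, arg1_id, arg2_id);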
 

	
 
macro_rules! debug_assert_ptrs_distinct {
 
    // Base case
 
    ($ptr1:ident, $ptr2:ident) => {
 
        debug_assert!(!std::ptr::eq($ptr1, $ptr2));
 
    };
 
    // Generic case
 
    ($ptr1:ident, $ptr2:ident, $($tail:ident),+) => {
 
        debug_assert_ptrs_distinct!($ptr1, $ptr2);
 
        debug_assert_ptrs_distinct!($ptr2, $($tail),+);
 
    };
 
}
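// Example use (hypothetical call site), guarding the no-aliasing assumption behind
// the raw `*mut` pointers used below: debug_assert_ptrs_distinct!(signature_type, expr_type);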
 

	
 
impl TypeResolvingVisitor {
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError> {
 
        // Keep inferring until we can no longer make any progress
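        // (A worklist: progressing one expression may queue its parent or its
        // argument expressions again, so we iterate until the queue drains.)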
 
        while let Some(next_expr_id) = self.expr_queued.iter().next() {
 
            let next_expr_id = *next_expr_id;
 
            self.expr_queued.remove(&next_expr_id);
 
            self.progress_expr(ctx, next_expr_id)?;
 
        }
 

	
 
        // Check whether we have all the types we need. If we are typechecking a
 
        // polymorphic procedure more than once, then the AST was already annotated
 
        // while checking an earlier monomorph; in that case we only verify the
 
        // inferred types and do not annotate the AST again.
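        // (For example, a function previously checked for one monomorph keeps its
        // AST annotations; the types inferred for the new monomorph are only
        // compared against them in debug builds.)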
 
        let definition_id = match &self.definition_type {
 
            DefinitionType::Component(id) => id.upcast(),
 
            DefinitionType::Function(id) => id.upcast(),
 
            _ => unreachable!(),
 
        };
 

	
 
        let already_checked = ctx.types.get_base_definition(&definition_id).unwrap().has_any_monomorph();
 
        for (expr_id, expr_type) in self.expr_types.iter_mut() {
 
            if !expr_type.is_done {
 
                // Auto-infer numberlike/integerlike types to a regular int
 
                if expr_type.parts.len() == 1 && expr_type.parts[0] == InferenceTypePart::IntegerLike {
 
                    expr_type.parts[0] = InferenceTypePart::Int;
 
                } else {
 
                    let expr = &ctx.heap[*expr_id];
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Could not fully infer the type of this expression (got '{}')",
 
                            expr_type.display_name(&ctx.heap)
 
                        )
 
                    ))
 
                }
 
            }
 

	
 
            if !already_checked {
 
                let concrete_type = ctx.heap[*expr_id].get_type_mut();
 
                expr_type.write_concrete_type(concrete_type);
 
            } else {
 
                if cfg!(debug_assertions) {
 
                    let mut concrete_type = ConcreteType::default();
 
                    expr_type.write_concrete_type(&mut concrete_type);
 
                    debug_assert_eq!(*ctx.heap[*expr_id].get_type(), concrete_type);
 
                }
 
            }
 
        }
 

	
 
        // All types are fine
 
        ctx.types.add_monomorph(&definition_id, self.poly_vars.clone());
 

	
 
        // Check all things we need to monomorphize
 
        // TODO: Struct/enum/union monomorphization
 
        for (expr_id, extra_data) in self.extra_data.iter() {
 
            if extra_data.poly_vars.is_empty() { continue; }
 

	
 
            // Retrieve polymorph variable specification. Those of struct 
 
            // literals and those of procedure calls need to be fully inferred.
 
            // The remaining ones (e.g. select expressions) allow partial 
 
            // inference of types, as long as the accessed field's type is
 
            // fully inferred.
 
            let needs_full_inference = match &ctx.heap[*expr_id] {
 
                Expression::Call(_) => true,
 
                Expression::Literal(_) => true,
 
                _ => false
 
            };
 

	
 
            if needs_full_inference {
 
                let mut monomorph_types = Vec::with_capacity(extra_data.poly_vars.len());
 
                for (poly_idx, poly_type) in extra_data.poly_vars.iter().enumerate() {
 
                    if !poly_type.is_done {
 
                        // TODO: Single clean function for function signatures and polyvars.
 
                        // TODO: Better error message
 
                        let expr = &ctx.heap[*expr_id];
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, expr.position(),
 
                            &format!(
 
                                "Could not fully infer the type of polymorphic variable {} of this expression (got '{}')",
 
                                poly_idx, poly_type.display_name(&ctx.heap)
 
                            )
 
                        ))
 
                    }
 

	
 
                    let mut concrete_type = ConcreteType::default();
 
                    poly_type.write_concrete_type(&mut concrete_type);
 
                    monomorph_types.insert(poly_idx, concrete_type);
 
                }
 

	
 
                // Resolve to the appropriate expression and instantiate 
 
                // monomorphs.
 
                match &ctx.heap[*expr_id] {
 
                    Expression::Call(call_expr) => {
 
                        // Add to type table if not yet typechecked
 
                        if let Method::Symbolic(symbolic) = &call_expr.method {
 
                            let definition_id = symbolic.definition.unwrap();
 
                            if !ctx.types.has_monomorph(&definition_id, &monomorph_types) {
 
                                let root_id = ctx.types
 
                                    .get_base_definition(&definition_id)
 
                                    .unwrap()
 
                                    .ast_root;
 

	
 
                                // Pre-emptively add the monomorph to the type table, but
 
                                // we still need to perform typechecking on it
 
                                // TODO: Unsure about this, performance wise
 
                                let queue_element = ResolveQueueElement{
 
                                    root_id,
 
                                    definition_id,
 
                                    monomorph_types,
 
                                };
 
                                if !queue.contains(&queue_element) {
 
                                    queue.push(queue_element);
 
                                }
 
                            }
 
                        }
 
                    },
 
                    Expression::Literal(lit_expr) => {
 
                        let lit_struct = lit_expr.value.as_struct();
 
                        let definition_id = lit_struct.definition.as_ref().unwrap();
 
                        if !ctx.types.has_monomorph(definition_id, &monomorph_types) {
 
                            ctx.types.add_monomorph(definition_id, monomorph_types);
 
                        }
 
                    },
 
                    _ => unreachable!("needs fully inference, but not a struct literal or call expression")
 
                }
 
            } // else: was just a helper structure...
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError> {
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let id = expr.this;
 
                self.progress_assignment_expr(ctx, id)
 
            },
 
            Expression::Conditional(expr) => {
 
                let id = expr.this;
 
                self.progress_conditional_expr(ctx, id)
 
            },
 
            Expression::Binary(expr) => {
 
                let id = expr.this;
 
                self.progress_binary_expr(ctx, id)
 
            },
 
            Expression::Unary(expr) => {
 
                let id = expr.this;
 
                self.progress_unary_expr(ctx, id)
 
            },
 
            Expression::Indexing(expr) => {
 
                let id = expr.this;
 
                self.progress_indexing_expr(ctx, id)
 
            },
 
            Expression::Slicing(expr) => {
 
                let id = expr.this;
 
                self.progress_slicing_expr(ctx, id)
 
            },
 
            Expression::Select(expr) => {
 
                let id = expr.this;
 
                self.progress_select_expr(ctx, id)
 
            },
 
            Expression::Array(expr) => {
 
                let id = expr.this;
 
                self.progress_array_expr(ctx, id)
 
            },
 
            Expression::Literal(expr) => {
 
                let id = expr.this;
 
                self.progress_literal_expr(ctx, id)
 
            },
 
            Expression::Call(expr) => {
 
                let id = expr.this;
 
                self.progress_call_expr(ctx, id)
 
            },
 
            Expression::Variable(expr) => {
 
                let id = expr.this;
 
                self.progress_variable_expr(ctx, id)
 
            }
 
        }
 
    }
 

	
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> Result<(), ParseError> {
 
        use AssignmentOperator as AO;
 

	
 
        // TODO: Assignable check
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.left;
 
        let arg2_expr_id = expr.right;
 

	
 
        debug_log!("Assignment expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let progress_base = match expr.operation {
 
            AO::Set =>
 
                false,
 
            AO::Multiplied | AO::Divided | AO::Added | AO::Subtracted =>
 
                self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?,
 
            AO::Remained | AO::ShiftedLeft | AO::ShiftedRight |
 
            AO::BitwiseAnded | AO::BitwiseXored | AO::BitwiseOred =>
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?,
 
        };
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_base || progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 

	
 
        if progress_base || progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> Result<(), ParseError> {
 
        // Note: test expression type is already enforced
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.true_expression;
 
        let arg2_expr_id = expr.false_expression;
 

	
 
        debug_log!("Conditional expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_expr_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> Result<(), ParseError> {
 
        // Note: our expression type might be fixed by our parent, but we still
 
        // need to make sure it matches the type associated with our operation.
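        // For example, `a + b` forces the expression and both arguments to one
        // shared number-like type, while `a < b` forces the arguments to a shared
        // number-like type and the expression itself to a boolean (see the match
        // arms below).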
 
        use BinaryOperator as BO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_id = expr.left;
 
        let arg2_id = expr.right;
 

	
 
        debug_log!("Binary expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg1 type: {}", self.expr_types.get(&arg1_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type: {}", self.expr_types.get(&arg2_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = match expr.operation {
 
            BO::Concatenate => {
 
                // Arguments may be arrays/slices, output is always an array
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &ARRAYLIKE_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &ARRAYLIKE_TEMPLATE)?;
 

	
 
                // If they're all arraylike, then we want the subtype to match
 
                let (subtype_expr, subtype_arg1, subtype_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 1)?;
 

	
 
                (progress_expr || subtype_expr, progress_arg1 || subtype_arg1, progress_arg2 || subtype_arg2)
 
            },
 
            BO::LogicalOr | BO::LogicalAnd => {
 
                // Forced boolean on all
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &BOOL_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &BOOL_TEMPLATE)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::BitwiseOr | BO::BitwiseXor | BO::BitwiseAnd | BO::Remainder | BO::ShiftLeft | BO::ShiftRight => {
 
                // All equal of integer type
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
            BO::Equality | BO::Inequality => {
 
                // Equal2 on args, forced boolean output
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let (progress_arg1, progress_arg2) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, arg1_id, 0, arg2_id, 0)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::LessThan | BO::GreaterThan | BO::LessThanEqual | BO::GreaterThanEqual => {
 
                // Equal2 on args with numberlike type, forced boolean output
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg_base = self.apply_forced_constraint(ctx, arg1_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_arg1, progress_arg2) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, arg1_id, 0, arg2_id, 0)?;
 

	
 
                (progress_expr, progress_arg_base || progress_arg1, progress_arg_base || progress_arg2)
 
            },
 
            BO::Add | BO::Subtract | BO::Multiply | BO::Divide => {
 
                // All equal of number type
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg1 type [{}]: {}", progress_arg1, self.expr_types.get(&arg1_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Arg2 type [{}]: {}", progress_arg2, self.expr_types.get(&arg2_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_id); }
 
        if progress_arg2 { self.queue_expr(arg2_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> Result<(), ParseError> {
 
        use UnaryOperation as UO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg_id = expr.expression;
 

	
 
        debug_log!("Unary expr '{:?}': {}", expr.operation, upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Arg  type: {}", self.expr_types.get(&arg_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let (progress_expr, progress_arg) = match expr.operation {
 
            UO::Positive | UO::Negative => {
 
                // Equal types of numeric class
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::BitwiseNot | UO::PreIncrement | UO::PreDecrement | UO::PostIncrement | UO::PostDecrement => {
 
                // Equal types of integer class
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::LogicalNot => {
 
                // Both booleans
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg = self.apply_forced_constraint(ctx, arg_id, &BOOL_TEMPLATE)?;
 
                (progress_expr, progress_arg)
 
            }
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Arg  type [{}]: {}", progress_arg, self.expr_types.get(&arg_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg { self.queue_expr(arg_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let subject_id = expr.subject;
 
        let index_id = expr.index;
 

	
 
        debug_log!("Indexing expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Subject type: {}", self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Index   type: {}", self.expr_types.get(&index_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // Make sure subject is arraylike and index is integerlike
 
        let progress_subject_base = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
        let progress_index = self.apply_forced_constraint(ctx, index_id, &INTEGERLIKE_TEMPLATE)?;
 

	
 
        // Make sure if output is of T then subject is Array<T>
 
        let (progress_expr, progress_subject) =
 
            self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, subject_id, 1)?;
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject_base || progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Index   type [{}]: {}", progress_index, self.expr_types.get(&index_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_subject_base || progress_subject { self.queue_expr(subject_id); }
 
        if progress_index { self.queue_expr(index_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let subject_id = expr.subject;
 
        let from_id = expr.from_index;
 
        let to_id = expr.to_index;
 

	
 
        debug_log!("Slicing expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Subject type: {}", self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - FromIdx type: {}", self.expr_types.get(&from_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - ToIdx   type: {}", self.expr_types.get(&to_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // Make sure subject is arraylike and indices are of equal integerlike
 
        let progress_subject_base = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
        let progress_idx_base = self.apply_forced_constraint(ctx, from_id, &INTEGERLIKE_TEMPLATE)?;
 
        let (progress_from, progress_to) = self.apply_equal2_constraint(ctx, upcast_id, from_id, 0, to_id, 0)?;
 

	
 
        // Make sure if output is of T then subject is Array<T>
 
        let (progress_expr, progress_subject) =
 
            self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, subject_id, 1)?;
 

	
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject_base || progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - FromIdx type [{}]: {}", progress_idx_base || progress_from, self.expr_types.get(&from_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - ToIdx   type [{}]: {}", progress_idx_base || progress_to, self.expr_types.get(&to_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_subject_base || progress_subject { self.queue_expr(subject_id); }
 
        if progress_idx_base || progress_from { self.queue_expr(from_id); }
 
        if progress_idx_base || progress_to { self.queue_expr(to_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        
 
        debug_log!("Select expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Subject type: {}", self.expr_types.get(&ctx.heap[id].subject).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let expr = &mut ctx.heap[id];
 
        let subject_id = expr.subject;
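        // The helper below scans an inference type for its first concrete part:
        // Ok(Some(definition)) if that part is a user-defined type instance,
        // Err(()) if it is concrete but not an instance (field access is then
        // impossible), and Ok(None) if nothing is concrete yet.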
 

	
 
        fn determine_inference_type_instance<'a>(types: &'a TypeTable, infer_type: &InferenceType) -> Result<Option<&'a DefinedType>, ()> {
 
            for part in &infer_type.parts {
 
                if part.is_marker() || !part.is_concrete() {
 
                    continue;
 
                }
 

	
 
                // Part is concrete, check if it is an instance of something
 
                if let InferenceTypePart::Instance(definition_id, _num_sub) = part {
 
                    // Lookup type definition and ensure the specified field 
 
                    // name exists on the struct
 
                    let definition = types.get_base_definition(definition_id);
 
                    debug_assert!(definition.is_some());
 
                    let definition = definition.unwrap();
 

	
 
                    return Ok(Some(definition))
 
                } else {
 
                    // Expected an instance of something
 
                    return Err(())
 
                }
 
            }
 

	
 
            // Nothing is concrete yet
 
            Ok(None)
 
        }
 

	
 
        let (progress_subject, progress_expr) = match &mut expr.field {
 
            Field::Length => {
 
                let progress_subject = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 

	
 
                (progress_subject, progress_expr)
 
            },
 
            Field::Symbolic(field) => {
 
                // Retrieve the struct definition id and field index if possible 
 
                // and not previously determined
 
                if field.definition.is_none() {
 
                    // Not yet known, check if we can determine it
 
                    let subject_type = self.expr_types.get(&subject_id).unwrap();
 
                    let type_def = determine_inference_type_instance(&ctx.types, subject_type);
 

	
 
                    match type_def {
 
                        Ok(Some(type_def)) => {
 
                            // Subject type is known, check if it is a 
 
                            // struct and the field exists on the struct
 
                            let struct_def = if let DefinedTypeVariant::Struct(struct_def) = &type_def.definition {
 
                                struct_def
 
                            } else {
 
                                return Err(ParseError::new_error(
 
                                    &ctx.module.source, field.identifier.position,
 
                                    &format!(
 
                                        "Can only apply field access to structs, got a subject of type '{}'",
 
                                        subject_type.display_name(&ctx.heap)
 
                                    )
 
                                ));
 
                            };
 

	
 
                            for (field_def_idx, field_def) in struct_def.fields.iter().enumerate() {
 
                                if field_def.identifier == field.identifier {
 
                                    // Set field definition and index
 
                                    field.definition = Some(type_def.ast_definition);
 
                                    field.field_idx = field_def_idx;
 
                                    break;
 
                                }
 
                            }
 

	
 
                            if field.definition.is_none() {
 
                                let field_position = field.identifier.position;
 
                                let ast_struct_def = ctx.heap[type_def.ast_definition].as_struct();
 
                                return Err(ParseError::new_error(
 
                                    &ctx.module.source, field_position,
 
                                    &format!(
 
                                        "This field does not exist on the struct '{}'",
 
                                        &String::from_utf8_lossy(&ast_struct_def.identifier.value)
 
                                    )
 
                                ))
 
                            }
 

	
 
                            // Encountered definition and field index for the
 
                            // first time
 
                            self.insert_initial_select_polymorph_data(ctx, id);
 
                        },
 
                        Ok(None) => {
 
                            // Type of subject is not yet known, so we 
 
                            // cannot make any progress yet
 
                            return Ok(())
 
                        },
 
                        Err(()) => {
 
                            return Err(ParseError::new_error(
 
                                &ctx.module.source, field.identifier.position,
 
                                &format!(
 
                                    "Can only apply field access to structs, got a subject of type '{}'",
 
                                    subject_type.display_name(&ctx.heap)
 
                                )
 
                            ));
 
                        }
 
                    }
 
                }
 

	
 
                // If here then field definition and index are known, and the
 
                // initial type (based on the struct's definition) has been
 
                // applied.
 
                // Check to see if we can infer anything about the subject's and
 
                // the field's polymorphic variables
 
                let poly_data = self.extra_data.get_mut(&upcast_id).unwrap();
 
                let mut poly_progress = HashSet::new();
 
                
 
                // Apply to struct's type
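                // (Raw pointers are used here, as in the other progress_* methods,
                // because one type lives in `extra_data` and the other in
                // `expr_types`: mutating both through a single constraint call would
                // otherwise be rejected by the borrow checker. This relies on the
                // two pointers never aliasing.)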
 
                let signature_type: *mut _ = &mut poly_data.embedded[0];
 
                let subject_type: *mut _ = self.expr_types.get_mut(&subject_id).unwrap();
 

	
 
                let (_, progress_subject) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, Some(subject_id), poly_data, &mut poly_progress,
 
                    signature_type, 0, subject_type, 0
 
                )?;
 

	
 
                if progress_subject {
 
                    self.expr_queued.insert(subject_id);
 
                }
 
                
 
                // Apply to field's type
 
                let signature_type: *mut _ = &mut poly_data.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, None, poly_data, &mut poly_progress, 
 
                    signature_type, 0, expr_type, 0
 
                )?;
 

	
 
                if progress_expr {
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 

	
 
                // Reapply progress in polymorphic variables to struct's type
 
                let signature_type: *mut _ = &mut poly_data.embedded[0];
 
                let subject_type: *mut _ = self.expr_types.get_mut(&subject_id).unwrap();
 
                
 
                let progress_subject = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    poly_data, &poly_progress, signature_type, subject_type
 
                );
 

	
 
                let signature_type: *mut _ = &mut poly_data.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    poly_data, &poly_progress, signature_type, expr_type
 
                );
 

	
 
                (progress_subject, progress_expr)
 
            }
 
        };
 

	
 
        if progress_subject { self.queue_expr(subject_id); }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let expr_elements = expr.elements.clone(); // TODO: @performance
 

	
 
        debug_log!("Array expr ({} elements): {}", expr_elements.len(), upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // All elements should have an equal type
 
        let progress = self.apply_equal_n_constraint(ctx, upcast_id, &expr_elements)?;
 
        for (progress_arg, arg_id) in progress.iter().zip(expr_elements.iter()) {
 
            if *progress_arg {
 
                self.queue_expr(*arg_id);
 
            }
 
        }
 

	
 
        // And the output should be an array of the element types
 
        let mut expr_progress = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
        if !expr_elements.is_empty() {
 
            let first_arg_id = expr_elements[0];
 
            let (inner_expr_progress, arg_progress) = self.apply_equal2_constraint(
 
                ctx, upcast_id, upcast_id, 1, first_arg_id, 0
 
            )?;
 

	
 
            expr_progress = expr_progress || inner_expr_progress;
 

	
 
            // Note that if the array type progressed the type of the arguments,
 
            // then we should enqueue this progression function again
 
            // TODO: @fix Make apply_equal_n accept a start idx as well
 
            if arg_progress { self.queue_expr(upcast_id); }
 
        }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type [{}]: {}", expr_progress, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if expr_progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 

	
 
        debug_log!("Literal expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let progress_expr = match &expr.value {
 
            Literal::Null => {
 
                self.apply_forced_constraint(ctx, upcast_id, &MESSAGE_TEMPLATE)?
 
            },
 
            Literal::Integer(_) => {
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?
 
            },
 
            Literal::True | Literal::False => {
 
                self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?
 
            },
 
            Literal::Character(_) => todo!("character literals"),
 
            Literal::Struct(data) => {
 
                let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 
                for poly in &extra.poly_vars {
 
                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
 
                }
 
                let mut poly_progress = HashSet::new();
 
                debug_assert_eq!(extra.embedded.len(), data.fields.len());
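                // (`extra.embedded` is assumed to hold one signature type per field
                // of the struct definition and `extra.returned` the struct type
                // itself, as prepared by insert_initial_struct_polymorph_data.)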
 

	
 
                debug_log!(" * During (inferring types from fields and struct type):");
 

	
 
                // Mutually infer field signature/expression types
 
                for (field_idx, field) in data.fields.iter().enumerate() {
 
                    let field_expr_id = field.value;
 
                    let signature_type: *mut _ = &mut extra.embedded[field_idx];
 
                    let field_type: *mut _ = self.expr_types.get_mut(&field_expr_id).unwrap();
 
                    let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
                        ctx, upcast_id, Some(field_expr_id), extra, &mut poly_progress,
 
                        signature_type, 0, field_type, 0
 
                    )?;
 

	
 
                    debug_log!(
 
                        "   - Field {} type | sig: {}, field: {}", field_idx,
 
                        unsafe{&*signature_type}.display_name(&ctx.heap),
 
                        unsafe{&*field_type}.display_name(&ctx.heap)
 
                    );
 

	
 
                    if progress_arg {
 
                        self.expr_queued.insert(field_expr_id);
 
                    }
 
                }
 

	
 
                debug_log!("   - Field poly progress | {:?}", poly_progress);
 

	
 
                // Same for the type of the struct itself
 
                let signature_type: *mut _ = &mut extra.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
                let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, None, extra, &mut poly_progress,
 
                    signature_type, 0, expr_type, 0
 
                )?;
 

	
 
                debug_log!(
 
                    "   - Ret type | sig: {}, expr: {}",
 
                    unsafe{&*signature_type}.display_name(&ctx.heap),
 
                    unsafe{&*expr_type}.display_name(&ctx.heap)
 
                );
 
                debug_log!("   - Ret poly progress | {:?}", poly_progress);
 

	
 
                if progress_expr {
 
                    // TODO: @cleanup, cannot call utility self.queue_parent thingo
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 

	
 
                // Check which expressions use the polymorphic arguments. If the
 
                // polymorphic variables have been progressed then we try to 
 
                // progress them inside the expression as well.
 
                debug_log!(" * During (reinferring from progressed polyvars):");
 

	
 
                // For all field expressions
 
                for field_idx in 0..extra.embedded.len() {
 
                    debug_assert_eq!(field_idx, data.fields[field_idx].field_idx, "confusing, innit?");
 
                    let signature_type: *mut _ = &mut extra.embedded[field_idx];
 
                    let field_expr_id = data.fields[field_idx].value;
 
                    let field_type: *mut _ = self.expr_types.get_mut(&field_expr_id).unwrap();
 

	
 
                    let progress_arg = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                        extra, &poly_progress, signature_type, field_type
 
                    );
 

	
 
                    debug_log!(
 
                        "   - Field {} type | sig: {}, field: {}", field_idx,
 
                        unsafe{&*signature_type}.display_name(&ctx.heap),
 
                        unsafe{&*field_type}.display_name(&ctx.heap)
 
                    );
 
                    if progress_arg {
 
                        self.expr_queued.insert(field_expr_id);
 
                    }
 
                }
 
                
 
                // For the return type
 
                let signature_type: *mut _ = &mut extra.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    extra, &poly_progress, signature_type, expr_type
 
                );
 

	
 
                progress_expr
 
            }
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // TODO: FIX!!!!
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    // TODO: @cleanup, see how this can be cleaned up once I implement
 
    //  polymorphic struct/enum/union literals. These likely follow the same
 
    //  pattern as here.
 
    fn progress_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 

	
 
        debug_log!("Call expr '{}': {}", match &expr.method {
 
            Method::Create => String::from("create"),
 
            Method::Fires => String::from("fires"),
 
            Method::Get => String::from("get"),
 
            Method::Put => String::from("put"),
 
            Method::Symbolic(method) => String::from_utf8_lossy(&method.identifier.value).to_string()
 
        },upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 
        debug_log!(" * During (inferring types from arguments and return type):");
 

	
 
        // Check if we can make progress using the arguments and/or return types
 
        // while keeping track of the polyvars we've extended
 
        let mut poly_progress = HashSet::new();
 
        debug_assert_eq!(extra.embedded.len(), expr.arguments.len());
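        // (`extra.embedded` is assumed to hold the parameter types from the callee's
        // signature and `extra.returned` its return type; matching them against the
        // argument and expression types below also progresses the polymorphic
        // variables they reference.)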
 

	
 
        for (arg_idx, arg_id) in expr.arguments.clone().into_iter().enumerate() {
 
            let signature_type: *mut _ = &mut extra.embedded[arg_idx];
 
            let argument_type: *mut _ = self.expr_types.get_mut(&arg_id).unwrap();
 
            let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
                ctx, upcast_id, Some(arg_id), extra, &mut poly_progress,
 
                signature_type, 0, argument_type, 0
 
            )?;
 

	
 
            debug_log!(
 
                "   - Arg {} type | sig: {}, arg: {}", arg_idx,
 
                unsafe{&*signature_type}.display_name(&ctx.heap), 
 
                unsafe{&*argument_type}.display_name(&ctx.heap));
 

	
 
            if progress_arg {
 
                // Progressed argument expression
 
                self.expr_queued.insert(arg_id);
 
            }
 
        }
 

	
 
        // Do the same for the return type
 
        let signature_type: *mut _ = &mut extra.returned;
 
        let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
        let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
            ctx, upcast_id, None, extra, &mut poly_progress,
 
            signature_type, 0, expr_type, 0
 
        )?;
 

	
 
        debug_log!(
 
            "   - Ret type | sig: {}, expr: {}", 
 
            unsafe{&*signature_type}.display_name(&ctx.heap), 
 
            unsafe{&*expr_type}.display_name(&ctx.heap)
 
        );
 

	
 
        if progress_expr {
 
            // TODO: @cleanup, cannot call utility self.queue_parent thingo
 
            if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                self.expr_queued.insert(parent_id);
 
            }
 
        }
 

	
 
        // If we did not have an error in the polymorph inference above, then
 
        // reapplying the polymorph type to each argument type and the return
 
        // type should always succeed.
 
        debug_log!(" * During (reinferring from progressed polyvars):");
 
        for (poly_idx, poly_var) in extra.poly_vars.iter().enumerate() {
 
            debug_log!("   - Poly {} | sig: {}", poly_idx, poly_var.display_name(&ctx.heap));
 
        }
 
        // TODO: @performance If the algorithm is changed to be more "on demand
 
        //  argument re-evaluation", instead of "all-argument re-evaluation",
 
        //  then this is no longer true
 
        for arg_idx in 0..extra.embedded.len() {
 
            let signature_type: *mut _ = &mut extra.embedded[arg_idx];
 
            let arg_expr_id = expr.arguments[arg_idx];
 
            let arg_type: *mut _ = self.expr_types.get_mut(&arg_expr_id).unwrap();
 
            
 
            let progress_arg = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                extra, &poly_progress,
 
                signature_type, arg_type
 
            );
 
            
 
            debug_log!(
 
                "   - Arg {} type | sig: {}, arg: {}", arg_idx, 
 
                unsafe{&*signature_type}.display_name(&ctx.heap), 
 
                unsafe{&*arg_type}.display_name(&ctx.heap)
 
            );
 
            if progress_arg {
 
                self.expr_queued.insert(arg_expr_id);
 
            }
 
        }
 

	
 
        // Once more for the return type
 
        let signature_type: *mut _ = &mut extra.returned;
 
        let ret_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
        let progress_ret = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
            extra, &poly_progress, signature_type, ret_type
 
        );
 
        debug_log!(
 
            "   - Ret type | sig: {}, arg: {}", 
 
            unsafe{&*signature_type}.display_name(&ctx.heap), 
 
            unsafe{&*ret_type}.display_name(&ctx.heap)
 
        );
 
        if progress_ret {
 
            self.queue_expr_parent(ctx, upcast_id);
 
        }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> Result<(), ParseError2> {
 
    fn progress_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> Result<(), ParseError> {
 
        let upcast_id = id.upcast();
 
        let var_expr = &ctx.heap[id];
 
        let var_id = var_expr.declaration.unwrap();
 

	
 
        debug_log!("Variable expr '{}': {}", &String::from_utf8_lossy(&ctx.heap[var_id].identifier().value), upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Var  type: {}", self.var_types.get(&var_id).unwrap().var_type.display_name(&ctx.heap));
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // Retrieve shared variable type and expression type and apply inference
 
        let var_data = self.var_types.get_mut(&var_id).unwrap();
 
        let expr_type = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            &mut var_data.var_type as *mut _, 0, expr_type, 0
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            let var_decl = &ctx.heap[var_id];
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, var_decl.position(),
 
                &format!(
 
                    "Conflicting types for this variable, previously assigned the type '{}'",
 
                    var_data.var_type.display_name(&ctx.heap)
 
                )
 
            ).with_postfixed_info(
 
                &ctx.module.source, var_expr.position,
 
                &format!(
 
                    "But inferred to have incompatible type '{}' here",
 
                    expr_type.display_name(&ctx.heap)
 
                )
 
            ))
 
        }
 

	
 
        let progress_var = infer_res.modified_lhs();
 
        let progress_expr = infer_res.modified_rhs();
 

	
 
        if progress_var {
 
            // Let other variable expressions using this type progress as well
 
            for other_expr in var_data.used_at.iter() {
 
                if *other_expr != upcast_id {
 
                    self.expr_queued.insert(*other_expr);
 
                }
 
            }
 

	
 
            // Let a linked port know that our type has updated
 
            if let Some(linked_id) = var_data.linked_var {
 
                // Only perform one-way inference to prevent updating our type, as that
 
                // would lead to an inconsistency
 
                let var_type: *mut _ = &mut var_data.var_type;
 
                let link_data = self.var_types.get_mut(&linked_id).unwrap();
 

	
 
                debug_assert!(
 
                    unsafe{&*var_type}.parts[0] == InferenceTypePart::Input ||
 
                    unsafe{&*var_type}.parts[0] == InferenceTypePart::Output
 
                );
 
                debug_assert!(
 
                    link_data.var_type.parts[0] == InferenceTypePart::Input ||
 
                    link_data.var_type.parts[0] == InferenceTypePart::Output
 
                );
 
                match InferenceType::infer_subtree_for_single_type(&mut link_data.var_type, 1, &unsafe{&*var_type}.parts, 1) {
 
                    SingleInferenceResult::Modified => {
 
                        for other_expr in &link_data.used_at {
 
                            self.expr_queued.insert(*other_expr);
 
                        }
 
                    },
 
                    SingleInferenceResult::Unmodified => {},
 
                    SingleInferenceResult::Incompatible => {
 
                        let var_data = self.var_types.get(&var_id).unwrap();
 
                        let link_data = self.var_types.get(&linked_id).unwrap();
 
                        let var_decl = &ctx.heap[var_id];
 
                        let link_decl = &ctx.heap[linked_id];
 

	
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            &ctx.module.source, var_decl.position(),
 
                            &format!(
 
                                "Conflicting types for this variable, assigned the type '{}'",
 
                                var_data.var_type.display_name(&ctx.heap)
 
                            )
 
                        ).with_postfixed_info(
 
                            &ctx.module.source, link_decl.position(),
 
                            &format!(
 
                                "Because it is incompatible with this variable, assigned the type '{}'",
 
                                link_data.var_type.display_name(&ctx.heap)
 
                            )
 
                        ));
 
                    }
 
                }
 
            }
 
        }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Var  type [{}]: {}", progress_var, self.var_types.get(&var_id).unwrap().var_type.display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 

	
 
        Ok(())
 
    }
 

	
 
    fn queue_expr_parent(&mut self, ctx: &Ctx, expr_id: ExpressionId) {
 
        if let ExpressionParent::Expression(parent_expr_id, _) = &ctx.heap[expr_id].parent() {
 
            self.expr_queued.insert(*parent_expr_id);
 
        }
 
    }
 

	
 
    fn queue_expr(&mut self, expr_id: ExpressionId) {
 
        self.expr_queued.insert(expr_id);
 
    }
 

	
 
    /// Applies a forced type constraint: the type associated with the supplied
 
    /// expression will be molded into the provided "template". The template may
 
    /// be fully specified (e.g. a bool) or contain "inference" variables (e.g.
 
    /// an array of T)
 
    fn apply_forced_constraint(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> Result<bool, ParseError2> {
 
    ) -> Result<bool, ParseError> {
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id);
 
        let expr_type = self.expr_types.get_mut(&expr_id).unwrap();
 
        match InferenceType::infer_subtree_for_single_type(expr_type, 0, template, 0) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(
 
                self.construct_template_type_error(ctx, expr_id, template)
 
            )
 
        }
 
    }
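
    // Usage sketch (illustrative only; `expr_id` stands for a hypothetical
    // expression that must be a boolean, e.g. the test of an if-statement):
    //
    //     let progress = self.apply_forced_constraint(
    //         ctx, expr_id, &[InferenceTypePart::Bool]
    //     )?;
    //     if progress { self.queue_expr_parent(ctx, expr_id); }
    //
    // A `true` result means the stored expression type became more concrete,
    // so the parent expression is queued for another inference pass.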
 

	
 
    fn apply_forced_constraint_types(
 
        to_infer: *mut InferenceType, to_infer_start_idx: usize,
 
        template: &[InferenceTypePart], template_start_idx: usize
 
    ) -> Result<bool, ()> {
 
        match InferenceType::infer_subtree_for_single_type(
 
            unsafe{ &mut *to_infer }, to_infer_start_idx,
 
            template, template_start_idx
 
        ) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(()),
 
        }
 
    }
 

	
 
    /// Applies a type constraint that expects the two provided types to be
 
    /// equal. We attempt to make progress in inferring the types. If the call
 
    /// is successful then the composition of all types is made equal.
 
    /// The "parent" `expr_id` is provided to construct errors.
 
    fn apply_equal2_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg1_start_idx: usize,
 
        arg2_id: ExpressionId, arg2_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError2> {
 
    ) -> Result<(bool, bool), ParseError> {
 
        debug_assert_expr_ids_unique_and_known!(self, arg1_id, arg2_id);
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            arg1_type, arg1_start_idx,
 
            arg2_type, arg2_start_idx
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        Ok((infer_res.modified_lhs(), infer_res.modified_rhs()))
 
    }
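
    // Usage sketch (illustrative only; the ids are hypothetical): for a binary
    // operator whose operands must share a type, a caller would typically run
    //
    //     let (progress_lhs, progress_rhs) =
    //         self.apply_equal2_constraint(ctx, expr_id, lhs_id, 0, rhs_id, 0)?;
    //     if progress_lhs { self.expr_queued.insert(lhs_id); }
    //     if progress_rhs { self.expr_queued.insert(rhs_id); }
    //
    // and re-queue whichever operand type was progressed.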
 

	
 
    /// Applies an equal2 constraint between a signature type (e.g. a function
 
    /// argument or struct field) and an expression whose type should match that
 
    /// signature. If we make progress on the signature, then we try to see if
 
    /// any of the embedded polymorphic types can be progressed.
 
    ///
 
    /// `outer_expr_id` is the main expression we're progressing (e.g. a 
 
    /// function call), while `expr_id` is the embedded expression we're 
 
    /// matching against the signature. `expression_type` and 
 
    /// `expression_start_idx` belong to `expr_id`.
 
    fn apply_equal2_signature_constraint(
 
        ctx: &Ctx, outer_expr_id: ExpressionId, expr_id: Option<ExpressionId>,
 
        polymorph_data: &mut ExtraData, polymorph_progress: &mut HashSet<usize>,
 
        signature_type: *mut InferenceType, signature_start_idx: usize,
 
        expression_type: *mut InferenceType, expression_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError2> {
 
    ) -> Result<(bool, bool), ParseError> {
 
        // Safety: cannot mutually infer the same types
 
        //         polymorph_data containers may not be modified
 
        debug_assert_ptrs_distinct!(signature_type, expression_type);
 

	
 
        // Infer the signature and expression type
 
        let infer_res = unsafe { 
 
            InferenceType::infer_subtrees_for_both_types(
 
                signature_type, signature_start_idx,
 
                expression_type, expression_start_idx
 
            ) 
 
        };
 

	
 
        if infer_res == DualInferenceResult::Incompatible {
 
            // TODO: Check if I still need to use this
 
            let outer_position = ctx.heap[outer_expr_id].position();
 
            let (position_name, position) = match expr_id {
 
                Some(expr_id) => ("argument's", ctx.heap[expr_id].position()),
 
                None => ("type's", outer_position)
 
            };
 
            let (signature_display_type, expression_display_type) = unsafe { (
 
                (&*signature_type).display_name(&ctx.heap),
 
                (&*expression_type).display_name(&ctx.heap)
 
            ) };
 

	
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, outer_position,
 
                "Failed to fully resolve the types of this expression"
 
            ).with_postfixed_info(
 
                &ctx.module.source, position,
 
                &format!(
 
                    "Because the {} signature has been resolved to '{}', but the expression has been resolved to '{}'",
 
                    position_name, signature_display_type, expression_display_type
 
                )
 
            ));
 
        }
 

	
 
        // Try to see if we can progress any of the polymorphic variables
 
        let progress_sig = infer_res.modified_lhs();
 
        let progress_expr = infer_res.modified_rhs();
 

	
 
        if progress_sig {
 
            let signature_type = unsafe{&mut *signature_type};
 
            debug_assert!(
 
                signature_type.has_body_marker, 
 
                "made progress on signature type, but it doesn't have a marker"
 
            );
 
            for (poly_idx, poly_section) in signature_type.body_marker_iter() {
 
                let polymorph_type = &mut polymorph_data.poly_vars[poly_idx];
 
                match Self::apply_forced_constraint_types(
 
                    polymorph_type, 0, poly_section, 0
 
                ) {
 
                    Ok(true) => { polymorph_progress.insert(poly_idx); },
 
                    Ok(false) => {},
 
                    Err(()) => { return Err(Self::construct_poly_arg_error(ctx, polymorph_data, outer_expr_id))}
 
                }
 
            }
 
        }
 
        Ok((progress_sig, progress_expr))
 
    }
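
    // Usage sketch (restating the call made per argument in the function-call
    // handling above, where `extra.embedded[arg_idx]` is the parameter type
    // from the signature and `argument_type` belongs to the argument
    // expression):
    //
    //     let (_, progress_arg) = Self::apply_equal2_signature_constraint(
    //         ctx, upcast_id, Some(arg_id), extra, &mut poly_progress,
    //         signature_type, 0, argument_type, 0
    //     )?;
    //
    // Every polymorphic variable that becomes more concrete is recorded in
    // `poly_progress` so it can later be re-applied to the other arguments and
    // the return type.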
 

	
 
    /// Applies equal2 constraints on the signature type for each of the 
 
    /// polymorphic variables. If the signature type is progressed then we 
 
    /// progress the expression type as well.
 
    ///
 
    /// This function assumes that the polymorphic variables have already been
 
    /// progressed as far as possible by calling 
 
    /// `apply_equal2_signature_constraint`. As such, we expect to not encounter
 
    /// any errors.
 
    ///
 
    /// This function returns true if the expression's type has been progressed
 
    fn apply_equal2_polyvar_constraint(
 
        heap: &Heap,
 
        polymorph_data: &ExtraData, _polymorph_progress: &HashSet<usize>,
 
        signature_type: *mut InferenceType, expr_type: *mut InferenceType
 
    ) -> bool {
 
        // Safety: all pointers should be distinct
 
        //         polymorph_data containers may not be modified
 
        debug_assert_ptrs_distinct!(signature_type, expr_type);
 
        let signature_type = unsafe{&mut *signature_type};
 
        let expr_type = unsafe{&mut *expr_type};
 

	
 
        // Iterate through markers in signature type to try and make progress
 
        // on the polymorphic variable        
 
        let mut seek_idx = 0;
 
        let mut modified_sig = false;
 
        
 
        while let Some((poly_idx, start_idx)) = signature_type.find_body_marker(seek_idx) {
 
            let end_idx = InferenceType::find_subtree_end_idx(&signature_type.parts, start_idx);
 
            // if polymorph_progress.contains(&poly_idx) {
 
                // Need to match subtrees
 
                let polymorph_type = &polymorph_data.poly_vars[poly_idx];
 
                debug_log!("   - DEBUG: Applying {} to '{}' from '{}'", polymorph_type.display_name(heap), InferenceType::partial_display_name(heap, &signature_type.parts[start_idx..]), signature_type.display_name(heap));
 
                let modified_at_marker = Self::apply_forced_constraint_types(
 
                    signature_type, start_idx, 
 
                    &polymorph_type.parts, 0
 
                ).expect("no failure when applying polyvar constraints");
 

	
 
                modified_sig = modified_sig || modified_at_marker;
 
            // }
 

	
 
            seek_idx = end_idx;
 
        }
 

	
 
        // If we made any progress on the signature's type, then we also need to
 
        // apply it to the expression that is supposed to match the signature.
 
        if modified_sig {
 
            match InferenceType::infer_subtree_for_single_type(
 
                expr_type, 0, &signature_type.parts, 0
 
            ) {
 
                SingleInferenceResult::Modified => true,
 
                SingleInferenceResult::Unmodified => false,
 
                SingleInferenceResult::Incompatible =>
 
                    unreachable!("encountered failure while reapplying modified signature to expression after polyvar inference")
 
            }
 
        } else {
 
            false
 
        }
 
    }
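
    // Usage sketch (restating the re-application loop in the function-call
    // handling above): once the polymorphic variables have been progressed,
    // the caller pushes them back into each argument and the return type:
    //
    //     let progress_arg = Self::apply_equal2_polyvar_constraint(
    //         &ctx.heap, extra, &poly_progress, signature_type, arg_type
    //     );
    //     if progress_arg { self.expr_queued.insert(arg_expr_id); }
    //
    // As documented above, this step is expected never to fail.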
 

	
 
    /// Applies a type constraint that expects all three provided types to be
 
    /// equal. In case we can make progress in inferring the types then we
 
    /// attempt to do so. If the call is successful then the composition of all
 
    /// types is made equal.
 
    fn apply_equal3_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId,
 
        start_idx: usize
 
    ) -> Result<(bool, bool, bool), ParseError2> {
 
    ) -> Result<(bool, bool, bool), ParseError> {
 
        // Safety: all expression IDs are always distinct, and we do not modify
 
        //  the container
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id, arg1_id, arg2_id);
 
        let expr_type: *mut _ = self.expr_types.get_mut(&expr_id).unwrap();
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 

	
 
        let expr_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(expr_type, start_idx, arg1_type, start_idx)
 
        };
 
        if expr_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_expr_type_error(ctx, expr_id, arg1_id));
 
        }
 

	
 
        let args_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(arg1_type, start_idx, arg2_type, start_idx) };
 
        if args_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        // If all types are compatible, but the second call caused the arg1_type
 
        // to be expanded, then we must also assign this to expr_type.
 
        let mut progress_expr = expr_res.modified_lhs();
 
        let mut progress_arg1 = expr_res.modified_rhs();
 
        let progress_arg2 = args_res.modified_rhs();
 

	
 
        if args_res.modified_lhs() { 
 
            unsafe {
 
                let end_idx = InferenceType::find_subtree_end_idx(&(*arg2_type).parts, start_idx);
 
                let subtree = &((*arg2_type).parts[start_idx..end_idx]);
 
                (*expr_type).replace_subtree(start_idx, subtree);
 
            }
 
            progress_expr = true;
 
            progress_arg1 = true;
 
        }
 

	
 
        Ok((progress_expr, progress_arg1, progress_arg2))
 
    }
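
    // Usage sketch (illustrative only; the ids are hypothetical): a
    // conditional expression would equate its own type with both branch types,
    // starting at the root of each type tree:
    //
    //     let (progress_expr, progress_true, progress_false) =
    //         self.apply_equal3_constraint(ctx, expr_id, true_id, false_id, 0)?;
    //
    // Each `true` flag indicates the corresponding type was made more concrete
    // and that its expression should be re-queued.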
 

	
 
    // TODO: @optimize Since we only deal with a single type this might be done
 
    //  a lot more efficiently, methinks (disregarding the allocations here)
 
    fn apply_equal_n_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId, args: &[ExpressionId],
 
    ) -> Result<Vec<bool>, ParseError2> {
 
    ) -> Result<Vec<bool>, ParseError> {
 
        // Early exit
 
        match args.len() {
 
            0 => return Ok(vec!()),         // nothing to progress
 
            1 => return Ok(vec![false]),    // only one type, so nothing to infer
 
            _ => {}
 
        }
 

	
 
        let mut progress = Vec::new();
 
        progress.resize(args.len(), false);
 

	
 
        // Do pairwise inference, keep track of the last entry we made progress
 
        // on. Once done we need to update everything to the most-inferred type.
 
        let mut arg_iter = args.iter();
 
        let mut last_arg_id = *arg_iter.next().unwrap();
 
        let mut last_lhs_progressed = 0;
 
        let mut lhs_arg_idx = 0;
 

	
 
        while let Some(next_arg_id) = arg_iter.next() {
 
            let arg1_type: *mut _ = self.expr_types.get_mut(&last_arg_id).unwrap();
 
            let arg2_type: *mut _ = self.expr_types.get_mut(next_arg_id).unwrap();
 

	
 
            let res = unsafe {
 
                InferenceType::infer_subtrees_for_both_types(arg1_type, 0, arg2_type, 0)
 
            };
 

	
 
            if res == DualInferenceResult::Incompatible {
 
                return Err(self.construct_arg_type_error(ctx, expr_id, last_arg_id, *next_arg_id));
 
            }
 

	
 
            if res.modified_lhs() {
 
                // We re-inferred something on the left hand side, so everything
 
                // up until now should be re-inferred.
 
                progress[lhs_arg_idx] = true;
 
                last_lhs_progressed = lhs_arg_idx;
 
            }
 
            progress[lhs_arg_idx + 1] = res.modified_rhs();
 

	
 
            last_arg_id = *next_arg_id;
 
            lhs_arg_idx += 1;
 
        }
 

	
 
        // Re-infer everything. Note that we do not need to re-infer the type
 
        // exactly at `last_lhs_progressed`, but only everything up to it.
 
        let last_type: *mut _ = self.expr_types.get_mut(args.last().unwrap()).unwrap();
 
        for arg_idx in 0..last_lhs_progressed {
 
            let arg_type: *mut _ = self.expr_types.get_mut(&args[arg_idx]).unwrap();
 
            unsafe{
 
                (*arg_type).replace_subtree(0, &(*last_type).parts);
 
            }
 
            progress[arg_idx] = true;
 
        }
 

	
 
        Ok(progress)
 
    }
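
    // Usage sketch (illustrative only; `elements` is a hypothetical slice with
    // the element expressions of an array literal, which must all share one
    // type):
    //
    //     let progress = self.apply_equal_n_constraint(ctx, expr_id, &elements)?;
    //     for (element_id, progressed) in elements.iter().zip(progress) {
    //         if progressed { self.expr_queued.insert(*element_id); }
    //     }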
 

	
 
    /// Determines the `InferenceType` for the expression based on the
 
    /// expression parent. Note that if the parent is another expression, we do
 
    /// not take special action; instead we let parent expressions fix the type
 
    /// of subexpressions before they have a chance to call this function.
 
    fn insert_initial_expr_inference_type(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId
 
    ) -> Result<(), ParseError2> {
 
    ) -> Result<(), ParseError> {
 
        use ExpressionParent as EP;
 
        use InferenceTypePart as ITP;
 

	
 
        let expr = &ctx.heap[expr_id];
 
        let inference_type = match expr.parent() {
 
            EP::None =>
 
                // Should have been set by linker
 
                unreachable!(),
 
            EP::ExpressionStmt(_) | EP::Expression(_, _) =>
 
                // Determined during type inference
 
                InferenceType::new(false, false, vec![ITP::Unknown]),
 
            EP::If(_) | EP::While(_) | EP::Assert(_) =>
 
                // Must be a boolean
 
                InferenceType::new(false, true, vec![ITP::Bool]),
 
            EP::Return(_) =>
 
                // Must match the return type of the function
 
                if let DefinitionType::Function(func_id) = self.definition_type {
 
                    let return_parser_type_id = ctx.heap[func_id].return_type;
 
                    self.determine_inference_type_from_parser_type(ctx, return_parser_type_id, true)
 
                } else {
 
                    // Cannot happen: definition always set upon body traversal
 
                    // and "return" calls in components are illegal.
 
                    unreachable!();
 
                },
 
            EP::New(_) =>
 
                // Must be a component call, which we assign a "Void" return
 
                // type
 
                InferenceType::new(false, true, vec![ITP::Void]),
 
        };
 

	
 
        match self.expr_types.entry(expr_id) {
 
            Entry::Vacant(vacant) => {
 
                vacant.insert(inference_type);
 
            },
 
            Entry::Occupied(mut preexisting) => {
 
                // We already have an entry; this happens if our parent fixed
 
                // our type (e.g. we're used in a conditional expression's test)
 
                // but we have a different type.
 
                // TODO: Is this ever called? Seems like it can't
 
                debug_assert!(false, "I am actually called, my ID is {}", expr_id.index);
 
                let old_type = preexisting.get_mut();
 
                if let SingleInferenceResult::Incompatible = InferenceType::infer_subtree_for_single_type(
 
                    old_type, 0, &inference_type.parts, 0
 
                ) {
 
                    return Err(self.construct_expr_type_error(ctx, expr_id, expr_id))
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
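
    // Illustrative seeding (hypothetical expressions): for `if (x) { return y; }`
    // inside a function returning an `int`, this pass seeds roughly
    //
    //     x: InferenceType::new(false, true, vec![ITP::Bool])   // EP::If
    //     y: InferenceType::new(false, true, vec![ITP::Int])    // EP::Return
    //
    // while an expression whose parent is another expression starts out as
    // `InferenceType::new(false, false, vec![ITP::Unknown])`.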
 

	
 
    fn insert_initial_call_polymorph_data(
 
        &mut self, ctx: &mut Ctx, call_id: CallExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Note: the polymorph variables may be partially specified and may
 
        // contain references to the wrapping definition's (i.e. the proctype
 
        // we are currently visiting) polymorphic arguments.
 
        //
 
        // The arguments of the call may refer to polymorphic variables in the
 
        // definition of the function we're calling, not of the wrapping
 
        // definition. We insert markers in these inferred types to be able to
 
        // map them back and forth to the polymorphic arguments of the function
 
        // we are calling.
 
        let call = &ctx.heap[call_id];
 

	
 
        // Handle the polymorphic variables themselves
 
        let mut poly_vars = Vec::with_capacity(call.poly_args.len());
 
        for poly_arg_type_id in call.poly_args.clone() { // TODO: @performance
 
            poly_vars.push(self.determine_inference_type_from_parser_type(ctx, poly_arg_type_id, true));
 
        }
 

	
 
        // Handle the arguments
 
        // TODO: @cleanup: Maybe factor this out for reuse in the validator/linker, should also
 
        //  make the code slightly more robust.
 
        let (embedded_types, return_type) = match &call.method {
 
            Method::Create => {
 
                // Not polymorphic
 
                (
 
                    vec![InferenceType::new(false, true, vec![ITP::Int])],
 
                    InferenceType::new(false, true, vec![ITP::Message, ITP::Byte])
 
                )
 
            },
 
            Method::Fires => {
 
                // bool fires<T>(PortLike<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::PortLike, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(false, true, vec![ITP::Bool])
 
                )
 
            },
 
            Method::Get => {
 
                // T get<T>(input<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::Input, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                )
 
            },
 
            Method::Put => {
 
                // void put<T>(output<T> port, T msg)
 
                (
 
                    vec![
 
                        InferenceType::new(true, false, vec![ITP::Output, ITP::MarkerBody(0), ITP::Unknown]),
 
                        InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                    ],
 
                    InferenceType::new(false, true, vec![ITP::Void])
 
                )
 
            }
 
            Method::Symbolic(symbolic) => {
 
                let definition = &ctx.heap[symbolic.definition.unwrap()];
 

	
 
                match definition {
 
                    Definition::Component(definition) => {
 
                        debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            let param_parser_type_id = param.parser_type;
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, param_parser_type_id, false));
 
                        }
 

	
 
                        (parameter_types, InferenceType::new(false, true, vec![InferenceTypePart::Void]))
 
                    },
 
                    Definition::Function(definition) => {
 
                        debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            let param_parser_type_id = param.parser_type;
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, param_parser_type_id, false));
 
                        }
 

	
 
                        let return_type = self.determine_inference_type_from_parser_type(ctx, definition.return_type, false);
 
                        (parameter_types, return_type)
 
                    },
 
                    Definition::Struct(_) | Definition::Enum(_) => {
 
                        unreachable!("insert initial polymorph data for struct/enum");
 
                    }
 
                }
 
            }
 
        };
 

	
 
        self.extra_data.insert(call_id.upcast(), ExtraData {
 
            poly_vars,
 
            embedded: embedded_types,
 
            returned: return_type
 
        });
 
    }
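
    // Illustrative shape (restating the `Method::Get` arm above): a call like
    // `get(port)` ends up with, conceptually,
    //
    //     ExtraData {
    //         poly_vars, // from the call's explicit polymorphic arguments
    //         embedded: vec![InferenceType::new(true, false,   // input<T>
    //             vec![ITP::Input, ITP::MarkerBody(0), ITP::Unknown])],
    //         returned: InferenceType::new(true, false,        // T
    //             vec![ITP::MarkerBody(0), ITP::Unknown]),
    //     }
    //
    // so inferring the port's payload type also resolves the call's return
    // type through the shared body marker.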
 

	
 
    fn insert_initial_struct_polymorph_data(
 
        &mut self, ctx: &mut Ctx, lit_id: LiteralExpressionId,
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_struct();
 

	
 
        // Handle polymorphic arguments
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args2.len());
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args2.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            ); 
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        }
 

	
 
        // Handle parser types on struct definition
 
        let definition = &ctx.heap[literal.definition.unwrap()];
 
        let definition = match definition {
 
            Definition::Struct(definition) => {
 
                debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                definition
 
            },
 
            _ => unreachable!("definition for struct literal does not point to struct definition")
 
        };
 

	
 
        // Note: programmer is capable of specifying fields in a struct literal
 
        // in a different order than on the definition. We take the programmer-
 
        // specified order as authoritative.
 
        let mut embedded_types = Vec::with_capacity(definition.fields.len());
 
        for lit_field in literal.fields.iter() {
 
            let def_field = &definition.fields[lit_field.field_idx];
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, def_field.parser_type, false
 
            );
 
            embedded_types.push(inference_type);
 
        }
 

	
 
        // Return type is the struct type itself, with the appropriate 
 
        // polymorphic variables. So:
 
        // - 1 part for definition
 
        // - N_poly_arg marker parts for each polymorphic argument
 
        // - all the parts for the currently known polymorphic arguments 
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(definition.this.upcast(), poly_vars.len()));
 
        let mut return_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
            if !poly_var.is_done { return_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let return_type = InferenceType::new(true, return_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars, 
 
            embedded: embedded_types,
 
            returned: return_type,
 
        });
 
    }
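
    // Illustrative layout (for a hypothetical struct `Pair<T1, T2>`): the
    // returned struct type is laid out as
    //
    //     [ Instance(pair_definition_id, 2),
    //       MarkerBody(0), /* parts of the first poly arg */
    //       MarkerBody(1), /* parts of the second poly arg */ ]
    //
    // which matches the `parts_reserved` computation above: one part for the
    // definition, one marker per polymorphic argument, plus the parts of each
    // currently known polymorphic argument.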
 

	
 
    /// Inserts the extra polymorphic data struct. Assumes that the select
 
    /// expression's referenced (definition_id, field_idx) has been resolved.
 
    fn insert_initial_select_polymorph_data(
 
        &mut self, ctx: &Ctx, select_id: SelectExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Retrieve relevant data
 
        let expr = &ctx.heap[select_id];
 
        let field = expr.field.as_symbolic();
 

	
 
        let definition_id = field.definition.unwrap();
 
        let definition = ctx.heap[definition_id].as_struct();
 
        let field_idx = field.field_idx;
 

	
 
        // Generate initial polyvar types and struct type
 
        let num_poly_vars = definition.poly_vars.len();
 
        let mut poly_vars = Vec::with_capacity(num_poly_vars);
 
        let struct_parts_reserved = 1 + 2 * num_poly_vars;
 
        let mut struct_parts = Vec::with_capacity(struct_parts_reserved);
 
        struct_parts.push(ITP::Instance(definition_id, num_poly_vars));        
 

	
 
        for poly_idx in 0..num_poly_vars {
 
            poly_vars.push(InferenceType::new(true, false, vec![
 
                ITP::MarkerBody(poly_idx), ITP::Unknown,
 
            ]));
 
            struct_parts.push(ITP::MarkerBody(poly_idx));
 
            struct_parts.push(ITP::Unknown);
 
        }
 
        debug_assert_eq!(struct_parts.len(), struct_parts_reserved);
 

	
 
        // Generate initial field type
 
        let field_type = self.determine_inference_type_from_parser_type(
 
            ctx, definition.fields[field_idx].parser_type, false
 
        );
 

	
 
        self.extra_data.insert(select_id.upcast(), ExtraData{
 
            poly_vars,
 
            embedded: vec![InferenceType::new(true, false, struct_parts)],
 
            returned: field_type
 
        });
 
    }
 

	
 
    /// Determines the initial InferenceType from the provided ParserType. This
 
    /// may be called with two kinds of intentions:
 
    /// 1. To resolve a ParserType within the body of a function, or on
 
    ///     polymorphic arguments to calls/instantiations within that body. This
 
    ///     means that the polymorphic variables are known and can be replaced
 
    ///     with the monomorph we're instantiating.
 
    /// 2. To resolve a ParserType on a called function's definition or on
 
    ///     an instantiated datatype's members. This means that the polymorphic
 
    ///     arguments inside those ParserTypes refer to the polymorphic
 
    ///     variables in the called/instantiated type's definition.
 
    /// In the second case we place InferenceTypePart::Marker instances such
 
    /// that we can perform type inference on the polymorphic variables.
 
    fn determine_inference_type_from_parser_type(
 
        &mut self, ctx: &Ctx, parser_type_id: ParserTypeId,
 
        parser_type_in_body: bool
 
    ) -> InferenceType {
 
        use ParserTypeVariant as PTV;
 
        use InferenceTypePart as ITP;
 

	
 
        let mut to_consider = VecDeque::with_capacity(16);
 
        to_consider.push_back(parser_type_id);
 

	
 
        let mut infer_type = Vec::new();
 
        let mut has_inferred = false;
 
        let mut has_markers = false;
 

	
 
        while !to_consider.is_empty() {
 
            let parser_type_id = to_consider.pop_front().unwrap();
 
            let parser_type = &ctx.heap[parser_type_id];
 
            match &parser_type.variant {
 
                PTV::Message => {
 
                    // TODO: @types Remove the Message -> Byte hack at some point...
 
                    infer_type.push(ITP::Message);
 
                    infer_type.push(ITP::Byte);
 
                },
 
                PTV::Bool => { infer_type.push(ITP::Bool); },
 
                PTV::Byte => { infer_type.push(ITP::Byte); },
 
                PTV::Short => { infer_type.push(ITP::Short); },
 
                PTV::Int => { infer_type.push(ITP::Int); },
 
                PTV::Long => { infer_type.push(ITP::Long); },
 
                PTV::String => { infer_type.push(ITP::String); },
 
                PTV::IntegerLiteral => { unreachable!("integer literal type on variable type"); },
 
                PTV::Inferred => {
 
                    infer_type.push(ITP::Unknown);
 
                    has_inferred = true;
 
                },
 
                PTV::Array(subtype_id) => {
 
                    infer_type.push(ITP::Array);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Input(subtype_id) => {
 
                    infer_type.push(ITP::Input);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Output(subtype_id) => {
 
                    infer_type.push(ITP::Output);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Symbolic(symbolic) => {
 
                    debug_assert!(symbolic.variant.is_some(), "symbolic variant not yet determined");
 
                    match symbolic.variant.as_ref().unwrap() {
 
                        SymbolicParserTypeVariant::PolyArg(_, arg_idx) => {
 
                            let arg_idx = *arg_idx;
 
                            debug_assert!(symbolic.poly_args2.is_empty()); // TODO: @hkt
 

	
 
                            if parser_type_in_body {
 
                                // Polymorphic argument refers to definition's
 
                                // polymorphic variables
 
                                debug_assert!(arg_idx < self.poly_vars.len());
 
                                debug_assert!(!self.poly_vars[arg_idx].has_marker());
 
                                infer_type.push(ITP::MarkerDefinition(arg_idx));
 
                                for concrete_part in &self.poly_vars[arg_idx].parts {
 
                                    infer_type.push(ITP::from(*concrete_part));
 
                                }
 
                            } else {
 
                                // Polymorphic argument has to be inferred
 
                                has_markers = true;
 
                                has_inferred = true;
 
                                infer_type.push(ITP::MarkerBody(arg_idx));
 
                                infer_type.push(ITP::Unknown);
 
                            }
 
                        },
 
                        SymbolicParserTypeVariant::Definition(definition_id) => {
 
                            // TODO: @cleanup
 
                            if cfg!(debug_assertions) {
 
                                let definition = &ctx.heap[*definition_id];
 
                                debug_assert!(definition.is_struct() || definition.is_enum()); // TODO: @function_ptrs
 
                                let num_poly = match definition {
 
                                    Definition::Struct(v) => v.poly_vars.len(),
 
                                    Definition::Enum(v) => v.poly_vars.len(),
 
                                    _ => unreachable!(),
 
                                };
 
                                debug_assert_eq!(symbolic.poly_args2.len(), num_poly);
 
                            }
 

	
 
                            infer_type.push(ITP::Instance(*definition_id, symbolic.poly_args2.len()));
 
                            let mut poly_arg_idx = symbolic.poly_args2.len();
 
                            while poly_arg_idx > 0 {
 
                                poly_arg_idx -= 1;
 
                                to_consider.push_front(symbolic.poly_args2[poly_arg_idx]);
 
                            }
 
                        }
 
                    }
 
                }
 
            }
 
        }
 

	
 
        InferenceType::new(has_markers, !has_inferred, infer_type)
 
    }
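
    // Illustrative translations (hypothetical parser types; `T` is a
    // polymorphic variable):
    //
    //     in<msg>                           -> Input, Message, Byte
    //     array of T, inside the body       -> Array, MarkerDefinition(i), <monomorph parts>
    //     array of T, in a called signature -> Array, MarkerBody(i), Unknown
    //
    // That is: case 1 of the doc comment substitutes the monomorph we are
    // instantiating, while case 2 inserts a body marker so the polymorphic
    // variable can be inferred at the call/instantiation site.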
 

	
 
    /// Construct an error when an expression's type does not match. This
 
    /// happens if we infer the expression type from its arguments (e.g. the
 
    /// expression type of an addition operator is the type of the arguments)
 
    /// But the expression type was already set due to our parent (e.g. an
 
    /// "if statement" or a "logical not" always expecting a boolean)
 
    fn construct_expr_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, arg_id: ExpressionId
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        // TODO: Expand and provide more meaningful information for humans
 
        let expr = &ctx.heap[expr_id];
 
        let arg_expr = &ctx.heap[arg_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 
        let arg_type = self.expr_types.get(&arg_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: this expression expected a '{}'", 
 
                expr_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg_expr.position(),
 
            &format!(
 
                "But this expression yields a '{}'",
 
                arg_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_arg_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let arg1 = &ctx.heap[arg1_id];
 
        let arg2 = &ctx.heap[arg2_id];
 

	
 
        let arg1_type = self.expr_types.get(&arg1_id).unwrap();
 
        let arg2_type = self.expr_types.get(&arg2_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            "Incompatible types: cannot apply this expression"
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg1.position(),
 
            &format!(
 
                "Because this expression has type '{}'",
 
                arg1_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg2.position(),
 
            &format!(
 
                "But this expression has type '{}'",
 
                arg2_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_template_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        let expr = &ctx.heap[expr_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
        return ParseError::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: got a '{}' but expected a '{}'",
 
                expr_type.display_name(&ctx.heap), 
 
                InferenceType::partial_display_name(&ctx.heap, template)
 
            )
 
        )
 
    }
 

	
 
    /// Constructs a human-interpretable error in the case that type inference

    /// on a polymorphic variable of a function call or literal construction
 
    /// failed. This may only be caused by a pair of inference types (which may 
 
    /// come from arguments or the return type) having two different inferred 
 
    /// values for that polymorphic variable.
 
    ///
 
    /// So we find this pair and construct the error using it.
 
    ///
 
    /// We assume that the expression is a function call or a struct literal,
 
    /// and that an actual error has occurred.
 
    fn construct_poly_arg_error(
 
        ctx: &Ctx, poly_data: &ExtraData, expr_id: ExpressionId
 
    ) -> ParseError2 {
 
    ) -> ParseError {
 
        // Helper function to check for polymorph mismatch between two inference
 
        // types.
 
        fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(usize, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            if !type_a.has_body_marker || !type_b.has_body_marker {
 
                return None
 
            }
 

	
 
            for (marker_a, section_a) in type_a.body_marker_iter() {
 
                for (marker_b, section_b) in type_b.body_marker_iter() {
 
                    if marker_a != marker_b {
 
                        // Not the same polymorphic variable
 
                        continue;
 
                    }
 

	
 
                    if !InferenceType::check_subtrees(section_a, 0, section_b, 0) {
 
                        // Not compatible
 
                        return Some((marker_a, section_a, section_b))
 
                    }
 
                }
 
            }
 

	
 
            None
 
        }
 

	
 
        // Helper function to retrieve the polyvar name and definition name
 
        fn get_poly_var_and_func_name(ctx: &Ctx, poly_var_idx: usize, expr: &CallExpression) -> (String, String) {
 
            match &expr.method {
 
                Method::Create => unreachable!(),
 
                Method::Fires => (String::from('T'), String::from("fires")),
 
                Method::Get => (String::from('T'), String::from("get")),
 
                Method::Put => (String::from('T'), String::from("put")),
 
                Method::Symbolic(symbolic) => {
 
                    let definition = &ctx.heap[symbolic.definition.unwrap()];
 
                    let poly_var = match definition {
 
                        Definition::Struct(_) | Definition::Enum(_) => unreachable!(),
 
                        Definition::Function(definition) => {
 
                            String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string()
 
                        },
 
                        Definition::Component(definition) => {
 
                            String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string()
 
                        }
 
                    };
 
                    let func_name = String::from_utf8_lossy(&symbolic.identifier.value).to_string();
 
                    (poly_var, func_name)
 
                }
 
            }
 
        }
 

	
 
        fn get_poly_var_and_type_name(ctx: &Ctx, poly_var_idx: usize, definition_id: DefinitionId) -> (String, String) {
 
            let definition = &ctx.heap[definition_id];
 
            match definition {
 
                Definition::Enum(_) | Definition::Function(_) | Definition::Component(_) =>
 
                    unreachable!("get_poly_var_and_type_name called on non-struct value"),
 
                Definition::Struct(definition) => (
 
                    String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string(),
 
                    String::from_utf8_lossy(&definition.identifier.value).to_string()
 
                ),
 
            }
 
        }
 

	
 
        // Helper function to construct initial error
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &Expression) -> ParseError2 {
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &Expression) -> ParseError {
 
            match expr {
 
                Expression::Call(expr) => {
 
                    let (poly_var, func_name) = get_poly_var_and_func_name(ctx, poly_var_idx, expr);
 
                    return ParseError2::new_error(
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' of '{}'",
 
                            poly_var, func_name
 
                        )
 
                    )
 
                },
 
                Expression::Literal(expr) => {
 
                    let lit_struct = expr.value.as_struct();
 
                    let (poly_var, struct_name) = get_poly_var_and_type_name(ctx, poly_var_idx, lit_struct.definition.unwrap());
 
                    return ParseError2::new_error(
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' of instantiation of '{}'",
 
                            poly_var, struct_name
 
                        )
 
                    )
 
                },
 
                Expression::Select(expr) => {
 
                    let field = expr.field.as_symbolic();
 
                    let (poly_var, struct_name) = get_poly_var_and_type_name(ctx, poly_var_idx, field.definition.unwrap());
 
                    return ParseError2::new_error(
 
                    return ParseError::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' while accessing field '{}' of '{}'",
 
                            poly_var, &String::from_utf8_lossy(&field.identifier.value), struct_name
 
                        )
 
                    )
 
                }
 
                _ => unreachable!("called construct_poly_arg_error without a call/literal expression")
 
            }
 
        }
 

	
 
        // Actual checking
 
        let expr = &ctx.heap[expr_id];
 
        let (expr_args, expr_return_name) = match expr {
 
            Expression::Call(expr) => 
 
                (
 
                    expr.arguments.clone(),
 
                    "return type"
 
                ),
 
            Expression::Literal(expr) => 
 
                (
 
                    expr.value.as_struct().fields
 
                        .iter()
 
                        .map(|f| f.value)
 
                        .collect(),
 
                    "literal"
 
                ),
 
            Expression::Select(expr) =>
 
                // Select expression uses the polymorphic variables of the 
 
                // struct it is accessing, so get the subject expression.
 
                (
 
                    vec![expr.subject],
 
                    "selected field"
 
                ),
 
            _ => unreachable!(),
 
        };
 

	
 
        // - check return type with itself
 
        if let Some((poly_idx, section_a, section_b)) = has_poly_mismatch(
 
            &poly_data.returned, &poly_data.returned
 
        ) {
 
            return construct_main_error(ctx, poly_idx, expr)
 
                .with_postfixed_info(
 
                    &ctx.module.source, expr.position(),
 
                    &format!(
 
                        "The {} inferred the conflicting types '{}' and '{}'",
 
                        expr_return_name,
 
                        InferenceType::partial_display_name(&ctx.heap, section_a),
 
                        InferenceType::partial_display_name(&ctx.heap, section_b)
 
                    )
 
                )
 
        }
 

	
 
        // - check each argument against the other arguments and the return type
 
        for (arg_a_idx, arg_a) in poly_data.embedded.iter().enumerate() {
 
            for (arg_b_idx, arg_b) in poly_data.embedded.iter().enumerate() {
 
                if arg_b_idx > arg_a_idx {
 
                    break;
 
                }
 

	
 
                if let Some((poly_idx, section_a, section_b)) = has_poly_mismatch(&arg_a, &arg_b) {
 
                    let error = construct_main_error(ctx, poly_idx, expr);
 
                    if arg_a_idx == arg_b_idx {
 
                        // Same argument
 
                        let arg = &ctx.heap[expr_args[arg_a_idx]];
 
                        return error.with_postfixed_info(
 
                            &ctx.module.source, arg.position(),
 
                            &format!(
 
                                "This argument inferred the conflicting types '{}' and '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_a),
 
                                InferenceType::partial_display_name(&ctx.heap, section_b)
 
                            )
 
                        )
 
                    } else {
 
                        let arg_a = &ctx.heap[expr_args[arg_a_idx]];
 
                        let arg_b = &ctx.heap[expr_args[arg_b_idx]];
 
                        return error.with_postfixed_info(
 
                            &ctx.module.source, arg_a.position(),
 
                            &format!(
 
                                "This argument inferred it to '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_a)
 
                            )
 
                        ).with_postfixed_info(
 
                            &ctx.module.source, arg_b.position(),
 
                            &format!(
 
                                "While this argument inferred it to '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_b)
 
                            )
 
                        )
 
                    }
 
                }
 
            }
 

	
 
            // Check with return type
 
            if let Some((poly_idx, section_arg, section_ret)) = has_poly_mismatch(arg_a, &poly_data.returned) {
 
                let arg = &ctx.heap[expr_args[arg_a_idx]];
 
                return construct_main_error(ctx, poly_idx, expr)
 
                    .with_postfixed_info(
 
                        &ctx.module.source, arg.position(),
 
                        &format!(
 
                            "This argument inferred it to '{}'",
 
                            InferenceType::partial_display_name(&ctx.heap, section_arg)
 
                        )
 
                    )
 
                    .with_postfixed_info(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "While the {} inferred it to '{}'",
 
                            expr_return_name,
 
                            InferenceType::partial_display_name(&ctx.heap, section_ret)
 
                        )
 
                    )
 
            }
 
        }
 

	
 
        unreachable!("construct_poly_arg_error without actual error found?")
 
    }
 
}
 

	
 
#[cfg(test)]
 
mod tests {
 
    use super::*;
 
    use crate::protocol::arena::Id;
 
    use InferenceTypePart as ITP;
 
    use InferenceType as IT;
 

	
 
    #[test]
 
    fn test_single_part_inference() {
 
        // lhs argument inferred from rhs
 
        let pairs = [
 
            (ITP::NumberLike, ITP::Byte),
 
            (ITP::IntegerLike, ITP::Int),
 
            (ITP::Unknown, ITP::Long),
 
            (ITP::Unknown, ITP::String)
 
        ];
 
        for (lhs, rhs) in pairs.iter() {
 
            // Using infer-both
 
            let mut lhs_type = IT::new(false, false, vec![lhs.clone()]);
 
            let mut rhs_type = IT::new(false, true, vec![rhs.clone()]);
 
            let result = unsafe{ IT::infer_subtrees_for_both_types(
 
                &mut lhs_type, 0, &mut rhs_type, 0
 
            ) };
 
            assert_eq!(DualInferenceResult::First, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 

	
 
            // Using infer-single
 
            let mut lhs_type = IT::new(false, false, vec![lhs.clone()]);
 
            let rhs_type = IT::new(false, true, vec![rhs.clone()]);
 
            let result = IT::infer_subtree_for_single_type(
 
                &mut lhs_type, 0, &rhs_type.parts, 0
 
            );
 
            assert_eq!(SingleInferenceResult::Modified, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 
        }
 
    }
 

	
 
    #[test]
 
    fn test_multi_part_inference() {
 
        let pairs = [
 
            (vec![ITP::ArrayLike, ITP::NumberLike], vec![ITP::Slice, ITP::Byte]),
 
            (vec![ITP::Unknown], vec![ITP::Input, ITP::Array, ITP::String]),
 
            (vec![ITP::PortLike, ITP::Int], vec![ITP::Input, ITP::Int]),
 
            (vec![ITP::Unknown], vec![ITP::Output, ITP::Int]),
 
            (
 
                vec![ITP::Instance(Id::new(0), 2), ITP::Input, ITP::Unknown, ITP::Output, ITP::Unknown],
 
                vec![ITP::Instance(Id::new(0), 2), ITP::Input, ITP::Array, ITP::Int, ITP::Output, ITP::Int]
 
            )
 
        ];
 

	
 
        for (lhs, rhs) in pairs.iter() {
 
            let mut lhs_type = IT::new(false, false, lhs.clone());
 
            let mut rhs_type = IT::new(false, false, rhs.clone());
 
            let result = unsafe{ IT::infer_subtrees_for_both_types(
 
                &mut lhs_type, 0, &mut rhs_type, 0
 
            ) };
 
            assert_eq!(DualInferenceResult::First, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 

	
 
            let mut lhs_type = IT::new(false, false, lhs.clone());
 
            let rhs_type = IT::new(false, false, rhs.clone());
 
            let result = IT::infer_subtree_for_single_type(
 
                &mut lhs_type, 0, &rhs_type.parts, 0
 
            );
 
            assert_eq!(SingleInferenceResult::Modified, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts)
 
        }
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/type_table.rs
Show inline comments
 
/**
 
TypeTable
 

	
 
Contains the type table: a datastructure that, when compilation succeeds,
 
contains a concrete type definition for each AST type definition. In general
 
terms the type table will go through the following phases during the compilation
 
process:
 

	
 
1. The base type definitions are resolved after the parser phase has
 
    finished. This implies that the AST is fully constructed, but not yet
 
    annotated.
 
2. With the base type definitions resolved, the validation/linker phase will
 
    use the type table (together with the symbol table) to disambiguate
 
    terms (e.g. does an expression refer to a variable, an enum, a constant,
 
    etc.)
 
3. During the type checking/inference phase the type table is used to ensure
 
    that the AST contains valid use of types in expressions and statements.
 
    At the same time type inference will find concrete instantiations of
 
    polymorphic types, these will be stored in the type table as monomorphed
 
    instantiations of a generic type.
 
4. After type checking and inference (and possibly when constructing byte
 
    code) the type table will construct a type graph and solidify each
 
    non-polymorphic type and each monomorphed instantiation of a polymorphic type
 
    into concrete types.
 

	
 
So a base type is defined by its (optionally polymorphic) representation in the
 
AST. A concrete type has concrete types for each of the polymorphic arguments. A
 
struct, enum or union may have polymorphic arguments but not actually be a
 
polymorphic type. This happens when the polymorphic arguments are not used in
 
the type definition itself. The same goes for functions/components, except there we just
 
check the arguments/return type of the signature.
 

	
 
Apart from base types and concrete types, we also use the term "embedded type"
 
for types that are embedded within another type, such as the type of a
 
struct field or of a union variant. Embedded types may themselves have
 
polymorphic arguments and therefore form an embedded type tree.
 

	
 
NOTE: for now a polymorphic definition of a function/component is illegal if the
 
    polymorphic arguments are not used in the arguments/return type. It should
 
    be legal, but we disallow it for now.
 

	
 
TODO: Allow potentially cyclic datatypes and reject truly cyclic datatypes.
 
TODO: Allow for the full potential of polymorphism
 
TODO: Detect "true" polymorphism: for datatypes like structs/enum/unions this
 
    is simple. For functions we need to check the entire body. Do it here? Or
 
    do it somewhere else?
 
TODO: Do we want to check fn argument collision here, or in validation phase?
 
TODO: Make type table an on-demand thing instead of constructing all base types.
 
TODO: Cleanup everything, feels like a lot can be written cleaner and with less
 
    assumptions on each function call.
 
// TODO: Review all comments
 
*/
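
// Illustrative, self-contained sketch (not part of the actual type table): the
// "base definition + recorded monomorphs" bookkeeping described above, reduced
// to plain strings and a stand-in `Mono` type. All names in this sketch are
// made up; the real table records `Vec<ConcreteType>` monomorphs per `DefinitionId`.
#[allow(dead_code)]
fn sketch_record_monomorphs() -> usize {
    use std::collections::HashMap;

    #[derive(Clone, PartialEq)]
    enum Mono { Int, Long, Array(Box<Mono>) }
    struct BaseDef { poly_var_count: usize, monomorphs: Vec<Vec<Mono>> }

    // One polymorphic base definition, e.g. `struct Pair<T> { T a; T b; }`.
    let mut table: HashMap<&str, BaseDef> = HashMap::new();
    table.insert("Pair", BaseDef{ poly_var_count: 1, monomorphs: Vec::new() });

    // Type inference later discovers concrete instantiations; each one is
    // recorded at most once, mirroring `add_monomorph`/`has_monomorph`.
    let def = table.get_mut("Pair").unwrap();
    for args in [vec![Mono::Int], vec![Mono::Array(Box::new(Mono::Long))], vec![Mono::Int]] {
        assert_eq!(args.len(), def.poly_var_count);
        if !def.monomorphs.contains(&args) {
            def.monomorphs.push(args);
        }
    }
    def.monomorphs.len() // 2: `Pair<int>` and `Pair<long[]>`
}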
 

	
 
use std::fmt::{Formatter, Result as FmtResult};
 
use std::collections::{HashMap, VecDeque};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::parser::symbol_table::{SymbolTable, Symbol};
 
use crate::protocol::inputsource::*;
 
use crate::protocol::parser::*;
 

	
 
//------------------------------------------------------------------------------
 
// Defined Types
 
//------------------------------------------------------------------------------
 

	
 
#[derive(Copy, Clone, PartialEq, Eq)]
 
pub enum TypeClass {
 
    Enum,
 
    Union,
 
    Struct,
 
    Function,
 
    Component
 
}
 

	
 
impl TypeClass {
 
    pub(crate) fn display_name(&self) -> &'static str {
 
        match self {
 
            TypeClass::Enum => "enum",
 
            TypeClass::Union => "union",
 
            TypeClass::Struct => "struct",
 
            TypeClass::Function => "function",
 
            TypeClass::Component => "component",
 
        }
 
    }
 

	
 
    pub(crate) fn is_data_type(&self) -> bool {
 
        *self == TypeClass::Enum || *self == TypeClass::Union || *self == TypeClass::Struct
 
    }
 

	
 
    pub(crate) fn is_proc_type(&self) -> bool {
 
        *self == TypeClass::Function || *self == TypeClass::Component
 
    }
 
}
 

	
 
impl std::fmt::Display for TypeClass {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
 
        write!(f, "{}", self.display_name())
 
    }
 
}
 

	
 
/// Struct wrapping around a potentially polymorphic type. If the type does not
 
/// have any polymorphic arguments then it will not have any monomorphs and
 
/// `is_polymorph` will be set to `false`. A type with polymorphic arguments
 
/// only has `is_polymorph` set to `true` if the polymorphic arguments actually
 
/// appear in the types associated types (function return argument, struct
 
/// field, enum variant, etc.). Otherwise the polymorphic argument is just a
 
/// marker and does not influence the bytesize of the type.
 
pub struct DefinedType {
 
    pub(crate) ast_root: RootId,
 
    pub(crate) ast_definition: DefinitionId,
 
    pub(crate) definition: DefinedTypeVariant,
 
    pub(crate) poly_vars: Vec<PolyVar>,
 
    pub(crate) is_polymorph: bool,
 
    pub(crate) is_pointerlike: bool,
 
    // TODO: @optimize
 
    pub(crate) monomorphs: Vec<Vec<ConcreteType>>,
 
}
 

	
 
impl DefinedType {
 
    fn add_monomorph(&mut self, types: Vec<ConcreteType>) {
 
        debug_assert!(!self.has_monomorph(&types), "monomorph already exists");
 
        self.monomorphs.push(types);
 
    }
 

	
 
    pub(crate) fn has_any_monomorph(&self) -> bool {
 
        !self.monomorphs.is_empty()
 
    }
 

	
 
    pub(crate) fn has_monomorph(&self, types: &Vec<ConcreteType>) -> bool {
 
        debug_assert_eq!(self.poly_vars.len(), types.len(), "mismatch in number of polymorphic types");
 
        for monomorph in &self.monomorphs {
 
            if monomorph == types { return true; }
 
        }
 

	
 
        return false;
 
    }
 
}
 

	
 
pub enum DefinedTypeVariant {
 
    Enum(EnumType),
 
    Union(UnionType),
 
    Struct(StructType),
 
    Function(FunctionType),
 
    Component(ComponentType)
 
}
 

	
 
pub struct PolyVar {
 
    identifier: Identifier,
 
    /// Whether the polymorphic variable is used directly in the definition of
 
    /// the type (not including bodies of function/component types)
 
    is_in_use: bool,
 
}
 

	
 
impl DefinedTypeVariant {
 
    pub(crate) fn type_class(&self) -> TypeClass {
 
        match self {
 
            DefinedTypeVariant::Enum(_) => TypeClass::Enum,
 
            DefinedTypeVariant::Union(_) => TypeClass::Union,
 
            DefinedTypeVariant::Struct(_) => TypeClass::Struct,
 
            DefinedTypeVariant::Function(_) => TypeClass::Function,
 
            DefinedTypeVariant::Component(_) => TypeClass::Component
 
        }
 
    }
 

	
 
    pub(crate) fn as_struct(&self) -> &StructType {
 
        match self {
 
            DefinedTypeVariant::Struct(v) => v,
 
            _ => unreachable!("Cannot convert {} to struct variant", self.type_class())
 
        }
 
    }
 
}
 

	
 
/// `EnumType` is the classical C/C++ enum type. It has various variants with
 
/// an assigned integer value. The integer values may be user-defined,
 
/// compiler-defined, or a mix of the two. If a user assigns the same enum
 
/// value multiple times, we assume the user is an expert and we consider both
 
/// variants to be equal to one another.
 
pub struct EnumType {
 
    variants: Vec<EnumVariant>,
 
    representation: PrimitiveType,
 
}
 

	
 
// TODO: Also support maximum u64 value
 
pub struct EnumVariant {
 
    identifier: Identifier,
 
    value: i64,
 
}
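
// Illustrative, self-contained sketch (not part of the actual resolver): how
// variant values are assigned when user-defined and compiler-defined values
// are mixed, mirroring the loop in `resolve_base_enum_definition` below. A
// `None` entry stands for a variant without an explicit value.
#[allow(dead_code)]
fn sketch_assign_enum_values(specified: &[Option<i64>]) -> Vec<i64> {
    let mut value = -1i64;
    let mut assigned = Vec::with_capacity(specified.len());
    for user_value in specified {
        // The running value always increments, and an explicit value resets it,
        // so later unspecified variants continue counting from there.
        value += 1;
        if let Some(v) = user_value { value = *v; }
        assigned.push(value);
    }
    assigned
}
// For example: `sketch_assign_enum_values(&[None, Some(10), None])` yields `[0, 10, 11]`.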
 

	
 
/// `UnionType` is the algebraic datatype (or sum type, or discriminated union).
 
/// A value is an element of the union, identified by its tag, and may contain
 
/// a single subtype.
 
pub struct UnionType {
 
    variants: Vec<UnionVariant>,
 
    tag_representation: PrimitiveType
 
}
 

	
 
pub struct UnionVariant {
 
    identifier: Identifier,
 
    parser_type: Option<ParserTypeId>,
 
    tag_value: i64,
 
}
 

	
 
pub struct StructType {
 
    pub(crate) fields: Vec<StructField>,
 
}
 

	
 
pub struct StructField {
 
    pub(crate) identifier: Identifier,
 
    pub(crate) parser_type: ParserTypeId,
 
}
 

	
 
pub struct FunctionType {
 
    pub return_type: ParserTypeId,
 
    pub arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct ComponentType {
 
    pub variant: ComponentVariant,
 
    pub arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct FunctionArgument {
 
    identifier: Identifier,
 
    parser_type: ParserTypeId,
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Type table
 
//------------------------------------------------------------------------------
 

	
 
// TODO: @cleanup Do I really need this, doesn't make the code that much cleaner
 
struct TypeIterator {
 
    breadcrumbs: Vec<(RootId, DefinitionId)>
 
}
 

	
 
impl TypeIterator {
 
    fn new() -> Self {
 
        Self{ breadcrumbs: Vec::with_capacity(32) }
 
    }
 

	
 
    fn reset(&mut self, root_id: RootId, definition_id: DefinitionId) {
 
        self.breadcrumbs.clear();
 
        self.breadcrumbs.push((root_id, definition_id))
 
    }
 

	
 
    fn push(&mut self, root_id: RootId, definition_id: DefinitionId) {
 
        self.breadcrumbs.push((root_id, definition_id));
 
    }
 

	
 
    fn contains(&self, root_id: RootId, definition_id: DefinitionId) -> bool {
 
        for (stored_root_id, stored_definition_id) in self.breadcrumbs.iter() {
 
            if *stored_root_id == root_id && *stored_definition_id == definition_id { return true; }
 
        }
 

	
 
        return false
 
    }
 

	
 
    fn top(&self) -> Option<(RootId, DefinitionId)> {
 
        self.breadcrumbs.last().map(|(r, d)| (*r, *d))
 
    }
 

	
 
    fn pop(&mut self) {
 
        debug_assert!(!self.breadcrumbs.is_empty());
 
        self.breadcrumbs.pop();
 
    }
 
}
 

	
 
/// Result from attempting to resolve a `ParserType` using the symbol table and
 
/// the type table.
 
enum ResolveResult {
 
    /// ParserType is a builtin type
 
    BuiltIn,
 
    /// ParserType points to a polymorphic argument, contains the index of the
 
    /// polymorphic argument in the outermost definition (e.g. we may have 
 
    /// structs nested three levels deep, but in the innermost struct we can 
 
    /// only use the polyargs that are specified in the type definition of the
 
    /// outermost struct).
 
    PolyArg(usize),
 
    /// ParserType points to a user-defined type that is already resolved in the
 
    /// type table.
 
    Resolved((RootId, DefinitionId)),
 
    /// ParserType points to a user-defined type that is not yet resolved into
 
    /// the type table.
 
    Unresolved((RootId, DefinitionId))
 
}
 

	
 
pub(crate) struct TypeTable {
 
    /// Lookup from AST DefinitionId to a defined type. Considering possible
 
    /// polymorphs is done inside the `DefinedType` struct.
 
    lookup: HashMap<DefinitionId, DefinedType>,
 
    /// Iterator over `(module, definition)` tuples used as workspace to make sure
 
    /// that the base definitions of all of a type's subtypes are resolved.
 
    iter: TypeIterator,
 
    /// Iterator over `parser type`s during the process where `parser types` are
 
    /// resolved into a `(module, definition)` tuple.
 
    parser_type_iter: VecDeque<ParserTypeId>,
 
}
 

	
 
pub(crate) struct TypeCtx<'a> {
 
    symbols: &'a SymbolTable,
 
    heap: &'a mut Heap,
 
    modules: &'a [LexedModule]
 
}
 

	
 
impl<'a> TypeCtx<'a> {
 
    pub(crate) fn new(symbols: &'a SymbolTable, heap: &'a mut Heap, modules: &'a [LexedModule]) -> Self {
 
        Self{ symbols, heap, modules }
 
    }
 
}
 

	
 
impl TypeTable {
 
    /// Construct a new type table without any resolved types.
 
    pub(crate) fn new() -> Self {
 
        Self{ 
 
            lookup: HashMap::new(), 
 
            iter: TypeIterator::new(), 
 
            parser_type_iter: VecDeque::with_capacity(64), 
 
        }
 
    }
 

	
 
    pub(crate) fn build_base_types(&mut self, ctx: &mut TypeCtx) -> Result<(), ParseError2> {
 
    pub(crate) fn build_base_types(&mut self, ctx: &mut TypeCtx) -> Result<(), ParseError> {
 
        // Make sure we're allowed to cast root_id to index into ctx.modules
 
        debug_assert!(self.lookup.is_empty());
 
        debug_assert!(self.iter.top().is_none());
 
        debug_assert!(self.parser_type_iter.is_empty());
 

	
 
        if cfg!(debug_assertions) {
 
            for (index, module) in ctx.modules.iter().enumerate() {
 
                debug_assert_eq!(index, module.root_id.index as usize);
 
            }
 
        }
 

	
 
        // Use context to guess hashmap size
 
        let reserve_size = ctx.heap.definitions.len();
 
        self.lookup.reserve(reserve_size);
 

	
 
        // TODO: @cleanup Rework this hack
 
        for root_idx in 0..ctx.modules.len() {
 
            let last_definition_idx = ctx.heap[ctx.modules[root_idx].root_id].definitions.len();
 
            for definition_idx in 0..last_definition_idx {
 
                let definition_id = ctx.heap[ctx.modules[root_idx].root_id].definitions[definition_idx];
 
                self.resolve_base_definition(ctx, definition_id)?;
 
            }
 
        }
 

	
 
        debug_assert_eq!(self.lookup.len(), reserve_size, "mismatch in reserved size of type table");
 

	
 
        Ok(())
 
    }
 

	
 
    /// Retrieves base definition from type table. We must be able to retrieve
 
    /// it as we resolve all base types upon type table construction (for now).
 
    /// However, in the future we might do on-demand type resolving, so return
 
    /// an option anyway
 
    pub(crate) fn get_base_definition(&self, definition_id: &DefinitionId) -> Option<&DefinedType> {
 
        self.lookup.get(&definition_id)
 
    }
 

	
 
    /// Instantiates a monomorph for a given base definition.
 
    pub(crate) fn add_monomorph(&mut self, definition_id: &DefinitionId, types: Vec<ConcreteType>) {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to instantiate monomorph of definition unknown to type table"
 
        );
 

	
 
        let definition = self.lookup.get_mut(definition_id).unwrap();
 
        definition.add_monomorph(types);
 
    }
 

	
 
    /// Checks if a given definition already has a specific monomorph
 
    pub(crate) fn has_monomorph(&mut self, definition_id: &DefinitionId, types: &Vec<ConcreteType>) -> bool {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to check monomorph existence of definition unknown to type table"
 
        );
 

	
 
        let definition = self.lookup.get(definition_id).unwrap();
 
        definition.has_monomorph(types)
 
    }
 

	
 
    /// This function will resolve just the basic definition of the type, it
 
    /// will not handle any of the monomorphized instances of the type.
 
    fn resolve_base_definition<'a>(&'a mut self, ctx: &mut TypeCtx, definition_id: DefinitionId) -> Result<(), ParseError2> {
 
    fn resolve_base_definition<'a>(&'a mut self, ctx: &mut TypeCtx, definition_id: DefinitionId) -> Result<(), ParseError> {
 
        // Check if we have already resolved the base definition
 
        if self.lookup.contains_key(&definition_id) { return Ok(()); }
 

	
 
        let root_id = Self::find_root_id(ctx, definition_id);
 
        self.iter.reset(root_id, definition_id);
 

	
 
        while let Some((root_id, definition_id)) = self.iter.top() {
 
            // We have a type to resolve
 
            let definition = &ctx.heap[definition_id];
 

	
 
            let can_pop_breadcrumb = match definition {
 
                // TODO: @cleanup Borrow rules hax
 
                Definition::Enum(_) => self.resolve_base_enum_definition(ctx, root_id, definition_id),
 
                Definition::Struct(_) => self.resolve_base_struct_definition(ctx, root_id, definition_id),
 
                Definition::Component(_) => self.resolve_base_component_definition(ctx, root_id, definition_id),
 
                Definition::Function(_) => self.resolve_base_function_definition(ctx, root_id, definition_id),
 
            }?;
 

	
 
            // Otherwise: `ingest_resolve_result` has pushed a new breadcrumb
 
            // that we must follow before we can resolve the current type
 
            if can_pop_breadcrumb {
 
                self.iter.pop();
 
            }
 
        }
 

	
 
        // We must have resolved the type
 
        debug_assert!(self.lookup.contains_key(&definition_id), "base type not resolved");
 
        Ok(())
 
    }
 

	
 
    /// Resolve the basic enum definition to an entry in the type table. It will
 
    /// not instantiate any monomorphized instances of polymorphic enum
 
    /// definitions. If a subtype has to be resolved first then this function
 
    /// will return `false` after calling `ingest_resolve_result`.
 
    fn resolve_base_enum_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_enum_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_enum());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base enum already resolved");
 
        
 
        let definition = ctx.heap[definition_id].as_enum();
 

	
 
        // Check if the enum should be implemented as a classic enumeration or
 
        // a tagged union. Keep track of variant index for error messages. Make
 
        // sure all embedded types are resolved.
 
        let mut first_tag_value = None;
 
        let mut first_int_value = None;
 
        for variant in &definition.variants {
 
            match &variant.value {
 
                EnumVariantValue::None => {},
 
                EnumVariantValue::Integer(_) => if first_int_value.is_none() {
 
                    first_int_value = Some(variant.position);
 
                },
 
                EnumVariantValue::Type(variant_type_id) => {
 
                    if first_tag_value.is_none() {
 
                        first_tag_value = Some(variant.position);
 
                    }
 

	
 
                    // Check if the embedded type needs to be resolved
 
                    let resolve_result = self.resolve_base_parser_type(ctx, &definition.poly_vars, root_id, *variant_type_id)?;
 
                    if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                        return Ok(false)
 
                    }
 
                }
 
            }
 
        }
 

	
 
        if first_tag_value.is_some() && first_int_value.is_some() {
 
            // Mixing integer variants and type-embedding variants is almost
            // certainly a programmer mistake, so reject it
 
            let module_source = &ctx.modules[root_id.index as usize].source;
 
            let tag_pos = first_tag_value.unwrap();
 
            let int_pos = first_int_value.unwrap();
 
            return Err(
 
                ParseError2::new_error(
 
                ParseError::new_error(
 
                    module_source, definition.position,
 
                    "Illegal combination of enum integer variant(s) and enum union variant(s)"
 
                )
 
                    .with_postfixed_info(module_source, int_pos, "Assigning an integer value here")
 
                    .with_postfixed_info(module_source, tag_pos, "Embedding a type in a union variant here")
 
            );
 
        }
 

	
 
        // Enumeration is legal
 
        if first_tag_value.is_some() {
 
            // Implement as a tagged union
 

	
 
            // Determine the union variants
 
            let mut tag_value = -1;
 
            let mut variants = Vec::with_capacity(definition.variants.len());
 
            for variant in &definition.variants {
 
                tag_value += 1;
 
                let parser_type = match &variant.value {
 
                    EnumVariantValue::None => {
 
                        None
 
                    },
 
                    EnumVariantValue::Type(parser_type_id) => {
 
                        // Type should be resolvable, we checked this above
 
                        Some(*parser_type_id)
 
                    },
 
                    EnumVariantValue::Integer(_) => {
 
                        debug_assert!(false, "Encountered `Integer` variant after asserting enum is a discriminated union");
 
                        unreachable!();
 
                    }
 
                };
 

	
 
                variants.push(UnionVariant{
 
                    identifier: variant.identifier.clone(),
 
                    parser_type,
 
                    tag_value,
 
                })
 
            }
 

	
 
            // Ensure union names and polymorphic args do not conflict
 
            self.check_identifier_collision(
 
                ctx, root_id, &variants, |variant| &variant.identifier, "enum variant"
 
            )?;
 
            self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
            let mut poly_args = self.create_initial_poly_vars(&definition.poly_vars);
 
            for variant in &variants {
 
                if let Some(embedded) = variant.parser_type {
 
                    self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, embedded)?;
 
                }
 
            }
 
            let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 

	
 
            // Insert base definition in type table
 
            self.lookup.insert(definition_id, DefinedType {
 
                ast_root: root_id,
 
                ast_definition: definition_id,
 
                definition: DefinedTypeVariant::Union(UnionType{
 
                    variants,
 
                    tag_representation: Self::enum_tag_type(-1, tag_value),
 
                }),
 
                poly_vars: poly_args,
 
                is_polymorph,
 
                is_pointerlike: false, // TODO: @cyclic_types
 
                monomorphs: Vec::new()
 
            });
 
        } else {
 
            // Implement as a regular enum
 
            let mut enum_value = -1;
 
            let mut min_enum_value = 0;
 
            let mut max_enum_value = 0;
 
            let mut variants = Vec::with_capacity(definition.variants.len());
 
            for variant in &definition.variants {
 
                enum_value += 1;
 
                match &variant.value {
 
                    EnumVariantValue::None => {
 
                        variants.push(EnumVariant{
 
                            identifier: variant.identifier.clone(),
 
                            value: enum_value,
 
                        });
 
                    },
 
                    EnumVariantValue::Integer(override_value) => {
 
                        enum_value = *override_value;
 
                        variants.push(EnumVariant{
 
                            identifier: variant.identifier.clone(),
 
                            value: enum_value,
 
                        });
 
                    },
 
                    EnumVariantValue::Type(_) => {
 
                        debug_assert!(false, "Encountered `Type` variant after asserting enum is not a discriminated union");
 
                        unreachable!();
 
                    }
 
                }
 
                if enum_value < min_enum_value { min_enum_value = enum_value; }
 
                else if enum_value > max_enum_value { max_enum_value = enum_value; }
 
            }
 

	
 
            // Ensure enum names and polymorphic args do not conflict
 
            self.check_identifier_collision(
 
                ctx, root_id, &variants, |variant| &variant.identifier, "enum variant"
 
            )?;
 
            self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
            // Note: although we cannot have embedded types dependent on the
 
            // polymorphic variables, they might still be present as tokens
 
            let definition_id = definition.this.upcast();
 
            self.lookup.insert(definition_id, DefinedType {
 
                ast_root: root_id,
 
                ast_definition: definition_id,
 
                definition: DefinedTypeVariant::Enum(EnumType{
 
                    variants,
 
                    representation: Self::enum_tag_type(min_enum_value, max_enum_value)
 
                }),
 
                poly_vars: self.create_initial_poly_vars(&definition.poly_vars),
 
                is_polymorph: false,
 
                is_pointerlike: false,
 
                monomorphs: Vec::new()
 
            });
 
        }
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic struct definition to an entry in the type table. It
 
    /// will not instantiate any monomorphized instances of polymorphic struct
 
    /// definitions.
 
    fn resolve_base_struct_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_struct_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_struct());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base struct already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_struct();
 

	
 
        // Make sure all fields point to resolvable types
 
        for field_definition in &definition.fields {
 
            let resolve_result = self.resolve_base_parser_type(ctx, &definition.poly_vars, root_id, field_definition.parser_type)?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 

	
 
        // All fields types are resolved, construct base type
 
        let mut fields = Vec::with_capacity(definition.fields.len());
 
        for field_definition in &definition.fields {
 
            fields.push(StructField{
 
                identifier: field_definition.field.clone(),
 
                parser_type: field_definition.parser_type,
 
            })
 
        }
 

	
 
        // And make sure no conflicts exist in field names and/or polymorphic args
 
        self.check_identifier_collision(
 
            ctx, root_id, &fields, |field| &field.identifier, "struct field"
 
        )?;
 
        self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct representation of polymorphic arguments
 
        let mut poly_args = self.create_initial_poly_vars(&definition.poly_vars);
 
        for field in &fields {
 
            self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, field.parser_type)?;
 
        }
 

	
 
        let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 

	
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Struct(StructType{
 
                fields,
 
            }),
 
            poly_vars: poly_args,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic function definition to an entry in the type table. It
 
    /// will not instantiate any monomorphized instances of polymorphic function
 
    /// definitions.
 
    fn resolve_base_function_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_function_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_function());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base function already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_function();
 
        let return_type = definition.return_type;
 

	
 
        // Check the return type
 
        let resolve_result = self.resolve_base_parser_type(
 
            ctx, &definition.poly_vars, root_id, definition.return_type
 
        )?;
 
        if !self.ingest_resolve_result(ctx, resolve_result)? {
 
            return Ok(false)
 
        }
 

	
 
        // Check the argument types
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            let resolve_result = self.resolve_base_parser_type(
 
                ctx, &definition.poly_vars, root_id, param.parser_type
 
            )?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 

	
 
        // Construct arguments to function
 
        let mut arguments = Vec::with_capacity(definition.parameters.len());
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            arguments.push(FunctionArgument{
 
                identifier: param.identifier.clone(),
 
                parser_type: param.parser_type,
 
            })
 
        }
 

	
 
        // Check conflict of argument and polyarg identifiers
 
        self.check_identifier_collision(
 
            ctx, root_id, &arguments, |arg| &arg.identifier, "function argument"
 
        )?;
 
        self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct polymorphic arguments
 
        let mut poly_args = self.create_initial_poly_vars(&definition.poly_vars);
 
        let return_type_id = definition.return_type;
 
        self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, return_type_id)?;
 
        for argument in &arguments {
 
            self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, argument.parser_type)?;
 
        }
 

	
 
        let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 

	
 
        // Construct entry in type table
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Function(FunctionType{
 
                return_type,
 
                arguments,
 
            }),
 
            poly_vars: poly_args,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic component definition to an entry in the type table.
 
    /// It will not instantiate any monomorphized instances of polymorphic
 
    /// component definitions.
 
    fn resolve_base_component_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
    fn resolve_base_component_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError> {
 
        debug_assert!(ctx.heap[definition_id].is_component());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base component already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_component();
 
        let component_variant = definition.variant;
 

	
 
        // Check argument types
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            let resolve_result = self.resolve_base_parser_type(
 
                ctx, &definition.poly_vars, root_id, param.parser_type
 
            )?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 

	
 
        // Construct argument types
 
        let mut arguments = Vec::with_capacity(definition.parameters.len());
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            arguments.push(FunctionArgument{
 
                identifier: param.identifier.clone(),
 
                parser_type: param.parser_type
 
            })
 
        }
 

	
 
        // Check conflict of argument and polyarg identifiers
 
        self.check_identifier_collision(
 
            ctx, root_id, &arguments, |arg| &arg.identifier, "component argument"
 
        )?;
 
        self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct polymorphic arguments
 
        let mut poly_args = self.create_initial_poly_vars(&definition.poly_vars);
 
        for argument in &arguments {
 
            self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, argument.parser_type)?;
 
        }
 

	
 
        let is_polymorph = poly_args.iter().any(|v| v.is_in_use);
 

	
 
        // Construct entry in type table
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Component(ComponentType{
 
                variant: component_variant,
 
                arguments,
 
            }),
 
            poly_vars: poly_args,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Takes a ResolveResult and returns `true` if the caller can happily
 
    /// continue resolving its current type, or `false` if the caller must break
 
    /// resolving the current type and exit to the outer resolving loop. In the
 
    /// latter case the `result` value was `ResolveResult::Unresolved`, implying
 
    /// that the type must be resolved first.
 
    fn ingest_resolve_result(&mut self, ctx: &TypeCtx, result: ResolveResult) -> Result<bool, ParseError2> {
 
    fn ingest_resolve_result(&mut self, ctx: &TypeCtx, result: ResolveResult) -> Result<bool, ParseError> {
 
        match result {
 
            ResolveResult::BuiltIn | ResolveResult::PolyArg(_) => Ok(true),
 
            ResolveResult::Resolved(_) => Ok(true),
 
            ResolveResult::Unresolved((root_id, definition_id)) => {
 
                if self.iter.contains(root_id, definition_id) {
 
                    // Cyclic dependency encountered
 
                    // TODO: Allow this
 
                    let mut error = ParseError2::new_error(
 
                    let mut error = ParseError::new_error(
 
                        &ctx.modules[root_id.index as usize].source, ctx.heap[definition_id].position(),
 
                        "Evaluating this type definition results in a cyclic type"
 
                    );
 

	
 
                    for (breadcrumb_idx, (root_id, definition_id)) in self.iter.breadcrumbs.iter().enumerate() {
 
                        let msg = if breadcrumb_idx == 0 {
 
                            "The cycle started with this definition"
 
                        } else {
 
                            "Which depends on this definition"
 
                        };
 

	
 
                        error = error.with_postfixed_info(
 
                            &ctx.modules[root_id.index as usize].source,
 
                            ctx.heap[*definition_id].position(), msg
 
                        );
 
                    }
 

	
 
                    Err(error)
 
                } else {
 
                    // Type is not yet resolved, so push IDs on iterator and
 
                    // continue the resolving loop
 
                    self.iter.push(root_id, definition_id);
 
                    Ok(false)
 
                }
 
            }
 
        }
 
    }
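
    // Illustrative, self-contained sketch (not part of the actual resolver):
    // the breadcrumb-based cycle check used above, reduced to plain indices.
    // Hitting a dependency that is already on the breadcrumb stack means the
    // definitions reference each other cyclically.
    #[allow(dead_code)]
    fn sketch_detect_cycle_with_breadcrumbs() -> Vec<usize> {
        // Hypothetical dependency edges: definition 0 -> 1 -> 2 -> 0 (cyclic).
        let first_dependency = [1usize, 2, 0];
        let mut breadcrumbs: Vec<usize> = vec![0];

        loop {
            let current = *breadcrumbs.last().unwrap();
            let next = first_dependency[current];
            if breadcrumbs.contains(&next) {
                // Cycle found; the breadcrumbs spell out the error trail.
                return breadcrumbs; // [0, 1, 2]
            }
            breadcrumbs.push(next);
        }
    }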
 

	
 
    /// Each type definition may consist of several embedded subtypes. This
 
    /// function checks whether that embedded type is a builtin, a direct
 
    /// reference to a polymorphic argument, or an (un)resolved type definition.
 
    /// If the embedded type's symbol cannot be found then this function returns
 
    /// an error.
 
    ///
 
    /// If the embedded type is resolved, then one always receives the type's
 
    /// (module, definition) tuple. If any of the types in the embedded type's
 
    /// tree is not yet resolved, then one may receive a (module, definition)
 
    /// tuple that does not correspond to the `parser_type_id` passed into this
 
    /// function.
 
    fn resolve_base_parser_type(&mut self, ctx: &TypeCtx, poly_vars: &Vec<Identifier>, root_id: RootId, parser_type_id: ParserTypeId) -> Result<ResolveResult, ParseError2> {
 
    fn resolve_base_parser_type(&mut self, ctx: &TypeCtx, poly_vars: &Vec<Identifier>, root_id: RootId, parser_type_id: ParserTypeId) -> Result<ResolveResult, ParseError> {
 
        use ParserTypeVariant as PTV;
 

	
 
        // Prepping iterator
 
        self.parser_type_iter.clear();
 
        self.parser_type_iter.push_back(parser_type_id);
 

	
 
        // Result for the very first parser type we resolve; only the first result is kept
 
        let mut resolve_result = None;
 
        let mut set_resolve_result = |v: ResolveResult| {
 
            if resolve_result.is_none() { resolve_result = Some(v); }
 
        };
 

	
 
        'resolve_loop: while let Some(parser_type_id) = self.parser_type_iter.pop_back() {
 
            let parser_type = &ctx.heap[parser_type_id];
 

	
 
            match &parser_type.variant {
 
                // Builtin types. An array is a builtin as it is implemented as a
 
                // couple of pointers, so we do not require the subtype to be fully
 
                // resolved. Similar for input/output ports.
 
                PTV::Array(_) | PTV::Input(_) | PTV::Output(_) | PTV::Message |
 
                PTV::Bool | PTV::Byte | PTV::Short | PTV::Int | PTV::Long |
 
                PTV::String => {
 
                    set_resolve_result(ResolveResult::BuiltIn);
 
                },
 
                // IntegerLiteral types and the inferred marker are not allowed in
 
                // definitions of types
 
                PTV::IntegerLiteral |
 
                PTV::Inferred => {
 
                    debug_assert!(false, "Encountered illegal ParserTypeVariant within type definition");
 
                    unreachable!();
 
                },
 
                // Symbolic type, make sure its base type, and the base types
 
                // of all members of the embedded type tree are resolved. We
 
                // don't care about monomorphs yet.
 
                PTV::Symbolic(symbolic) => {
 
                    // Check if the symbolic type is one of the definition's
 
                    // polymorphic arguments. If so then we can halt the
 
                    // execution
 
                    for (poly_arg_idx, poly_arg) in poly_vars.iter().enumerate() {
 
                        if symbolic.identifier.matches_identifier(poly_arg) {
 
                            set_resolve_result(ResolveResult::PolyArg(poly_arg_idx));
 
                            continue 'resolve_loop;
 
                        }
 
                    }
 

	
 
                    // Lookup the definition in the symbol table
 
                    let (symbol, mut ident_iter) = ctx.symbols.resolve_namespaced_identifier(root_id, &symbolic.identifier);
 
                    if symbol.is_none() {
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            &ctx.modules[root_id.index as usize].source, symbolic.identifier.position,
 
                            "Could not resolve type"
 
                        ))
 
                    }
 

	
 
                    let symbol_value = symbol.unwrap();
 
                    let module_source = &ctx.modules[root_id.index as usize].source;
 

	
 
                    match symbol_value.symbol {
 
                        Symbol::Namespace(_) => {
 
                            // Reference to a namespace instead of a type
 
                            let last_ident = ident_iter.prev();
 
                            return if ident_iter.num_remaining() == 0 {
 
                                // Could also have polymorphic args, but we 
 
                                // don't care, just throw this error: 
 
                                Err(ParseError2::new_error(
 
                                Err(ParseError::new_error(
 
                                    module_source, symbolic.identifier.position,
 
                                    "Expected a type, got a module name"
 
                                ))
 
                            } else if last_ident.is_some() && last_ident.map(|(_, poly_args)| poly_args.is_some()).unwrap() {
 
                                // Halted at a namespace because we encountered
 
                                // polymorphic arguments
 
                                Err(ParseError2::new_error(
 
                                Err(ParseError::new_error(
 
                                    module_source, symbolic.identifier.position,
 
                                    "Illegal specification of polymorphic arguments to a module name"
 
                                ))
 
                            } else {
 
                                // Impossible (with the current implementation 
 
                                // of the symbol table)
 
                                unreachable!(
 
                                    "Got namespace symbol with {} returned symbols from {}",
 
                                    ident_iter.num_returned(),
 
                                    &String::from_utf8_lossy(&symbolic.identifier.value)
 
                                );
 
                            }
 
                        },
 
                        Symbol::Definition((root_id, definition_id)) => {
 
                            let definition = &ctx.heap[definition_id];
 
                            if ident_iter.num_remaining() > 0 {
 
                                // Namespaced identifier is longer than the type
 
                                // we found. Return the appropriate message
 
                                return if definition.is_struct() || definition.is_enum() {
 
                                    Err(ParseError2::new_error(
 
                                    Err(ParseError::new_error(
 
                                        module_source, symbolic.identifier.position,
 
                                        &format!(
 
                                            "Unknown type '{}', did you mean to use '{}'?",
 
                                            String::from_utf8_lossy(&symbolic.identifier.value),
 
                                            String::from_utf8_lossy(&definition.identifier().value)
 
                                        )
 
                                    ))
 
                                } else {
 
                                    Err(ParseError2::new_error(
 
                                    Err(ParseError::new_error(
 
                                        module_source, symbolic.identifier.position,
 
                                        "Unknown datatype"
 
                                    ))
 
                                }
 
                            }
 

	
 
                            // Found a match, make sure it is a datatype
 
                            if !(definition.is_struct() || definition.is_enum()) {
 
                                return Err(ParseError2::new_error(
 
                                return Err(ParseError::new_error(
 
                                    module_source, symbolic.identifier.position,
 
                                    "Embedded types must be datatypes (structs or enums)"
 
                                ))
 
                            }
 

	
 
                            // Found a struct/enum definition
 
                            if !self.lookup.contains_key(&definition_id) {
 
                                // Type is not yet resolved, immediately return
 
                                // this
 
                                return Ok(ResolveResult::Unresolved((root_id, definition_id)));
 
                            }
 

	
 
                            // Type is resolved, so set as result, but continue
 
                            // iterating over the parser types in the embedded
 
                            // type's tree
 
                            set_resolve_result(ResolveResult::Resolved((root_id, definition_id)));
 

	
 
                            // Note: because we're resolving parser types, not
 
                            // embedded types, we're parsing a tree, so we can't
 
                            // get stuck in a cyclic loop.
 
                            let last_ident = ident_iter.prev();
 
                            if let Some((_, Some(poly_args))) = last_ident {
 
                                for poly_arg_type_id in poly_args {
 
                                    self.parser_type_iter.push_back(*poly_arg_type_id);
 
                                }
 
                            }
 
                        }
 
                    }
 
                }
 
            }
 
        }
 

	
 
        // If here then all types in the embedded type's tree were resolved.
 
        debug_assert!(resolve_result.is_some(), "faulty logic in ParserType resolver");
 
        return Ok(resolve_result.unwrap())
 
    }
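
    // Illustrative, self-contained sketch (not part of the actual resolver):
    // the worklist pattern used above, where an embedded type tree is walked
    // iteratively with a `VecDeque` instead of recursion, pushing each node's
    // polymorphic argument subtrees. `SketchType` is a made-up stand-in for
    // `ParserType`.
    #[allow(dead_code)]
    fn sketch_walk_embedded_type_tree() -> usize {
        use std::collections::VecDeque;

        enum SketchType { Builtin, Symbolic(Vec<SketchType>) }

        // Roughly `Outer<Inner<byte>, int>`: a symbolic root with two poly args.
        let root = SketchType::Symbolic(vec![
            SketchType::Symbolic(vec![SketchType::Builtin]),
            SketchType::Builtin,
        ]);

        let mut visited = 0;
        let mut work: VecDeque<&SketchType> = VecDeque::new();
        work.push_back(&root);
        while let Some(node) = work.pop_back() {
            visited += 1;
            if let SketchType::Symbolic(poly_args) = node {
                for arg in poly_args { work.push_back(arg); }
            }
        }
        visited // 4: the root, `Inner<byte>`, `byte` and `int`
    }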
 

	
 
    fn create_initial_poly_vars(&self, poly_args: &[Identifier]) -> Vec<PolyVar> {
 
        poly_args
 
            .iter()
 
            .map(|v| PolyVar{ identifier: v.clone(), is_in_use: false })
 
            .collect()
 
    }
 

	
 
    /// This function modifies the passed `poly_args` by checking the embedded
 
    /// type tree. This should be called after `resolve_base_parser_type` is
 
    /// called on each node in this tree: we assume that each symbolic type was
 
    /// resolved to either a polymorphic arg or a definition.
 
    ///
 
    /// This function will also make sure that if the embedded type has
 
    /// polymorphic variables itself, that the number of polymorphic variables
 
    /// matches the number of arguments in the associated definition.
 
    ///
 
    /// Finally, for all embedded types (which includes function/component 
 
    /// arguments and return types) in type definitions we will modify the AST
 
    /// when the embedded type is a polymorphic variable or points to another
 
    /// user-defined type.
 
    fn check_and_resolve_embedded_type_and_modify_poly_args(
 
        &mut self, ctx: &mut TypeCtx, 
 
        type_definition_id: DefinitionId, poly_args: &mut [PolyVar], 
 
        root_id: RootId, embedded_type_id: ParserTypeId,
 
    ) -> Result<(), ParseError2> {
 
    ) -> Result<(), ParseError> {
 
        use ParserTypeVariant as PTV;
 

	
 
        self.parser_type_iter.clear();
 
        self.parser_type_iter.push_back(embedded_type_id);
 

	
 
        'type_loop: while let Some(embedded_type_id) = self.parser_type_iter.pop_back() {
 
            let embedded_type = &mut ctx.heap[embedded_type_id];
 

	
 
            match &mut embedded_type.variant {
 
                PTV::Message | PTV::Bool | 
 
                PTV::Byte | PTV::Short | PTV::Int | PTV::Long |
 
                PTV::String => {
 
                    // Builtins, no modification/iteration required
 
                },
 
                PTV::IntegerLiteral | PTV::Inferred => {
 
                    // TODO: @hack Allowed for now so we can continue testing 
 
                    //  the parser/lexer
 
                    // debug_assert!(false, "encountered illegal parser type during ParserType/PolyArg modification");
 
                    // unreachable!();
 
                },
 
                PTV::Array(subtype_id) |
 
                PTV::Input(subtype_id) |
 
                PTV::Output(subtype_id) => {
 
                    // Outer type is fixed, but inner type might be symbolic
 
                    self.parser_type_iter.push_back(*subtype_id);
 
                },
 
                PTV::Symbolic(symbolic) => {
 
                    for (poly_arg_idx, poly_arg) in poly_args.iter_mut().enumerate() {
 
                        if symbolic.identifier.matches_identifier(&poly_arg.identifier) {
 
                            poly_arg.is_in_use = true;
 
                            // TODO: If we allow higher-kinded types in the future,
 
                            //  then we can't continue here, but must resolve the
 
                            //  polyargs as well
 
                            debug_assert!(symbolic.identifier.get_poly_args().is_none(), "got polymorphic arguments to a polymorphic variable");
 
                            debug_assert!(symbolic.variant.is_none(), "symbolic parser type's variant already resolved");
 
                            symbolic.variant = Some(SymbolicParserTypeVariant::PolyArg(type_definition_id, poly_arg_idx));
 
                            continue 'type_loop;
 
                        }
 
                    }
 

	
 
                    // Must match a definition
 
                    let (symbol, ident_iter) = ctx.symbols.resolve_namespaced_identifier(root_id, &symbolic.identifier);
 
                    debug_assert!(symbol.is_some(), "could not resolve symbolic parser type when determining poly args");
 
                    let symbol = symbol.unwrap();
 
                    debug_assert_eq!(ident_iter.num_remaining(), 0, "no exact symbol match when determining poly args");
 
                    let (_root_id, definition_id) = symbol.as_definition().unwrap();
 
    
 
                    // Must be a struct, enum, or union, we checked this
 
                    let defined_type = self.lookup.get(&definition_id).unwrap();
 
                    let (_, poly_args) = ident_iter.prev().unwrap();
 
                    let poly_args = poly_args.unwrap_or_default();
 

	
 
                    if cfg!(debug_assertions) {
 
                        // Everything here should already be checked in 
 
                        // `resolve_base_parser_type`.
 
                        let type_class = defined_type.definition.type_class();
 
                        debug_assert!(
 
                            type_class == TypeClass::Struct || type_class == TypeClass::Enum || type_class == TypeClass::Union,
 
                            "embedded type's class is not struct, enum or union"
 
                        );
 
                        debug_assert_eq!(poly_args.len(), symbolic.identifier.poly_args.len());
 
                    }
 
    
 
                    if poly_args.len() != defined_type.poly_vars.len() {
 
                        // Mismatch in number of polymorphic arguments. This is 
 
                        // not allowed in type definitions (no inference is 
 
                        // allowed within type definitions, only in bodies of
 
                        // functions/components).
 
                        let module_source = &ctx.modules[root_id.index as usize].source;
 
                        let number_args_msg = if defined_type.poly_vars.is_empty() {
 
                            String::from("is not polymorphic")
 
                        } else {
 
                            format!("accepts {} polymorphic arguments", defined_type.poly_vars.len())
 
                        };
 
    
 
                        return Err(ParseError2::new_error(
 
                        return Err(ParseError::new_error(
 
                            module_source, symbolic.identifier.position,
 
                            &format!(
 
                                "The type '{}' {}, but {} polymorphic arguments were provided",
 
                                String::from_utf8_lossy(&symbolic.identifier.strip_poly_args()),
 
                                number_args_msg, poly_args.len()
 
                            )
 
                        ));
 
                    }
 
    
 
                    self.parser_type_iter.extend(poly_args);
 
                    debug_assert!(symbolic.variant.is_none(), "symbolic parser type's variant already resolved");
 
                    symbolic.variant = Some(SymbolicParserTypeVariant::Definition(definition_id));
 
                }
 
            }
 
        }
 

	
 
        // All nodes in the embedded type tree were valid
 
        Ok(())
 
    }
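
    // Illustrative, self-contained sketch (not part of the actual resolver):
    // marking which polymorphic variables are actually used by a definition's
    // embedded types, mirroring the `is_in_use`/`is_polymorph` bookkeeping
    // above. All names are made up for illustration.
    #[allow(dead_code)]
    fn sketch_mark_poly_vars_in_use() -> bool {
        struct SketchPolyVar { name: &'static str, is_in_use: bool }

        let mut poly_vars = vec![
            SketchPolyVar{ name: "T", is_in_use: false },
            SketchPolyVar{ name: "U", is_in_use: false },
        ];
        // Hypothetical embedded types of `struct Thing<T, U> { T field; byte other; }`.
        let embedded_type_names = ["T", "byte"];

        for name in &embedded_type_names {
            if let Some(var) = poly_vars.iter_mut().find(|v| v.name == *name) {
                var.is_in_use = true;
            }
        }

        // Only `T` is in use: the definition is a true polymorph, while `U` is just a marker.
        poly_vars.iter().any(|v| v.is_in_use) // true
    }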
 

	
 
    /// Go through a list of identifiers and ensure that all identifiers have
 
    /// unique names
 
    fn check_identifier_collision<T: Sized, F: Fn(&T) -> &Identifier>(
 
        &self, ctx: &TypeCtx, root_id: RootId, items: &[T], getter: F, item_name: &'static str
 
    ) -> Result<(), ParseError2> {
 
    ) -> Result<(), ParseError> {
 
        for (item_idx, item) in items.iter().enumerate() {
 
            let item_ident = getter(item);
 
            for other_item in &items[0..item_idx] {
 
                let other_item_ident = getter(other_item);
 
                if item_ident == other_item_ident {
 
                    let module_source = &ctx.modules[root_id.index as usize].source;
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        module_source, item_ident.position, &format!("This {} is defined more than once", item_name)
 
                    ).with_postfixed_info(
 
                        module_source, other_item_ident.position, &format!("The other {} is defined here", item_name)
 
                    ));
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
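
    // Illustrative, self-contained sketch (not part of the actual checker):
    // the pairwise collision check above applied to plain string identifiers.
    // Each item is only compared against the items before it, so the earlier
    // occurrence is reported as "the other" definition in the error.
    #[allow(dead_code)]
    fn sketch_find_identifier_collision(names: &[&str]) -> Option<(usize, usize)> {
        for (idx, name) in names.iter().enumerate() {
            for (other_idx, other_name) in names[..idx].iter().enumerate() {
                if name == other_name {
                    return Some((idx, other_idx)); // (duplicate, original)
                }
            }
        }
        None
    }
    // For example: `sketch_find_identifier_collision(&["a", "b", "a"])` yields `Some((2, 0))`.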
 

	
 
    /// Go through a list of polymorphic arguments and make sure that the
 
    /// arguments all have unique names, and the arguments do not conflict with
 
    /// any symbols defined at the module scope.
 
    fn check_poly_args_collision(
 
        &self, ctx: &TypeCtx, root_id: RootId, poly_args: &[Identifier]
 
    ) -> Result<(), ParseError2> {
 
    ) -> Result<(), ParseError> {
 
        // Make sure polymorphic arguments are unique and none of the
 
        // identifiers conflict with any imported scopes
 
        for (arg_idx, poly_arg) in poly_args.iter().enumerate() {
 
            for other_poly_arg in &poly_args[..arg_idx] {
 
                if poly_arg == other_poly_arg {
 
                    let module_source = &ctx.modules[root_id.index as usize].source;
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        module_source, poly_arg.position,
 
                        "This polymorphic argument is defined more than once"
 
                    ).with_postfixed_info(
 
                        module_source, other_poly_arg.position,
 
                        "It conflicts with this polymorphic argument"
 
                    ));
 
                }
 
            }
 

	
 
            // Check if identifier conflicts with a symbol defined or imported
 
            // in the current module
 
            if let Some(symbol) = ctx.symbols.resolve_symbol(root_id, &poly_arg.value) {
 
                // We have a conflict
 
                let module_source = &ctx.modules[root_id.index as usize].source;
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    module_source, poly_arg.position,
 
                    "This polymorphic argument conflicts with another symbol"
 
                ).with_postfixed_info(
 
                    module_source, symbol.position,
 
                    "It conflicts due to this symbol"
 
                ));
 
            }
 
        }
 

	
 
        // All arguments are fine
 
        Ok(())
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Small utilities
 
    //--------------------------------------------------------------------------
 

	
 
    fn enum_tag_type(min_tag_value: i64, max_tag_value: i64) -> PrimitiveType {
 
        // TODO: @consistency tag values should be handled correctly
 
        debug_assert!(min_tag_value <= max_tag_value);
 
        let abs_max_value = min_tag_value.abs().max(max_tag_value.abs());
 
        if abs_max_value <= u8::max_value() as i64 {
 
            PrimitiveType::Byte
 
        } else if abs_max_value <= u16::max_value() as i64 {
 
            PrimitiveType::Short
 
        } else if abs_max_value <= u32::max_value() as i64 {
 
            PrimitiveType::Int
 
        } else {
 
            PrimitiveType::Long
 
        }
 
    }
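    // A quick sketch of the mapping implemented above (values chosen purely for
    // illustration): the largest absolute tag value picks the backing primitive,
    // so with the current bounds checks:
    //   enum_tag_type(0, 255)      -> PrimitiveType::Byte
    //   enum_tag_type(-1, 256)     -> PrimitiveType::Short
    //   enum_tag_type(0, 70_000)   -> PrimitiveType::Int
    //   enum_tag_type(0, 1 << 40)  -> PrimitiveType::Long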
 

	
 
    fn find_root_id(ctx: &TypeCtx, definition_id: DefinitionId) -> RootId {
 
        // TODO: Keep in lookup or something
 
        for module in ctx.modules {
 
            let root_id = module.root_id;
 
            let root = &ctx.heap[root_id];
 
            for module_definition_id in root.definitions.iter() {
 
                if *module_definition_id == definition_id {
 
                    return root_id
 
                }
 
            }
 
        }
 

	
 
        debug_assert!(false, "DefinitionId without corresponding RootId");
 
        unreachable!();
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/utils.rs
Show inline comments
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use super::symbol_table::*;
 
use super::type_table::*;
 

	
 
/// Utility result type.
 
pub(crate) enum FindTypeResult<'t, 'i> {
 
    // Found the type exactly
 
    Found((&'t DefinedType, NamespacedIdentifierIter<'i>)),
 
    // Could not match symbol
 
    SymbolNotFound{ident_pos: InputPosition},
 
    // Matched part of the namespaced identifier, but not completely
 
    SymbolPartial{ident_pos: InputPosition, ident_iter: NamespacedIdentifierIter<'i>},
 
    // Symbol matched, but points to a namespace/module instead of a type
 
    SymbolNamespace{ident_pos: InputPosition, symbol_pos: InputPosition},
 
}
 

	
 
// TODO: @cleanup Find other uses of this pattern
 
impl<'t, 'i> FindTypeResult<'t, 'i> {
 
    /// Utility function to transform the `FindTypeResult` into a `Result` where
 
    /// `Ok` contains the resolved type, and `Err` contains a `ParseError` which
 
    /// can be readily returned. This is the most common use.
 
-    pub(crate) fn as_parse_error(self, module_source: &InputSource) -> Result<(&'t DefinedType, NamespacedIdentifierIter<'i>), ParseError2> {
+    pub(crate) fn as_parse_error(self, module_source: &InputSource) -> Result<(&'t DefinedType, NamespacedIdentifierIter<'i>), ParseError> {
 
        match self {
 
            FindTypeResult::Found(defined_type) => Ok(defined_type),
 
            FindTypeResult::SymbolNotFound{ident_pos} => {
 
-                Err(ParseError2::new_error(
+                Err(ParseError::new_error(
 
                    module_source, ident_pos,
 
                    "Could not resolve this identifier to a symbol"
 
                ))
 
            },
 
            FindTypeResult::SymbolPartial{ident_pos, ident_iter} => {
 
-                Err(ParseError2::new_error(
+                Err(ParseError::new_error(
 
                    module_source, ident_pos, 
 
                    &format!(
 
                        "Could not fully resolve this identifier to a symbol, was only able to match '{}'",
 
                        &String::from_utf8_lossy(ident_iter.returned_section())
 
                    )
 
                ))
 
            },
 
            FindTypeResult::SymbolNamespace{ident_pos, symbol_pos} => {
 
-                Err(ParseError2::new_error(
+                Err(ParseError::new_error(
 
                    module_source, ident_pos,
 
                    "This identifier was resolved to a namespace instead of a type"
 
                ).with_postfixed_info(
 
                    module_source, symbol_pos,
 
                    "This is the referenced namespace"
 
                ))
 
            }
 
        }
 
    }
 
}
 

	
 
/// Attempt to find the type pointed to by a (root, identifier) combination. The
 
/// type must match exactly (no parts in the namespace iterator remaining) and
 
/// must be a type, not a namespace. 
 
pub(crate) fn find_type_definition<'t, 'i>(
 
    symbols: &SymbolTable, types: &'t TypeTable, 
 
    root_id: RootId, identifier: &'i NamespacedIdentifier
 
) -> FindTypeResult<'t, 'i> {
 
    // Lookup symbol
 
    let (symbol, ident_iter) = symbols.resolve_namespaced_identifier(root_id, identifier);
 
    if symbol.is_none() { 
 
        return FindTypeResult::SymbolNotFound{ident_pos: identifier.position};
 
    }
 
    
 
    // Make sure we resolved it exactly
 
    let symbol = symbol.unwrap();
 
    if ident_iter.num_remaining() != 0 { 
 
        return FindTypeResult::SymbolPartial{
 
            ident_pos: identifier.position,
 
            ident_iter
 
        };
 
    }
 

	
 
    match symbol.symbol {
 
        Symbol::Namespace(_) => {
 
            FindTypeResult::SymbolNamespace{
 
                ident_pos: identifier.position, 
 
                symbol_pos: symbol.position
 
            }
 
        },
 
        Symbol::Definition((_, definition_id)) => {
 
            // If this function is called correctly, then we should always be
 
            // able to match the definition's ID to an entry in the type table.
 
            let definition = types.get_base_definition(&definition_id);
 
            debug_assert!(definition.is_some());
 
            FindTypeResult::Found((definition.unwrap(), ident_iter))
 
        }
 
    }
 
}
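// A minimal usage sketch, assuming a caller that already has the symbol and
// type tables at hand (the function and argument names below are illustrative
// only, not part of the parser itself): lookups are typically chained with
// `as_parse_error` so that a failed lookup immediately becomes a returnable
// error.
#[allow(dead_code)]
fn example_find_type<'t>(
    symbols: &SymbolTable, types: &'t TypeTable, module_source: &InputSource,
    root_id: RootId, identifier: &NamespacedIdentifier
) -> Result<&'t DefinedType, ParseError> {
    let (defined_type, _remaining_ident) =
        find_type_definition(symbols, types, root_id, identifier)
            .as_parse_error(module_source)?;
    Ok(defined_type)
}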
 

	
 
pub(crate) enum MatchPolymorphResult<'t> {
 
    Matching,
 
    InferAll(usize),
 
    Mismatch{defined_type: &'t DefinedType, ident_position: InputPosition, num_specified: usize},
 
    NoneExpected{defined_type: &'t DefinedType, ident_position: InputPosition},
 
}
 

	
 
impl<'t> MatchPolymorphResult<'t> {
 
-    pub(crate) fn as_parse_error(self, heap: &Heap, module_source: &InputSource) -> Result<usize, ParseError2> {
+    pub(crate) fn as_parse_error(self, heap: &Heap, module_source: &InputSource) -> Result<usize, ParseError> {
 
        match self {
 
            MatchPolymorphResult::Matching => Ok(0),
 
            MatchPolymorphResult::InferAll(count) => {
 
                debug_assert!(count > 0);
 
                Ok(count)
 
            },
 
            MatchPolymorphResult::Mismatch{defined_type, ident_position, num_specified} => {
 
                let type_identifier = heap[defined_type.ast_definition].identifier();
 
                let args_name = if defined_type.poly_vars.len() == 1 {
 
                    "argument"
 
                } else {
 
                    "arguments"
 
                };
 

	
 
-                return Err(ParseError2::new_error(
+                return Err(ParseError::new_error(
 
                    module_source, ident_position,
 
                    &format!(
 
                        "expected {} polymorphic {} (or none, to infer them) for the type {}, but {} were specified",
 
                        defined_type.poly_vars.len(), args_name, 
 
                        &String::from_utf8_lossy(&type_identifier.value),
 
                        num_specified
 
                    )
 
                ))
 
            },
 
            MatchPolymorphResult::NoneExpected{defined_type, ident_position, ..} => {
 
                let type_identifier = heap[defined_type.ast_definition].identifier();
 
-                return Err(ParseError2::new_error(
+                return Err(ParseError::new_error(
 
                    module_source, ident_position,
 
                    &format!(
 
                        "the type {} is not polymorphic",
 
                        &String::from_utf8_lossy(&type_identifier.value)
 
                    )
 
                ))
 
            }
 
        }
 
    }
 
}
 

	
 
/// Attempt to match the polymorphic arguments to the number of polymorphic
 
/// variables in the definition.
 
pub(crate) fn match_polymorphic_args_to_vars<'t>(
 
    defined_type: &'t DefinedType, poly_args: Option<&[ParserTypeId]>, ident_position: InputPosition
 
) -> MatchPolymorphResult<'t> {
 
    if defined_type.poly_vars.is_empty() {
 
        // No polymorphic variables on type
 
        if poly_args.is_some() {
 
            return MatchPolymorphResult::NoneExpected{
 
                defined_type,
 
                ident_position, 
 
            };
 
        }
 
    } else {
 
        // Polymorphic variables on type
 
        let has_specified = poly_args.map_or(false, |a| a.len() != 0);
 
        if !has_specified {
 
            // Implicitly infer all of the polymorphic arguments
 
            return MatchPolymorphResult::InferAll(defined_type.poly_vars.len());
 
        }
 

	
 
        let num_specified = poly_args.unwrap().len();
 
        if num_specified != defined_type.poly_vars.len() {
 
            return MatchPolymorphResult::Mismatch{
 
                defined_type,
 
                ident_position,
 
                num_specified,
 
            };
 
        }
 
    }
 

	
 
    MatchPolymorphResult::Matching
 
}
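// Usage sketch along the same lines as above (argument names are illustrative):
// the match result is converted into either a hard error or the number of
// polymorphic arguments that still have to be inferred.
#[allow(dead_code)]
fn example_match_poly_args(
    heap: &Heap, module_source: &InputSource, defined_type: &DefinedType,
    poly_args: Option<&[ParserTypeId]>, position: InputPosition
) -> Result<usize, ParseError> {
    match_polymorphic_args_to_vars(defined_type, poly_args, position)
        .as_parse_error(heap, module_source)
}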
 
\ No newline at end of file
src/protocol/parser/visitor.rs
Show inline comments
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::parser::{symbol_table::*, type_table::*, LexedModule};
 

	
 
type Unit = ();
 
-pub(crate) type VisitorResult = Result<Unit, ParseError2>;
+pub(crate) type VisitorResult = Result<Unit, ParseError>;
 

	
 
/// Globally configured vector capacity for statement buffers in visitor 
 
/// implementations
 
pub(crate) const STMT_BUFFER_INIT_CAPACITY: usize = 256;
 
/// Globally configured vector capacity for expression buffers in visitor
 
/// implementations
 
pub(crate) const EXPR_BUFFER_INIT_CAPACITY: usize = 256;
 
/// Globally configured vector capacity for parser type buffers in visitor
 
/// implementations
 
pub(crate) const TYPE_BUFFER_INIT_CAPACITY: usize = 128;
 

	
 
/// General context structure that is used while traversing the AST.
 
pub(crate) struct Ctx<'p> {
 
    pub heap: &'p mut Heap,
 
    pub module: &'p LexedModule,
 
    pub symbols: &'p mut SymbolTable,
 
    pub types: &'p mut TypeTable,
 
}
 

	
 
/// Visitor is a generic trait that will fully walk the AST. The default
 
/// implementation of the visitors is to not recurse. The exception is the
 
/// top-level `visit_definition`, `visit_stmt` and `visit_expr` methods, which
 
/// call the appropriate visitor function.
 
pub(crate) trait Visitor2 {
 
    // Entry point
 
    fn visit_module(&mut self, ctx: &mut Ctx) -> VisitorResult {
 
        let mut def_index = 0;
 
        loop {
 
            let definition_id = {
 
                let root = &ctx.heap[ctx.module.root_id];
 
                if def_index >= root.definitions.len() {
 
                    return Ok(())
 
                }
 

	
 
                root.definitions[def_index]
 
            };
 

	
 
            self.visit_definition(ctx, definition_id)?;
 
            def_index += 1;
 
        }
 
    }
 

	
 
    // Definitions
 
    // --- enum matching
 
    fn visit_definition(&mut self, ctx: &mut Ctx, id: DefinitionId) -> VisitorResult {
 
        match &ctx.heap[id] {
 
            Definition::Enum(def) => {
 
                let def = def.this;
 
                self.visit_enum_definition(ctx, def)
 
            },
 
            Definition::Struct(def) => {
 
                let def = def.this;
 
                self.visit_struct_definition(ctx, def)
 
            },
 
            Definition::Component(def) => {
 
                let def = def.this;
 
                self.visit_component_definition(ctx, def)
 
            },
 
            Definition::Function(def) => {
 
                let def = def.this;
 
                self.visit_function_definition(ctx, def)
 
            }
 
        }
 
    }
 

	
 
    // --- enum variant handling
 
    fn visit_enum_definition(&mut self, _ctx: &mut Ctx, _id: EnumId) -> VisitorResult { Ok(()) }
 
    fn visit_struct_definition(&mut self, _ctx: &mut Ctx, _id: StructId) -> VisitorResult { Ok(()) }
 
    fn visit_component_definition(&mut self, _ctx: &mut Ctx, _id: ComponentId) -> VisitorResult { Ok(()) }
 
    fn visit_function_definition(&mut self, _ctx: &mut Ctx, _id: FunctionId) -> VisitorResult { Ok(()) }
 

	
 
    // Statements
 
    // --- enum matching
 
    fn visit_stmt(&mut self, ctx: &mut Ctx, id: StatementId) -> VisitorResult {
 
        match &ctx.heap[id] {
 
            Statement::Block(stmt) => {
 
                let this = stmt.this;
 
                self.visit_block_stmt(ctx, this)
 
            },
 
            Statement::Local(stmt) => {
 
                let this = stmt.this();
 
                self.visit_local_stmt(ctx, this)
 
            },
 
            Statement::Skip(stmt) => {
 
                let this = stmt.this;
 
                self.visit_skip_stmt(ctx, this)
 
            },
 
            Statement::Labeled(stmt) => {
 
                let this = stmt.this;
 
                self.visit_labeled_stmt(ctx, this)
 
            },
 
            Statement::If(stmt) => {
 
                let this = stmt.this;
 
                self.visit_if_stmt(ctx, this)
 
            },
 
            Statement::EndIf(_stmt) => Ok(()),
 
            Statement::While(stmt) => {
 
                let this = stmt.this;
 
                self.visit_while_stmt(ctx, this)
 
            },
 
            Statement::EndWhile(_stmt) => Ok(()),
 
            Statement::Break(stmt) => {
 
                let this = stmt.this;
 
                self.visit_break_stmt(ctx, this)
 
            },
 
            Statement::Continue(stmt) => {
 
                let this = stmt.this;
 
                self.visit_continue_stmt(ctx, this)
 
            },
 
            Statement::Synchronous(stmt) => {
 
                let this = stmt.this;
 
                self.visit_synchronous_stmt(ctx, this)
 
            },
 
            Statement::EndSynchronous(_stmt) => Ok(()),
 
            Statement::Return(stmt) => {
 
                let this = stmt.this;
 
                self.visit_return_stmt(ctx, this)
 
            },
 
            Statement::Assert(stmt) => {
 
                let this = stmt.this;
 
                self.visit_assert_stmt(ctx, this)
 
            },
 
            Statement::Goto(stmt) => {
 
                let this = stmt.this;
 
                self.visit_goto_stmt(ctx, this)
 
            },
 
            Statement::New(stmt) => {
 
                let this = stmt.this;
 
                self.visit_new_stmt(ctx, this)
 
            },
 
            Statement::Expression(stmt) => {
 
                let this = stmt.this;
 
                self.visit_expr_stmt(ctx, this)
 
            }
 
        }
 
    }
 

	
 
    fn visit_local_stmt(&mut self, ctx: &mut Ctx, id: LocalStatementId) -> VisitorResult {
 
        match &ctx.heap[id] {
 
            LocalStatement::Channel(stmt) => {
 
                let this = stmt.this;
 
                self.visit_local_channel_stmt(ctx, this)
 
            },
 
            LocalStatement::Memory(stmt) => {
 
                let this = stmt.this;
 
                self.visit_local_memory_stmt(ctx, this)
 
            },
 
        }
 
    }
 

	
 
    // --- enum variant handling
 
    fn visit_block_stmt(&mut self, _ctx: &mut Ctx, _id: BlockStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_local_memory_stmt(&mut self, _ctx: &mut Ctx, _id: MemoryStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_local_channel_stmt(&mut self, _ctx: &mut Ctx, _id: ChannelStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_skip_stmt(&mut self, _ctx: &mut Ctx, _id: SkipStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_labeled_stmt(&mut self, _ctx: &mut Ctx, _id: LabeledStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_if_stmt(&mut self, _ctx: &mut Ctx, _id: IfStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_while_stmt(&mut self, _ctx: &mut Ctx, _id: WhileStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_break_stmt(&mut self, _ctx: &mut Ctx, _id: BreakStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_continue_stmt(&mut self, _ctx: &mut Ctx, _id: ContinueStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_synchronous_stmt(&mut self, _ctx: &mut Ctx, _id: SynchronousStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_return_stmt(&mut self, _ctx: &mut Ctx, _id: ReturnStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_assert_stmt(&mut self, _ctx: &mut Ctx, _id: AssertStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_goto_stmt(&mut self, _ctx: &mut Ctx, _id: GotoStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_new_stmt(&mut self, _ctx: &mut Ctx, _id: NewStatementId) -> VisitorResult { Ok(()) }
 
    fn visit_expr_stmt(&mut self, _ctx: &mut Ctx, _id: ExpressionStatementId) -> VisitorResult { Ok(()) }
 

	
 
    // Expressions
 
    // --- enum matching
 
    fn visit_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> VisitorResult {
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let this = expr.this;
 
                self.visit_assignment_expr(ctx, this)
 
            },
 
            Expression::Conditional(expr) => {
 
                let this = expr.this;
 
                self.visit_conditional_expr(ctx, this)
 
            }
 
            Expression::Binary(expr) => {
 
                let this = expr.this;
 
                self.visit_binary_expr(ctx, this)
 
            }
 
            Expression::Unary(expr) => {
 
                let this = expr.this;
 
                self.visit_unary_expr(ctx, this)
 
            }
 
            Expression::Indexing(expr) => {
 
                let this = expr.this;
 
                self.visit_indexing_expr(ctx, this)
 
            }
 
            Expression::Slicing(expr) => {
 
                let this = expr.this;
 
                self.visit_slicing_expr(ctx, this)
 
            }
 
            Expression::Select(expr) => {
 
                let this = expr.this;
 
                self.visit_select_expr(ctx, this)
 
            }
 
            Expression::Array(expr) => {
 
                let this = expr.this;
 
                self.visit_array_expr(ctx, this)
 
            }
 
            Expression::Literal(expr) => {
 
                let this = expr.this;
 
                self.visit_literal_expr(ctx, this)
 
            }
 
            Expression::Call(expr) => {
 
                let this = expr.this;
 
                self.visit_call_expr(ctx, this)
 
            }
 
            Expression::Variable(expr) => {
 
                let this = expr.this;
 
                self.visit_variable_expr(ctx, this)
 
            }
 
        }
 
    }
 

	
 
    fn visit_assignment_expr(&mut self, _ctx: &mut Ctx, _id: AssignmentExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_conditional_expr(&mut self, _ctx: &mut Ctx, _id: ConditionalExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_binary_expr(&mut self, _ctx: &mut Ctx, _id: BinaryExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_unary_expr(&mut self, _ctx: &mut Ctx, _id: UnaryExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_indexing_expr(&mut self, _ctx: &mut Ctx, _id: IndexingExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_slicing_expr(&mut self, _ctx: &mut Ctx, _id: SlicingExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_select_expr(&mut self, _ctx: &mut Ctx, _id: SelectExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_array_expr(&mut self, _ctx: &mut Ctx, _id: ArrayExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_literal_expr(&mut self, _ctx: &mut Ctx, _id: LiteralExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_call_expr(&mut self, _ctx: &mut Ctx, _id: CallExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_variable_expr(&mut self, _ctx: &mut Ctx, _id: VariableExpressionId) -> VisitorResult { Ok(()) }
 

	
 
    // Types
 
    fn visit_parser_type(&mut self, _ctx: &mut Ctx, _id: ParserTypeId) -> VisitorResult { Ok(()) }
 
}
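// A short sketch of how the trait is meant to be used (the struct below is
// hypothetical and only serves as an illustration): a pass overrides the
// handful of visit_* methods it cares about and inherits the non-recursing
// defaults for everything else.
#[allow(dead_code)]
struct FunctionCounter { num_functions: usize }

impl Visitor2 for FunctionCounter {
    fn visit_function_definition(&mut self, _ctx: &mut Ctx, _id: FunctionId) -> VisitorResult {
        // Only count the definition; we deliberately do not recurse further.
        self.num_functions += 1;
        Ok(())
    }
}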
 
\ No newline at end of file
src/protocol/parser/visitor_linker.rs
Show inline comments
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::parser::{
 
    symbol_table::*, 
 
    type_table::*,
 
    utils::*,
 
};
 

	
 
use super::visitor::{
 
    STMT_BUFFER_INIT_CAPACITY,
 
    EXPR_BUFFER_INIT_CAPACITY,
 
    TYPE_BUFFER_INIT_CAPACITY,
 
    Ctx, 
 
    Visitor2, 
 
    VisitorResult
 
};
 

	
 
#[derive(PartialEq, Eq)]
 
enum DefinitionType {
 
    None,
 
    Primitive(ComponentId),
 
    Composite(ComponentId),
 
    Function(FunctionId)
 
}
 

	
 
impl DefinitionType {
 
    fn is_primitive(&self) -> bool { if let Self::Primitive(_) = self { true } else { false } }
 
    fn is_composite(&self) -> bool { if let Self::Composite(_) = self { true } else { false } }
 
    fn is_function(&self) -> bool { if let Self::Function(_) = self { true } else { false } }
 
}
 

	
 
/// This particular visitor will go through the entire AST in a recursive manner
 
/// and check if all statements and expressions are legal (e.g. no "return"
 
/// statements in component definitions), and will link certain AST nodes to
 
/// their appropriate targets (e.g. goto statements, or function calls).
 
///
 
/// This visitor will not perform control-flow analysis (e.g. making sure that
 
/// each function actually returns) and will also not perform type checking. So
 
/// the linking of function calls and component instantiations will be checked
 
/// and linked to the appropriate definitions, but the return types and/or
 
/// arguments will not be checked for validity.
 
///
 
/// The visitor first visits each statement in a block in a breadth-first
/// manner, so that we are sure to have found all variables/labels in a
 
/// particular block. In this phase nodes may queue statements for insertion
 
/// (e.g. the insertion of an `EndIf` statement for a particular `If`
 
/// statement). These will be inserted after visiting every node, after which
 
/// the visitor recurses into each statement in a block.
 
///
 
/// Because of this scheme, expressions will not be visited in the breadth-first
 
/// pass.
 
pub(crate) struct ValidityAndLinkerVisitor {
 
    /// `in_sync` is `Some(id)` if the visitor is visiting the children of a
 
    /// synchronous statement. A single value is sufficient as nested
 
    /// synchronous statements are not allowed
 
    in_sync: Option<SynchronousStatementId>,
 
    /// `in_while` contains the last encountered `While` statement. This is used
 
    /// to resolve unlabeled `Continue`/`Break` statements.
 
    in_while: Option<WhileStatementId>,
 
    // Traversal state: current scope (which can be used to find the parent
 
    // scope), the definition variant we are considering, and whether the
 
    // visitor is performing breadthwise block statement traversal.
 
    cur_scope: Option<Scope>,
 
    def_type: DefinitionType,
 
    performing_breadth_pass: bool,
 
    // Parent expression (the previous stmt/expression we visited that could be
 
    // used as an expression parent)
 
    expr_parent: ExpressionParent,
 
    // Keeping track of relative position in block in the breadth-first pass.
 
    // May not correspond to block.statement[index] if any statements are
 
    // inserted after the breadth-pass
 
    relative_pos_in_block: u32,
 
    // Single buffer of statement IDs that we want to traverse in a block.
 
    // Required to work around Rust borrowing rules and to prevent constant
 
    // cloning of vectors.
 
    statement_buffer: Vec<StatementId>,
 
    // Another buffer, now with expression IDs, to prevent constant cloning of
 
    // vectors while working around Rust's borrowing rules
 
    expression_buffer: Vec<ExpressionId>,
 
    // Yet another buffer, now with parser type IDs, similar to above
 
    parser_type_buffer: Vec<ParserTypeId>,
 
    // Statements to insert after the breadth pass in a single block
 
    insert_buffer: Vec<(u32, StatementId)>,
 
}
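// Sketch of the two-pass scheme described above (mirroring the definition
// visitors further below): each definition body is traversed twice, first
// breadth-wise to register locals/labels and queue pseudo-statements, then
// depth-wise to recurse into nested statements and expressions.
//
//     self.performing_breadth_pass = true;
//     self.visit_stmt(ctx, body_id)?;    // breadth pass: declarations, labels, EndIf/EndWhile inserts
//     self.performing_breadth_pass = false;
//     self.visit_stmt(ctx, body_id)?;    // depth pass: recurse and link expressions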
 

	
 
impl ValidityAndLinkerVisitor {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            in_sync: None,
 
            in_while: None,
 
            cur_scope: None,
 
            expr_parent: ExpressionParent::None,
 
            def_type: DefinitionType::None,
 
            performing_breadth_pass: false,
 
            relative_pos_in_block: 0,
 
            statement_buffer: Vec::with_capacity(STMT_BUFFER_INIT_CAPACITY),
 
            expression_buffer: Vec::with_capacity(EXPR_BUFFER_INIT_CAPACITY),
 
            parser_type_buffer: Vec::with_capacity(TYPE_BUFFER_INIT_CAPACITY),
 
            insert_buffer: Vec::with_capacity(32),
 
        }
 
    }
 

	
 
    fn reset_state(&mut self) {
 
        self.in_sync = None;
 
        self.in_while = None;
 
        self.cur_scope = None;
 
        self.expr_parent = ExpressionParent::None;
 
        self.def_type = DefinitionType::None;
 
        self.relative_pos_in_block = 0;
 
        self.performing_breadth_pass = false;
 
        self.statement_buffer.clear();
 
        self.expression_buffer.clear();
 
        self.parser_type_buffer.clear();
 
        self.insert_buffer.clear();
 
    }
 

	
 
    /// Debug call to ensure that we didn't make any mistakes in any of the
 
    /// employed buffers
 
    fn check_post_definition_state(&self) {
 
        debug_assert!(self.statement_buffer.is_empty());
 
        debug_assert!(self.expression_buffer.is_empty());
 
        debug_assert!(self.parser_type_buffer.is_empty());
 
        debug_assert!(self.insert_buffer.is_empty());
 
    }
 
}
 

	
 
impl Visitor2 for ValidityAndLinkerVisitor {
 
    //--------------------------------------------------------------------------
 
    // Definition visitors
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_component_definition(&mut self, ctx: &mut Ctx, id: ComponentId) -> VisitorResult {
 
        self.reset_state();
 

	
 
        self.def_type = match &ctx.heap[id].variant {
 
            ComponentVariant::Primitive => DefinitionType::Primitive(id),
 
            ComponentVariant::Composite => DefinitionType::Composite(id),
 
        };
 
        self.cur_scope = Some(Scope::Definition(id.upcast()));
 
        self.expr_parent = ExpressionParent::None;
 

	
 
        // Visit types of parameters
 
        debug_assert!(self.parser_type_buffer.is_empty());
 
        let comp_def = &ctx.heap[id];
 
        self.parser_type_buffer.extend(
 
            comp_def.parameters
 
                .iter()
 
                .map(|id| ctx.heap[*id].parser_type)
 
        );
 

	
 
        let num_types = self.parser_type_buffer.len();
 
        for idx in 0..num_types {
 
            self.visit_parser_type(ctx, self.parser_type_buffer[idx])?;
 
        }
 

	
 
        self.parser_type_buffer.clear();
 

	
 
        // Visit statements in component body
 
        let body_id = ctx.heap[id].body;
 
        self.performing_breadth_pass = true;
 
        self.visit_stmt(ctx, body_id)?;
 
        self.performing_breadth_pass = false;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        self.check_post_definition_state();
 
        Ok(())
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionId) -> VisitorResult {
 
        self.reset_state();
 

	
 
        // Set internal statement indices
 
        self.def_type = DefinitionType::Function(id);
 
        self.cur_scope = Some(Scope::Definition(id.upcast()));
 
        self.expr_parent = ExpressionParent::None;
 

	
 
        // Visit types of parameters
 
        debug_assert!(self.parser_type_buffer.is_empty());
 
        let func_def = &ctx.heap[id];
 
        self.parser_type_buffer.extend(
 
            func_def.parameters
 
                .iter()
 
                .map(|id| ctx.heap[*id].parser_type)
 
        );
 
        self.parser_type_buffer.push(func_def.return_type);
 

	
 
        let num_types = self.parser_type_buffer.len();
 
        for idx in 0..num_types {
 
            self.visit_parser_type(ctx, self.parser_type_buffer[idx])?;
 
        }
 

	
 
        self.parser_type_buffer.clear();
 

	
 
        // Visit statements in function body
 
        let body_id = ctx.heap[id].body;
 
        self.performing_breadth_pass = true;
 
        self.visit_stmt(ctx, body_id)?;
 
        self.performing_breadth_pass = false;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        self.check_post_definition_state();
 
        Ok(())
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Statement visitors
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        self.visit_block_stmt_with_hint(ctx, id, None)
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, ctx: &mut Ctx, id: MemoryStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let variable_id = ctx.heap[id].variable;
 
            self.checked_local_add(ctx, self.relative_pos_in_block, variable_id)?;
 
        } else {
 
            let variable_id = ctx.heap[id].variable;
 
            let parser_type_id = ctx.heap[variable_id].parser_type;
 
            self.visit_parser_type(ctx, parser_type_id)?;
 

	
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, ctx: &mut Ctx, id: ChannelStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let (from_id, to_id) = {
 
                let stmt = &ctx.heap[id];
 
                (stmt.from, stmt.to)
 
            };
 
            self.checked_local_add(ctx, self.relative_pos_in_block, from_id)?;
 
            self.checked_local_add(ctx, self.relative_pos_in_block, to_id)?;
 
        } else {
 
            let chan_stmt = &ctx.heap[id];
 
            let from_type_id = ctx.heap[chan_stmt.from].parser_type;
 
            let to_type_id = ctx.heap[chan_stmt.to].parser_type;
 
            self.visit_parser_type(ctx, from_type_id)?;
 
            self.visit_parser_type(ctx, to_type_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            // Add label to block lookup
 
            self.checked_label_add(ctx, id)?;
 

	
 
            // Modify labeled statement itself
 
            let labeled = &mut ctx.heap[id];
 
            labeled.relative_pos_in_block = self.relative_pos_in_block;
 
            labeled.in_sync = self.in_sync.clone();
 
        }
 

	
 
        let body_id = ctx.heap[id].body;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let position = ctx.heap[id].position;
 
            let end_if_id = ctx.heap.alloc_end_if_statement(|this| {
 
                EndIfStatement {
 
                    this,
 
                    start_if: id,
 
                    position,
 
                    next: None,
 
                }
 
            });
 
            let stmt = &mut ctx.heap[id];
 
            stmt.end_if = Some(end_if_id);
 
            self.insert_buffer.push((self.relative_pos_in_block + 1, end_if_id.upcast()));
 
        } else {
 
            // Traverse expression and bodies
 
            let (test_id, true_id, false_id) = {
 
                let stmt = &ctx.heap[id];
 
                (stmt.test, stmt.true_body, stmt.false_body)
 
            };
 

	
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::If(id);
 
            self.visit_expr(ctx, test_id)?;
 
            self.expr_parent = ExpressionParent::None;
 

	
 
            self.visit_stmt(ctx, true_id)?;
 
            self.visit_stmt(ctx, false_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_while_stmt(&mut self, ctx: &mut Ctx, id: WhileStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let position = ctx.heap[id].position;
 
            let end_while_id = ctx.heap.alloc_end_while_statement(|this| {
 
                EndWhileStatement {
 
                    this,
 
                    start_while: id,
 
                    position,
 
                    next: None,
 
                }
 
            });
 
            let stmt = &mut ctx.heap[id];
 
            stmt.end_while = Some(end_while_id);
 
            stmt.in_sync = self.in_sync.clone();
 

	
 
            self.insert_buffer.push((self.relative_pos_in_block + 1, end_while_id.upcast()));
 
        } else {
 
            let (test_id, body_id) = {
 
                let stmt = &ctx.heap[id];
 
                (stmt.test, stmt.body)
 
            };
 
            let old_while = self.in_while.replace(id);
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::While(id);
 
            self.visit_expr(ctx, test_id)?;
 
            self.expr_parent = ExpressionParent::None;
 

	
 
            self.visit_stmt(ctx, body_id)?;
 
            self.in_while = old_while;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_break_stmt(&mut self, ctx: &mut Ctx, id: BreakStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            // Break statements with a label should already be resolvable in the
            // breadth pass; there is no need to postpone this until all labels
            // have been resolved.
 
            let target_end_while = {
 
                let stmt = &ctx.heap[id];
 
                let target_while_id = self.resolve_break_or_continue_target(ctx, stmt.position, &stmt.label)?;
 
                let target_while = &ctx.heap[target_while_id];
 
                debug_assert!(target_while.end_while.is_some());
 
                target_while.end_while.unwrap()
 
            };
 

	
 
            let stmt = &mut ctx.heap[id];
 
            stmt.target = Some(target_end_while);
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_continue_stmt(&mut self, ctx: &mut Ctx, id: ContinueStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let target_while_id = {
 
                let stmt = &ctx.heap[id];
 
                self.resolve_break_or_continue_target(ctx, stmt.position, &stmt.label)?
 
            };
 

	
 
            let stmt = &mut ctx.heap[id];
 
            stmt.target = Some(target_while_id)
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            // Check for validity of synchronous statement
 
            let cur_sync_position = ctx.heap[id].position;
 
            if self.in_sync.is_some() {
 
                // Nested synchronous statement
 
                let old_sync = &ctx.heap[self.in_sync.unwrap()];
 
                return Err(
 
-                    ParseError2::new_error(&ctx.module.source, cur_sync_position, "Illegal nested synchronous statement")
+                    ParseError::new_error(&ctx.module.source, cur_sync_position, "Illegal nested synchronous statement")
 
                        .with_postfixed_info(&ctx.module.source, old_sync.position, "It is nested in this synchronous statement")
 
                );
 
            }
 

	
 
            if !self.def_type.is_primitive() {
 
-                return Err(ParseError2::new_error(
+                return Err(ParseError::new_error(
 
                    &ctx.module.source, cur_sync_position,
 
                    "Synchronous statements may only be used in primitive components"
 
                ));
 
            }
 

	
 
            // Append SynchronousEnd pseudo-statement
 
            let sync_end_id = ctx.heap.alloc_end_synchronous_statement(|this| EndSynchronousStatement{
 
                this,
 
                position: cur_sync_position,
 
                start_sync: id,
 
                next: None,
 
            });
 
            let sync_start = &mut ctx.heap[id];
 
            sync_start.end_sync = Some(sync_end_id);
 
            self.insert_buffer.push((self.relative_pos_in_block + 1, sync_end_id.upcast()));
 
        } else {
 
            let sync_body = ctx.heap[id].body;
 
            let old = self.in_sync.replace(id);
 
            self.visit_stmt_with_hint(ctx, sync_body, Some(id))?;
 
            self.in_sync = old;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let stmt = &ctx.heap[id];
 
            if !self.def_type.is_function() {
 
                return Err(
 
-                    ParseError2::new_error(&ctx.module.source, stmt.position, "Return statements may only appear in function bodies")
+                    ParseError::new_error(&ctx.module.source, stmt.position, "Return statements may only appear in function bodies")
 
                );
 
            }
 
        } else {
 
            // If here then we are within a function
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::Return(id);
 
            self.visit_expr(ctx, ctx.heap[id].expression)?;
 
            self.expr_parent = ExpressionParent::None;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_assert_stmt(&mut self, ctx: &mut Ctx, id: AssertStatementId) -> VisitorResult {
 
        let stmt = &ctx.heap[id];
 
        if self.performing_breadth_pass {
 
            if self.def_type.is_function() {
 
                // TODO: We probably want to allow this. Mark the function as
 
                //  using asserts, and then only allow calls to these functions
 
                //  within components. Such a marker will cascade through any
 
                //  functions that then call an asserting function
 
                return Err(
 
-                    ParseError2::new_error(&ctx.module.source, stmt.position, "Illegal assert statement in a function")
+                    ParseError::new_error(&ctx.module.source, stmt.position, "Illegal assert statement in a function")
 
                );
 
            }
 

	
 
            // We are in a component of some sort, but we also need to be within a
 
            // synchronous statement
 
            if self.in_sync.is_none() {
 
                return Err(
 
-                    ParseError2::new_error(&ctx.module.source, stmt.position, "Illegal assert statement outside of a synchronous block")
+                    ParseError::new_error(&ctx.module.source, stmt.position, "Illegal assert statement outside of a synchronous block")
 
                );
 
            }
 
        } else {
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            let expr_id = stmt.expression;
 

	
 
            self.expr_parent = ExpressionParent::Assert(id);
 
            self.visit_expr(ctx, expr_id)?;
 
            self.expr_parent = ExpressionParent::None;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_goto_stmt(&mut self, ctx: &mut Ctx, id: GotoStatementId) -> VisitorResult {
 
        if !self.performing_breadth_pass {
 
            // Goto labels must be resolved after the breadth pass; that way we
            // are able to find all the labels in the current and outer scopes.
 
            let target_id = self.find_label(ctx, &ctx.heap[id].label)?;
 
            ctx.heap[id].target = Some(target_id);
 

	
 
            let target = &ctx.heap[target_id];
 
            if self.in_sync != target.in_sync {
 
                // A goto may only target the current scope or an outer scope.
                // Nested sync statements are not allowed, so if the values do
                // not match we must currently be inside a sync scope.
 
                debug_assert!(self.in_sync.is_some());
 
                let goto_stmt = &ctx.heap[id];
 
                let sync_stmt = &ctx.heap[self.in_sync.unwrap()];
 
                return Err(
 
-                    ParseError2::new_error(&ctx.module.source, goto_stmt.position, "Goto may not escape the surrounding synchronous block")
+                    ParseError::new_error(&ctx.module.source, goto_stmt.position, "Goto may not escape the surrounding synchronous block")
 
                        .with_postfixed_info(&ctx.module.source, target.position, "This is the target of the goto statement")
 
                        .with_postfixed_info(&ctx.module.source, sync_stmt.position, "Which will jump past this statement")
 
                );
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_new_stmt(&mut self, ctx: &mut Ctx, id: NewStatementId) -> VisitorResult {
 
        // Link the call expression following the new statement
 
        if self.performing_breadth_pass {
 
            // TODO: Cleanup error messages, can be done cleaner
 
            // Make sure new statement occurs within a composite component
 
            let call_expr_id = ctx.heap[id].expression;
 
            if !self.def_type.is_composite() {
 
                let new_stmt = &ctx.heap[id];
 
                return Err(
 
-                    ParseError2::new_error(&ctx.module.source, new_stmt.position, "Instantiating components may only be done in composite components")
+                    ParseError::new_error(&ctx.module.source, new_stmt.position, "Instantiating components may only be done in composite components")
 
                );
 
            }
 

	
 
            // We make sure that we point to a symbolic method. Checking that it
 
            // points to a component is done in the depth pass.
 
            let call_expr = &ctx.heap[call_expr_id];
 
            if let Method::Symbolic(_) = &call_expr.method {
 
                // We're fine
 
            } else {
 
                return Err(
 
-                    ParseError2::new_error(&ctx.module.source, call_expr.position, "Must instantiate a component")
+                    ParseError::new_error(&ctx.module.source, call_expr.position, "Must instantiate a component")
 
                );
 
            }
 
        } else {
 
            // Just call `visit_call_expr`. We do some extra work we don't have
 
            // to, but this prevents silly mistakes.
 
            let call_expr_id = ctx.heap[id].expression;
 

	
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::New(id);
 
            self.visit_call_expr(ctx, call_expr_id)?;
 
            self.expr_parent = ExpressionParent::None;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        if !self.performing_breadth_pass {
 
            let expr_id = ctx.heap[id].expression;
 

	
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::ExpressionStmt(id);
 
            self.visit_expr(ctx, expr_id)?;
 
            self.expr_parent = ExpressionParent::None;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 

	
 
    //--------------------------------------------------------------------------
 
    // Expression visitors
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let upcast_id = id.upcast();
 
        let assignment_expr = &mut ctx.heap[id];
 

	
 
        let left_expr_id = assignment_expr.left;
 
        let right_expr_id = assignment_expr.right;
 
        let old_expr_parent = self.expr_parent;
 
        assignment_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, right_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 
        Ok(())
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 
        let upcast_id = id.upcast();
 
        let conditional_expr = &mut ctx.heap[id];
 

	
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        conditional_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, true_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 2);
 
        self.visit_expr(ctx, false_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 
        let upcast_id = id.upcast();
 
        let binary_expr = &mut ctx.heap[id];
 
        let left_expr_id = binary_expr.left;
 
        let right_expr_id = binary_expr.right;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        binary_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, right_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let unary_expr = &mut ctx.heap[id];
 
        let expr_id = unary_expr.expression;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        unary_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(id.upcast(), 0);
 
        self.visit_expr(ctx, expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 
        let upcast_id = id.upcast();
 
        let indexing_expr = &mut ctx.heap[id];
 

	
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        indexing_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, index_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 
        let upcast_id = id.upcast();
 
        let slicing_expr = &mut ctx.heap[id];
 

	
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        slicing_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 0);
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 1);
 
        self.visit_expr(ctx, from_expr_id)?;
 
        self.expr_parent = ExpressionParent::Expression(upcast_id, 2);
 
        self.visit_expr(ctx, to_expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let select_expr = &mut ctx.heap[id];
 
        let expr_id = select_expr.subject;
 

	
 
        let old_expr_parent = self.expr_parent;
 
        select_expr.parent = old_expr_parent;
 

	
 
        self.expr_parent = ExpressionParent::Expression(id.upcast(), 0);
 
        self.visit_expr(ctx, expr_id)?;
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let upcast_id = id.upcast();
 
        let array_expr = &mut ctx.heap[id];
 

	
 
        let old_num_exprs = self.expression_buffer.len();
 
        self.expression_buffer.extend(&array_expr.elements);
 
        let new_num_exprs = self.expression_buffer.len();
 

	
 
        let old_expr_parent = self.expr_parent;
 
        array_expr.parent = old_expr_parent;
 

	
 
        for field_expr_idx in old_num_exprs..new_num_exprs {
 
            let field_expr_id = self.expression_buffer[field_expr_idx];
 
            self.expr_parent = ExpressionParent::Expression(upcast_id, field_expr_idx as u32);
 
            self.visit_expr(ctx, field_expr_id)?;
 
        }
 

	
 
        self.expression_buffer.truncate(old_num_exprs);
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        const FIELD_NOT_FOUND_SENTINEL: usize = usize::max_value();
 

	
 
        let constant_expr = &mut ctx.heap[id];
 
        let old_expr_parent = self.expr_parent;
 
        constant_expr.parent = old_expr_parent;
 

	
 
        match &mut constant_expr.value {
 
            Literal::Null | Literal::True | Literal::False |
 
            Literal::Character(_) | Literal::Integer(_) => {
 
                // Just the parent has to be set, done above
 
            },
 
            Literal::Struct(literal) => {
 
                let upcast_id = id.upcast();
 

	
 
                // Retrieve and set the literal's definition
 
                let definition = self.find_symbol_of_type(
 
                    &ctx.module.source, ctx.module.root_id, &ctx.symbols, &ctx.types,
 
                    &literal.identifier, TypeClass::Struct
 
                )?;
 
                literal.definition = Some(definition.ast_definition);
 

	
 
                let definition = definition.definition.as_struct();
 

	
 
                // Make sure all fields are specified, none are specified twice
 
                // and all fields exist on the struct definition
 
                let mut specified = Vec::new(); // TODO: @performance
 
                specified.resize(definition.fields.len(), false);
 

	
 
                for field in &mut literal.fields {
 
                    // Find field in the struct definition
 
                    field.field_idx = FIELD_NOT_FOUND_SENTINEL;
 
                    for (def_field_idx, def_field) in definition.fields.iter().enumerate() {
 
                        if field.identifier == def_field.identifier {
 
                            field.field_idx = def_field_idx;
 
                            break;
 
                        }
 
                    }
 

	
 
                    // Check if not found
 
                    if field.field_idx == FIELD_NOT_FOUND_SENTINEL {
 
-                        return Err(ParseError2::new_error(
+                        return Err(ParseError::new_error(
 
                            &ctx.module.source, field.identifier.position,
 
                            &format!(
 
                                "This field does not exist on the struct '{}'",
 
                                &String::from_utf8_lossy(&literal.identifier.value),
 
                            )
 
                        ));
 
                    }
 

	
 
                    // Check if specified more than once
 
                    if specified[field.field_idx] {
 
-                        return Err(ParseError2::new_error(
+                        return Err(ParseError::new_error(
 
                            &ctx.module.source, field.identifier.position,
 
                            "This field is specified more than once"
 
                        ));
 
                    }
 

	
 
                    specified[field.field_idx] = true;
 
                }
 

	
 
                if !specified.iter().all(|v| *v) {
 
                    // Some fields were not specified
 
                    let mut not_specified = String::new();
 
                    for (def_field_idx, is_specified) in specified.iter().enumerate() {
 
                        if !is_specified {
 
                            if !not_specified.is_empty() { not_specified.push_str(", ") }
 
                            let field_ident = &definition.fields[def_field_idx].identifier;
 
                            not_specified.push_str(&String::from_utf8_lossy(&field_ident.value));
 
                        }
 
                    }
 

	
 
-                    return Err(ParseError2::new_error(
+                    return Err(ParseError::new_error(
 
                        &ctx.module.source, literal.identifier.position,
 
                        &format!("Not all fields are specified, [{}] are missing", not_specified)
 
                    ));
 
                }
 

	
 
                // Need to traverse fields expressions in struct and evaluate
 
                // the poly args
 
                let old_num_exprs = self.expression_buffer.len();
 
                self.expression_buffer.extend(literal.fields.iter().map(|v| v.value));
 
                let new_num_exprs = self.expression_buffer.len();
 

	
 
                self.visit_literal_poly_args(ctx, id)?;
 

	
 
                for expr_idx in old_num_exprs..new_num_exprs {
 
                    let expr_id = self.expression_buffer[expr_idx];
 
                    self.expr_parent = ExpressionParent::Expression(upcast_id, expr_idx as u32);
 
                    self.visit_expr(ctx, expr_id)?;
 
                }
 

	
 
                self.expression_buffer.truncate(old_num_exprs);
 
            }
 
        }
 

	
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let call_expr = &mut ctx.heap[id];
 
        let num_expr_args = call_expr.arguments.len();
 

	
 
        // Resolve the method to the appropriate definition and check the
 
        // legality of the particular method call.
 
        // TODO: @cleanup Unify in some kind of signature call, see similar
 
        //  cleanup comments with this `match` format.
 
        let num_definition_args;
 
        match &mut call_expr.method {
 
            Method::Create => {
 
                num_definition_args = 1;
 
            },
 
            Method::Fires => {
 
                if !self.def_type.is_primitive() {
 
-                    return Err(ParseError2::new_error(
+                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'fires' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
-                    return Err(ParseError2::new_error(
+                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'fires' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
                num_definition_args = 1;
 
            },
 
            Method::Get => {
 
                if !self.def_type.is_primitive() {
 
-                    return Err(ParseError2::new_error(
+                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'get' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
-                    return Err(ParseError2::new_error(
+                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'get' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
                num_definition_args = 1;
 
            },
 
            Method::Put => {
 
                if !self.def_type.is_primitive() {
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'put' may only occur in primitive component definitions"
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError2::new_error(
 
                    return Err(ParseError::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'put' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
                num_definition_args = 2;
 
            }
 
            Method::Symbolic(symbolic) => {
 
                // Find symbolic procedure
 
                let expected_type = if let ExpressionParent::New(_) = self.expr_parent {
 
                    // Expect to find a component
 
                    TypeClass::Component
 
                } else {
 
                    // Expect to find a function
 
                    TypeClass::Function
 
                };
 

	
 
                let definition = self.find_symbol_of_type(
 
                    &ctx.module.source, ctx.module.root_id, &ctx.symbols, &ctx.types,
 
                    &symbolic.identifier, expected_type
 
                )?;
 

	
 
                symbolic.definition = Some(definition.ast_definition);
 
                match &definition.definition {
 
                    DefinedTypeVariant::Function(definition) => {
 
                        num_definition_args = definition.arguments.len();
 
                    },
 
                    DefinedTypeVariant::Component(definition) => {
 
                        num_definition_args = definition.arguments.len();
 
                    }
 
                    _ => unreachable!(),
 
                }
 
            }
 
        }
 

	
 
        // Check the poly args and the number of variables in the call
 
        // expression
 
        self.visit_call_poly_args(ctx, id)?;
 
        let call_expr = &mut ctx.heap[id];
 
        if num_expr_args != num_definition_args {
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, call_expr.position,
 
                &format!(
 
                    "This call expects {} arguments, but {} were provided",
 
                    num_definition_args, num_expr_args
 
                )
 
            ));
 
        }
 

	
 
        // Recurse into all of the arguments and set the expression's parent
 
        let upcast_id = id.upcast();
 

	
 
        let old_num_exprs = self.expression_buffer.len();
 
        self.expression_buffer.extend(&call_expr.arguments);
 
        let new_num_exprs = self.expression_buffer.len();
 

	
 
        let old_expr_parent = self.expr_parent;
 
        call_expr.parent = old_expr_parent;
 

	
 
        for arg_expr_idx in old_num_exprs..new_num_exprs {
 
            let arg_expr_id = self.expression_buffer[arg_expr_idx];
 
            self.expr_parent = ExpressionParent::Expression(upcast_id, arg_expr_idx as u32);
 
            self.visit_expr(ctx, arg_expr_id)?;
 
        }
 

	
 
        self.expression_buffer.truncate(old_num_exprs);
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let var_expr = &ctx.heap[id];
 
        let variable_id = self.find_variable(ctx, self.relative_pos_in_block, &var_expr.identifier)?;
 
        let var_expr = &mut ctx.heap[id];
 
        var_expr.declaration = Some(variable_id);
 
        var_expr.parent = self.expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // ParserType visitors
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_parser_type(&mut self, ctx: &mut Ctx, id: ParserTypeId) -> VisitorResult {
 
        let old_num_types = self.parser_type_buffer.len();
 
        match self.visit_parser_type_without_buffer_cleanup(ctx, id) {
 
            Ok(_) => {
 
                debug_assert_eq!(self.parser_type_buffer.len(), old_num_types);
 
                Ok(())
 
            },
 
            Err(err) => {
 
                self.parser_type_buffer.truncate(old_num_types);
 
                Err(err)
 
            }
 
        }
 
    }
 
}
 

	
 
impl ValidityAndLinkerVisitor {
 
    //--------------------------------------------------------------------------
 
    // Special traversal
 
    //--------------------------------------------------------------------------
 

	
 
    /// Will visit a statement with a hint about its wrapping statement. This is
 
    /// used to distinguish block statements with a wrapping synchronous
 
    /// statement from normal block statements.
 
    fn visit_stmt_with_hint(&mut self, ctx: &mut Ctx, id: StatementId, hint: Option<SynchronousStatementId>) -> VisitorResult {
 
        if let Statement::Block(block_stmt) = &ctx.heap[id] {
 
            let block_id = block_stmt.this;
 
            self.visit_block_stmt_with_hint(ctx, block_id, hint)
 
        } else {
 
            self.visit_stmt(ctx, id)
 
        }
 
    }
 

	
 
    fn visit_block_stmt_with_hint(&mut self, ctx: &mut Ctx, id: BlockStatementId, hint: Option<SynchronousStatementId>) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            // Performing a breadth pass, so don't traverse into the statements
 
            // of the block.
 
            return Ok(())
 
        }
 

	
 
        // Set parent scope and relative position in the parent scope. Remember
 
        // these values to set them back to the old values when we're done with
 
        // the traversal of the block's statements.
 
        let body = &mut ctx.heap[id];
 
        body.parent_scope = self.cur_scope.clone();
 
        body.relative_pos_in_parent = self.relative_pos_in_block;
 

	
 
        let old_scope = self.cur_scope.replace(match hint {
 
            Some(sync_id) => Scope::Synchronous((sync_id, id)),
 
            None => Scope::Regular(id),
 
        });
 
        let old_relative_pos = self.relative_pos_in_block;
 

	
 
        // Copy statement IDs into buffer
 
        let old_num_stmts = self.statement_buffer.len();
 
        {
 
            let body = &ctx.heap[id];
 
            self.statement_buffer.extend_from_slice(&body.statements);
 
        }
 
        let new_num_stmts = self.statement_buffer.len();
 

	
 
        // Perform the breadth-first pass. Its main purpose is to find labeled
 
        // statements such that we can find the `goto`-targets immediately when
 
        // performing the depth pass
 
        self.performing_breadth_pass = true;
 
        for stmt_idx in old_num_stmts..new_num_stmts {
 
            self.relative_pos_in_block = (stmt_idx - old_num_stmts) as u32;
 
            self.visit_stmt(ctx, self.statement_buffer[stmt_idx])?;
 
        }
 

	
 
        if !self.insert_buffer.is_empty() {
 
            let body = &mut ctx.heap[id];
 
            for (insert_idx, (pos, stmt)) in self.insert_buffer.drain(..).enumerate() {
 
                body.statements.insert(pos as usize + insert_idx, stmt);
 
            }
 
        }
 

	
 
        // And the depth pass. Because we iterate over the statement buffer we
        // never visit any of the inserted nodes, so we may safely use the
        // relative_pos_in_block counter.
 
        self.performing_breadth_pass = false;
 
        for stmt_idx in old_num_stmts..new_num_stmts {
 
            self.relative_pos_in_block = (stmt_idx - old_num_stmts) as u32;
 
            self.visit_stmt(ctx, self.statement_buffer[stmt_idx])?;
 
        }
 

	
 
        self.cur_scope = old_scope;
 
        self.relative_pos_in_block = old_relative_pos;
 

	
 
        // Pop statement buffer
 
        debug_assert!(self.insert_buffer.is_empty(), "insert buffer not empty after depth pass");
 
        self.statement_buffer.truncate(old_num_stmts);
 

	
 
        Ok(())
 
    }
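    // A minimal sketch (PDL-like syntax approximate) of why the breadth pass
    // must run before the depth pass: in a block such as
    //
    //     { goto done; x = x + 1; done: x = 0; }
    //
    // the `goto done` is visited before the label is declared, so labels are
    // collected in the breadth pass and the depth pass can resolve the target.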
 

	
 
    /// Visits a particular ParserType in the AST and resolves temporary and
 
    /// implicitly inferred types into the appropriate tree. Note that a
 
    /// ParserType node is a tree. Only call this function on the root node of
 
    /// that tree to prevent doing work more than once.
 
    fn visit_parser_type_without_buffer_cleanup(&mut self, ctx: &mut Ctx, id: ParserTypeId) -> VisitorResult {
 
        use ParserTypeVariant as PTV;
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let init_num_types = self.parser_type_buffer.len();
 
        self.parser_type_buffer.push(id);
 

	
 
        'resolve_loop: while self.parser_type_buffer.len() > init_num_types {
 
            let parser_type_id = self.parser_type_buffer.pop().unwrap();
 
            let parser_type = &ctx.heap[parser_type_id];
 

	
 
            let (symbolic_pos, symbolic_variant, num_inferred_to_allocate) = match &parser_type.variant {
 
                PTV::Message | PTV::Bool |
 
                PTV::Byte | PTV::Short | PTV::Int | PTV::Long |
 
                PTV::String |
 
                PTV::IntegerLiteral | PTV::Inferred => {
 
                    // Builtin types or types that do not require recursion
 
                    continue 'resolve_loop;
 
                },
 
                PTV::Array(subtype_id) |
 
                PTV::Input(subtype_id) |
 
                PTV::Output(subtype_id) => {
 
                    // Requires recursing
 
                    self.parser_type_buffer.push(*subtype_id);
 
                    continue 'resolve_loop;
 
                },
 
                PTV::Symbolic(symbolic) => {
 
                    // Retrieve poly_vars from function/component definition to
 
                    // match against.
 
                    let (definition_id, poly_vars) = match self.def_type {
 
                        DefinitionType::None => unreachable!(),
 
                        DefinitionType::Primitive(id) => (id.upcast(), &ctx.heap[id].poly_vars),
 
                        DefinitionType::Composite(id) => (id.upcast(), &ctx.heap[id].poly_vars),
 
                        DefinitionType::Function(id) => (id.upcast(), &ctx.heap[id].poly_vars),
 
                    };
 

	
 
                    let mut symbolic_variant = None;
 
                    for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
                        if symbolic.identifier.matches_identifier(poly_var) {
 
                            // Type refers to a polymorphic variable.
 
                            // TODO: @hkt Maybe allow higher-kinded types?
 
                            if symbolic.identifier.get_poly_args().is_some() {
 
                                return Err(ParseError2::new_error(
 
                                return Err(ParseError::new_error(
 
                                    &ctx.module.source, symbolic.identifier.position,
 
                                    "Polymorphic arguments to a polymorphic variable (higher-kinded types) are not allowed (yet)"
 
                                ));
 
                            }
 
                            symbolic_variant = Some(SymbolicParserTypeVariant::PolyArg(definition_id, poly_var_idx));
 
                        }
 
                    }
 

	
 
                    if let Some(symbolic_variant) = symbolic_variant {
 
                        // Identifier points to a polymorphic argument
 
                        (symbolic.identifier.position, symbolic_variant, 0)
 
                    } else {
 
                        // Must be a user-defined type, otherwise an error
 
                        let (found_type, ident_iter) = find_type_definition(
 
                            &ctx.symbols, &ctx.types, ctx.module.root_id, &symbolic.identifier
 
                        ).as_parse_error(&ctx.module.source)?;
 

	
 
                        // TODO: @function_ptrs: Allow function pointers at some
 
                        //  point in the future
 
                        if found_type.definition.type_class().is_proc_type() {
 
                            return Err(ParseError2::new_error(
 
                            return Err(ParseError::new_error(
 
                                &ctx.module.source, symbolic.identifier.position,
 
                                &format!(
 
                                    "This identifier points to a {} type, expected a datatype",
 
                                    found_type.definition.type_class()
 
                                )
 
                            ));
 
                        }
 

	
 
                        // If the type is polymorphic then we have two cases: if
 
                        // the programmer did not specify the polyargs then we
 
                        // assume we're going to infer all of them. Otherwise we
 
                        // make sure that they match in count.
 
                        let (_, poly_args) = ident_iter.prev().unwrap();
 
                        let num_to_infer = match_polymorphic_args_to_vars(
 
                            found_type, poly_args, symbolic.identifier.position
 
                        ).as_parse_error(&ctx.heap, &ctx.module.source)?;
 

	
 
                        (
 
                            symbolic.identifier.position,
 
                            SymbolicParserTypeVariant::Definition(found_type.ast_definition),
 
                            num_to_infer
 
                        )
 
                    }
 
                }
 
            };
 

	
 
            // If we get here then the type is symbolic; perform a mutable
            // borrow (and some Rust shenanigans) to set the required
            // information.
 
            for _ in 0..num_inferred_to_allocate {
 
                // TODO: @hack, not very user friendly to manually allocate
 
                //  `inferred` ParserTypes with the InputPosition of the
 
                //  symbolic type's identifier.
 
                // We reuse the `parser_type_buffer` to temporarily store these
 
                // and we'll take them out later
 
                self.parser_type_buffer.push(ctx.heap.alloc_parser_type(|this| ParserType{
 
                    this,
 
                    pos: symbolic_pos,
 
                    variant: ParserTypeVariant::Inferred,
 
                }));
 
            }
 

	
 
            if let PTV::Symbolic(symbolic) = &mut ctx.heap[parser_type_id].variant {
 
                if num_inferred_to_allocate != 0 {
 
                    symbolic.poly_args2.reserve(num_inferred_to_allocate);
 
                    for _ in 0..num_inferred_to_allocate {
 
                        symbolic.poly_args2.push(self.parser_type_buffer.pop().unwrap());
 
                    }
 
                } else if !symbolic.identifier.poly_args.is_empty() {
 
                    symbolic.poly_args2.extend(&symbolic.identifier.poly_args)
 
                }
 
                symbolic.variant = Some(symbolic_variant);
 
            } else {
 
                unreachable!();
 
            }
 
        }
 

	
 
        Ok(())
 
    }
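    // A minimal sketch of the resolution above (type names hypothetical): inside
    // a definition with polymorphic variable `T`, the parser type `T` resolves to
    // SymbolicParserTypeVariant::PolyArg. A user-defined type `Pair` with two
    // polymorphic variables, written without explicit arguments, gets two freshly
    // allocated `Inferred` parser types pushed into `poly_args2`, as if the
    // programmer had written `Pair<auto, auto>`.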
 

	
 
    //--------------------------------------------------------------------------
 
    // Utilities
 
    //--------------------------------------------------------------------------
 

	
 
    /// Adds a local variable to the current scope. It will also annotate the
 
    /// `Local` in the AST with its relative position in the block.
 
    fn checked_local_add(&mut self, ctx: &mut Ctx, relative_pos: u32, id: LocalId) -> Result<(), ParseError2> {
 
    fn checked_local_add(&mut self, ctx: &mut Ctx, relative_pos: u32, id: LocalId) -> Result<(), ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        // Make sure we do not conflict with any global symbols
 
        {
 
            let ident = &ctx.heap[id].identifier;
 
            if let Some(symbol) = ctx.symbols.resolve_symbol(ctx.module.root_id, &ident.value) {
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, ident.position, "Local variable declaration conflicts with symbol")
 
                    ParseError::new_error(&ctx.module.source, ident.position, "Local variable declaration conflicts with symbol")
 
                        .with_postfixed_info(&ctx.module.source, symbol.position, "Conflicting symbol is found here")
 
                );
 
            }
 
        }
 

	
 
        let local = &mut ctx.heap[id];
 
        local.relative_pos_in_block = relative_pos;
 

	
 
        // Make sure we do not shadow any variables in any of the scopes. Note
 
        // that variables in parent scopes may be declared later
 
        let local = &ctx.heap[id];
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        let mut local_relative_pos = self.relative_pos_in_block;
 

	
 
        loop {
 
            debug_assert!(scope.is_block(), "scope is not a block");
 
            let block = &ctx.heap[scope.to_block()];
 
            for other_local_id in &block.locals {
 
                let other_local = &ctx.heap[*other_local_id];
 
                // Position check in case another variable with the same name
 
                // is defined in a higher-level scope, but later than the scope
 
                // in which the current variable resides.
 
                if local.this != *other_local_id &&
 
                    local_relative_pos >= other_local.relative_pos_in_block &&
 
                    local.identifier == other_local.identifier {
 
                    // Collision within this scope
 
                    return Err(
 
                        ParseError2::new_error(&ctx.module.source, local.position, "Local variable name conflicts with another variable")
 
                        ParseError::new_error(&ctx.module.source, local.position, "Local variable name conflicts with another variable")
 
                            .with_postfixed_info(&ctx.module.source, other_local.position, "Previous variable is found here")
 
                    );
 
                }
 
            }
 

	
 
            // Current scope is fine, move to parent scope if any
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if let Scope::Definition(definition_id) = scope {
 
                // At outer scope, check parameters of function/component
 
                for parameter_id in ctx.heap[*definition_id].parameters() {
 
                    let parameter = &ctx.heap[*parameter_id];
 
                    if local.identifier == parameter.identifier {
 
                        return Err(
 
                            ParseError2::new_error(&ctx.module.source, local.position, "Local variable name conflicts with parameter")
 
                            ParseError::new_error(&ctx.module.source, local.position, "Local variable name conflicts with parameter")
 
                                .with_postfixed_info(&ctx.module.source, parameter.position, "Parameter definition is found here")
 
                        );
 
                    }
 
                }
 

	
 
                break;
 
            }
 

	
 
            // If we get here, then the parent scope is another block scope
 
            local_relative_pos = ctx.heap[scope.to_block()].relative_pos_in_parent;
 
        }
 

	
 
        // No collisions at all
 
        let block = &mut ctx.heap[self.cur_scope.as_ref().unwrap().to_block()];
 
        block.locals.push(id);
 

	
 
        Ok(())
 
    }
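    // A minimal sketch (PDL-like syntax approximate) of the shadowing rule above:
    //
    //     int x = 0;
    //     { int x = 1; }   // rejected: conflicts with the earlier `x`
    //
    // whereas a variable declared in a parent scope *after* the inner block does
    // not conflict, because the relative positions are compared.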
 

	
 
    /// Finds a variable in the visitor's scope that must appear before the
 
    /// specified relative position within that block.
 
    fn find_variable(&self, ctx: &Ctx, mut relative_pos: u32, identifier: &NamespacedIdentifier) -> Result<VariableId, ParseError2> {
 
    fn find_variable(&self, ctx: &Ctx, mut relative_pos: u32, identifier: &NamespacedIdentifier) -> Result<VariableId, ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 
        debug_assert!(identifier.parts.len() == 1, "implement namespaced seeking of target associated with identifier");
 

	
 
        // TODO: May still refer to an alias of a global symbol using a single
 
        //  identifier in the namespace.
 
        // No need to use iterator over namespaces if here
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        
 
        loop {
 
            debug_assert!(scope.is_block());
 
            let block = &ctx.heap[scope.to_block()];
 
            
 
            for local_id in &block.locals {
 
                let local = &ctx.heap[*local_id];
 
                
 
                if local.relative_pos_in_block < relative_pos && identifier.matches_identifier(&local.identifier) {
 
                    return Ok(local_id.upcast());
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some());
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                // Definition scope, need to check arguments to definition
 
                match scope {
 
                    Scope::Definition(definition_id) => {
 
                        let definition = &ctx.heap[*definition_id];
 
                        for parameter_id in definition.parameters() {
 
                            let parameter = &ctx.heap[*parameter_id];
 
                            if identifier.matches_identifier(&parameter.identifier) {
 
                                return Ok(parameter_id.upcast());
 
                            }
 
                        }
 
                    },
 
                    _ => unreachable!(),
 
                }
 

	
 
                // Variable could not be found
 
                return Err(ParseError2::new_error(
 
                return Err(ParseError::new_error(
 
                    &ctx.module.source, identifier.position, "This variable is not declared"
 
                ));
 
            } else {
 
                relative_pos = block.relative_pos_in_parent;
 
            }
 
        }
 
    }
 

	
 
    /// Adds a particular label to the current scope. Will return an error if
 
    /// there is another label with the same name visible in the current scope.
 
    fn checked_label_add(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> Result<(), ParseError2> {
 
    fn checked_label_add(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> Result<(), ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        // Make sure the label is not defined within the current scope or any
        // of the parent scopes.
 
        let label = &ctx.heap[id];
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 

	
 
        loop {
 
            debug_assert!(scope.is_block(), "scope is not a block");
 
            let block = &ctx.heap[scope.to_block()];
 
            for other_label_id in &block.labels {
 
                let other_label = &ctx.heap[*other_label_id];
 
                if other_label.label == label.label {
 
                    // Collision
 
                    return Err(
 
                        ParseError2::new_error(&ctx.module.source, label.position, "Label name conflicts with another label")
 
                        ParseError::new_error(&ctx.module.source, label.position, "Label name conflicts with another label")
 
                            .with_postfixed_info(&ctx.module.source, other_label.position, "Other label is found here")
 
                    );
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                break;
 
            }
 
        }
 

	
 
        // No collisions
 
        let block = &mut ctx.heap[self.cur_scope.as_ref().unwrap().to_block()];
 
        block.labels.push(id);
 

	
 
        Ok(())
 
    }
 

	
 
    /// Finds a particular labeled statement by its identifier. Once found it
 
    /// will make sure that the target label does not skip over any variable
 
    /// declarations within the scope in which the label was found.
 
    fn find_label(&self, ctx: &Ctx, identifier: &Identifier) -> Result<LabeledStatementId, ParseError2> {
 
    fn find_label(&self, ctx: &Ctx, identifier: &Identifier) -> Result<LabeledStatementId, ParseError> {
 
        debug_assert!(self.cur_scope.is_some());
 

	
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        loop {
 
            debug_assert!(scope.is_block(), "scope is not a block");
 
            let relative_scope_pos = ctx.heap[scope.to_block()].relative_pos_in_parent;
 

	
 
            let block = &ctx.heap[scope.to_block()];
 
            for label_id in &block.labels {
 
                let label = &ctx.heap[*label_id];
 
                if label.label == *identifier {
 
                    for local_id in &block.locals {
 
                        // TODO: Better to do this in control flow analysis; it
                        //  is legal to skip over a variable declaration if the
                        //  variable is not actually used. I might be missing
                        //  something here when laying out the bytecode...
 
                        let local = &ctx.heap[*local_id];
 
                        if local.relative_pos_in_block > relative_scope_pos && local.relative_pos_in_block < label.relative_pos_in_block {
 
                            return Err(
 
                                ParseError2::new_error(&ctx.module.source, identifier.position, "This target label skips over a variable declaration")
 
                                ParseError::new_error(&ctx.module.source, identifier.position, "This target label skips over a variable declaration")
 
                                    .with_postfixed_info(&ctx.module.source, label.position, "Because it jumps to this label")
 
                                    .with_postfixed_info(&ctx.module.source, local.position, "Which skips over this variable")
 
                            );
 
                        }
 
                    }
 
                    return Ok(*label_id);
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                return Err(ParseError2::new_error(&ctx.module.source, identifier.position, "Could not find this label"));
 
                return Err(ParseError::new_error(&ctx.module.source, identifier.position, "Could not find this label"));
 
            }
 

	
 
        }
 
    }
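    // A minimal sketch (PDL-like syntax approximate) of the jump that find_label
    // rejects:
    //
    //     goto after;
    //     int x = 5;
    //     after: x = 0;
    //
    // The jump to `after` would skip over the declaration of `x`, which produces
    // the three-part error constructed above.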
 

	
 
    /// Finds a particular symbol in the symbol table which must correspond to
 
    /// a definition of a particular type.
 
    // Note: root_id, symbols and types passed in explicitly to prevent
 
    //  borrowing errors
 
    fn find_symbol_of_type<'a>(
 
        &self, source: &InputSource, root_id: RootId, symbols: &SymbolTable, types: &'a TypeTable,
 
        identifier: &NamespacedIdentifier, expected_type_class: TypeClass
 
    ) -> Result<&'a DefinedType, ParseError2> {
 
    ) -> Result<&'a DefinedType, ParseError> {
 
        // Find symbol associated with identifier
 
        let (find_result, _) = find_type_definition(symbols, types, root_id, identifier)
 
            .as_parse_error(source)?;
 

	
 
        let definition_type_class = find_result.definition.type_class();
 
        if expected_type_class != definition_type_class {
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                source, identifier.position,
 
                &format!(
 
                    "Expected to find a {}, this symbol points to a {}",
 
                    expected_type_class, definition_type_class
 
                )
 
            ))
 
        }
 

	
 
        Ok(find_result)
 
    }
 

	
 
    /// This function will check if the provided while statement ID has a block
 
    /// statement that is one of our current parents.
 
    fn has_parent_while_scope(&self, ctx: &Ctx, id: WhileStatementId) -> bool {
 
        debug_assert!(self.cur_scope.is_some());
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        let while_stmt = &ctx.heap[id];
 
        loop {
 
            debug_assert!(scope.is_block());
 
            let block = scope.to_block();
 
            if while_stmt.body == block.upcast() {
 
                return true;
 
            }
 

	
 
            let block = &ctx.heap[block];
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                return false;
 
            }
 
        }
 
    }
 

	
 
    /// This function should be called while dealing with break/continue
 
    /// statements. It will try to find the targeted while statement, using the
 
    /// target label if provided. If a valid target is found then the loop's
 
    /// ID will be returned, otherwise a parsing error is constructed.
 
    /// The provided input position should be the position of the break/continue
 
    /// statement.
 
    fn resolve_break_or_continue_target(&self, ctx: &Ctx, position: InputPosition, label: &Option<Identifier>) -> Result<WhileStatementId, ParseError2> {
 
    fn resolve_break_or_continue_target(&self, ctx: &Ctx, position: InputPosition, label: &Option<Identifier>) -> Result<WhileStatementId, ParseError> {
 
        let target = match label {
 
            Some(label) => {
 
                let target_id = self.find_label(ctx, label)?;
 

	
 
                // Make sure break target is a while statement
 
                let target = &ctx.heap[target_id];
 
                if let Statement::While(target_stmt) = &ctx.heap[target.body] {
 
                    // Even though we have a target while statement, the break might not be
 
                    // present underneath this particular labeled while statement
 
                    if !self.has_parent_while_scope(ctx, target_stmt.this) {
 
                        return Err(
                            ParseError2::new_error(&ctx.module.source, label.position, "Break statement is not nested under the target label's while statement")
                            ParseError::new_error(&ctx.module.source, label.position, "Break statement is not nested under the target label's while statement")
                                .with_postfixed_info(&ctx.module.source, target.position, "The targeted label is found here")
                        );
 
                    }
 

	
 
                    target_stmt.this
 
                } else {
 
                    return Err(
 
                        ParseError2::new_error(&ctx.module.source, label.position, "Incorrect break target label, it must target a while loop")
 
                        ParseError::new_error(&ctx.module.source, label.position, "Incorrect break target label, it must target a while loop")
 
                            .with_postfixed_info(&ctx.module.source, target.position, "The targeted label is found here")
 
                    );
 
                }
 
            },
 
            None => {
 
                // Use the enclosing while statement; the break must be
                // nested within that while statement
 
                if self.in_while.is_none() {
 
                    return Err(
 
                        ParseError2::new_error(&ctx.module.source, position, "Break statement is not nested under a while loop")
 
                        ParseError::new_error(&ctx.module.source, position, "Break statement is not nested under a while loop")
 
                    );
 
                }
 

	
 
                self.in_while.unwrap()
 
            }
 
        };
 

	
 
        // We have a valid target for the break statement. But we need to
 
        // make sure we will not break out of a synchronous block
 
        {
 
            let target_while = &ctx.heap[target];
 
            if target_while.in_sync != self.in_sync {
 
                // Break is nested under while statement, so can only escape a
 
                // sync block if the sync is nested inside the while statement.
 
                debug_assert!(self.in_sync.is_some());
 
                let sync_stmt = &ctx.heap[self.in_sync.unwrap()];
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, position, "Break may not escape the surrounding synchronous block")
 
                    ParseError::new_error(&ctx.module.source, position, "Break may not escape the surrounding synchronous block")
 
                        .with_postfixed_info(&ctx.module.source, target_while.position, "The break escapes out of this loop")
 
                        .with_postfixed_info(&ctx.module.source, sync_stmt.position, "And would therefore escape this synchronous block")
 
                );
 
            }
 
        }
 

	
 
        Ok(target)
 
    }
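    // A minimal sketch (PDL syntax as in the standard library) of the sync check
    // above:
    //
    //     while(true) { synchronous { break; } }   // rejected: break escapes the sync block
    //     synchronous { while(true) { break; } }   // accepted: the sync block encloses the loop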
 

	
 
    // TODO: @cleanup, merge with function below
 
    fn visit_call_poly_args(&mut self, ctx: &mut Ctx, call_id: CallExpressionId) -> VisitorResult {
 
        // TODO: @token Revisit when tokenizer is implemented
 
        let call_expr = &mut ctx.heap[call_id];
 
        if let Method::Symbolic(symbolic) = &mut call_expr.method {
 
            if let Some(poly_args) = symbolic.identifier.get_poly_args() {
 
                call_expr.poly_args.extend(poly_args);
 
            }
 
        }
 

	
 
        let call_expr = &ctx.heap[call_id];
 

	
 
        // Determine the polyarg signature
 
        let num_expected_poly_args = match &call_expr.method {
 
            Method::Create => {
 
                0
 
            },
 
            Method::Fires => {
 
                1
 
            },
 
            Method::Get => {
 
                1
 
            },
 
            Method::Put => {
 
                1
 
            }
 
            Method::Symbolic(symbolic) => {
 
                // Retrieve type and make sure number of specified polymorphic 
 
                // arguments is correct
 

	
 
                let definition = &ctx.heap[symbolic.definition.unwrap()];
 
                match definition {
 
                    Definition::Function(definition) => definition.poly_vars.len(),
 
                    Definition::Component(definition) => definition.poly_vars.len(),
 
                    _ => {
 
                        debug_assert!(false, "expected function or component definition while visiting call poly args");
 
                        unreachable!();
 
                    }
 
                }
 
            }
 
        };
 

	
 
        // We allow zero polyargs to imply all args are inferred. Otherwise the
 
        // number of arguments must be equal
 
        if call_expr.poly_args.is_empty() {
 
            if num_expected_poly_args != 0 {
 
                // Infer all polyargs
 
                // TODO: @cleanup Not nice to use method position as implicitly
 
                //  inferred parser type pos.
 
                let pos = call_expr.position();
 
                for _ in 0..num_expected_poly_args {
 
                    self.parser_type_buffer.push(ctx.heap.alloc_parser_type(|this| ParserType {
 
                        this,
 
                        pos,
 
                        variant: ParserTypeVariant::Inferred,
 
                    }));
 
                }
 

	
 
                let call_expr = &mut ctx.heap[call_id];
 
                call_expr.poly_args.reserve(num_expected_poly_args);
 
                for _ in 0..num_expected_poly_args {
 
                    call_expr.poly_args.push(self.parser_type_buffer.pop().unwrap());
 
                }
 
            }
 
            Ok(())
 
        } else if call_expr.poly_args.len() == num_expected_poly_args {
 
            // Number of args is not 0, so parse all the specified ParserTypes
 
            let old_num_types = self.parser_type_buffer.len();
 
            self.parser_type_buffer.extend(&call_expr.poly_args);
 
            while self.parser_type_buffer.len() > old_num_types {
 
                let parser_type_id = self.parser_type_buffer.pop().unwrap();
 
                self.visit_parser_type(ctx, parser_type_id)?;
 
            }
 
            self.parser_type_buffer.truncate(old_num_types);
 
            Ok(())
 
        } else {
 
            return Err(ParseError2::new_error(
 
            return Err(ParseError::new_error(
 
                &ctx.module.source, call_expr.position,
 
                &format!(
 
                    "Expected {} polymorphic arguments (or none, to infer them), but {} were specified",
 
                    num_expected_poly_args, call_expr.poly_args.len()
 
                )
 
            ));
 
        }
 
    }
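    // A minimal sketch (call syntax approximate, name hypothetical) of the rule
    // above for a symbolic definition with two polymorphic variables:
    //
    //     pick(a, b)              // ok: both poly args are allocated as inferred (`auto`)
    //     pick<int, bool>(a, b)   // ok: the counts match
    //     pick<int>(a, b)         // rejected: expected 2 polymorphic arguments, or none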
 

	
 
    fn visit_literal_poly_args(&mut self, ctx: &mut Ctx, lit_id: LiteralExpressionId) -> VisitorResult {
 
        // TODO: @token Revisit when tokenizer is implemented
 
        let literal_expr = &mut ctx.heap[lit_id];
 
        if let Literal::Struct(literal) = &mut literal_expr.value {
 
            literal.poly_args2.extend(&literal.identifier.poly_args);
 
        }
 

	
 
        let literal_expr = &ctx.heap[lit_id];
 
        let literal_pos = literal_expr.position;
 
        let num_poly_args_to_infer = match &literal_expr.value {
 
            Literal::Null | Literal::False | Literal::True |
 
            Literal::Character(_) | Literal::Integer(_) => {
 
                // Not a parse error, but a programmer error: if we get here we
                // are likely doing work twice
 
                debug_assert!(false, "called visit_literal_poly_args on a non-polymorphic literal");
 
                unreachable!();
 
            },
 
            Literal::Struct(literal) => {
 
                // Retrieve type and make sure number of specified polymorphic
 
                // arguments is correct.
 
                let defined_type = ctx.types.get_base_definition(literal.definition.as_ref().unwrap())
 
                    .unwrap();
 
                let maybe_poly_args = literal.identifier.get_poly_args();
 
                let num_to_infer = match_polymorphic_args_to_vars(
 
                    defined_type, maybe_poly_args, literal.identifier.position
 
                ).as_parse_error(&ctx.heap, &ctx.module.source)?;
 

	
 
                // Visit all specified parser types
 
                let old_num_types = self.parser_type_buffer.len();
 
                self.parser_type_buffer.extend(&literal.poly_args2);
 
                while self.parser_type_buffer.len() > old_num_types {
 
                    let parser_type_id = self.parser_type_buffer.pop().unwrap();
 
                    self.visit_parser_type(ctx, parser_type_id)?;
 
                }
 
                self.parser_type_buffer.truncate(old_num_types);
 

	
 
                num_to_infer
 
            }
 
        };
 

	
 
        if num_poly_args_to_infer != 0 {
 
            for _ in 0..num_poly_args_to_infer {
 
                self.parser_type_buffer.push(ctx.heap.alloc_parser_type(|this| ParserType{
 
                    this, pos: literal_pos, variant: ParserTypeVariant::Inferred
 
                }));
 
            }
 

	
 
            let literal = match &mut ctx.heap[lit_id].value {
 
                Literal::Struct(literal) => literal,
 
                _ => unreachable!(),
 
            };
 
            literal.poly_args2.reserve(num_poly_args_to_infer);
 
            for _ in 0..num_poly_args_to_infer {
 
                literal.poly_args2.push(self.parser_type_buffer.pop().unwrap());
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 
}
 
\ No newline at end of file
src/protocol/tests/utils.rs
Show inline comments
 
@@ -170,748 +170,748 @@ impl AstOkTester {
 
                }
 

	
 
                // Found function
 
                let tester = FunctionTester::new(self.ctx(), definition);
 
                f(tester);
 
                found = true;
 
                break;
 
            }
 
        }
 

	
 
        if found { return self }
 

	
 
        assert!(
 
            false, "[{}] failed to find definition for function '{}'",
 
            self.test_name, name
 
        );
 
        unreachable!();
 
    }
 

	
 
    fn ctx(&self) -> TestCtx {
 
        TestCtx{
 
            test_name: &self.test_name,
 
            modules: &self.modules,
 
            heap: &self.heap,
 
            types: &self.types,
 
            symbols: &self.symbols,
 
        }
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Utilities for successful compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct StructTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a StructDefinition,
 
}
 

	
 
impl<'a> StructTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a StructDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_num_fields(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.def.fields.len(),
 
            "[{}] Expected {} struct fields, but found {} for {}",
 
            self.ctx.test_name, num, self.def.fields.len(), self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_num_monomorphs(self, num: usize) -> Self {
 
        let type_def = self.ctx.types.get_base_definition(&self.def.this.upcast()).unwrap();
 
        assert_eq!(
 
            num, type_def.monomorphs.len(),
 
            "[{}] Expected {} monomorphs, but found {} for {}",
 
            self.ctx.test_name, num, type_def.monomorphs.len(), self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    /// Asserts that a particular monomorph exists. Separate the polymorphic
    /// variable types with a semicolon.
 
    pub(crate) fn assert_has_monomorph(self, serialized_monomorph: &str) -> Self {
 
        let definition_id = self.def.this.upcast();
 
        let type_def = self.ctx.types.get_base_definition(&definition_id).unwrap();
 

	
 
        let mut full_buffer = String::new();
 
        full_buffer.push('[');
 
        for (monomorph_idx, monomorph) in type_def.monomorphs.iter().enumerate() {
 
            let mut buffer = String::new();
 
            for (element_idx, monomorph_element) in monomorph.iter().enumerate() {
 
                if element_idx != 0 { buffer.push(';'); }
 
                serialize_concrete_type(&mut buffer, self.ctx.heap, definition_id, monomorph_element);
 
            }
 

	
 
            if buffer == serialized_monomorph {
 
                // Found an exact match
 
                return self
 
            }
 

	
 
            if monomorph_idx != 0 {
 
                full_buffer.push_str(", ");
 
            }
 
            full_buffer.push('"');
 
            full_buffer.push_str(&buffer);
 
            full_buffer.push('"');
 
        }
 
        full_buffer.push(']');
 

	
 
        assert!(
 
            false, "[{}] Expected to find monomorph {}, but got {} for {}",
 
            self.ctx.test_name, serialized_monomorph, &full_buffer, self.assert_postfix()
 
        );
 
        self
 
    }
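    // Illustrative usage (types hypothetical); the exact element text follows
    // serialize_concrete_type below:
    //
    //     struct_tester.assert_has_monomorph("int;bool");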
 

	
 
    pub(crate) fn for_field<F: Fn(StructFieldTester)>(self, name: &str, f: F) -> Self {
 
        // Find field with specified name
 
        for field in &self.def.fields {
 
            if String::from_utf8_lossy(&field.field.value) == name {
 
                let tester = StructFieldTester::new(self.ctx, field);
 
                f(tester);
 
                return self;
 
            }
 
        }
 

	
 
        assert!(
 
            false, "[{}] Could not find struct field '{}' for {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 
        unreachable!();
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("Struct{ name: ");
 
        v.push_str(&String::from_utf8_lossy(&self.def.identifier.value));
 
        v.push_str(", fields: [");
 
        for (field_idx, field) in self.def.fields.iter().enumerate() {
 
            if field_idx != 0 { v.push_str(", "); }
 
            v.push_str(&String::from_utf8_lossy(&field.field.value));
 
        }
 
        v.push_str("] }");
 
        v
 
    }
 
}
 

	
 
pub(crate) struct StructFieldTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a StructFieldDefinition,
 
}
 

	
 
impl<'a> StructFieldTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a StructFieldDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_parser_type(self, expected: &str) -> Self {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, self.def.parser_type);
 
        assert_eq!(
 
            expected, &serialized_type,
 
            "[{}] Expected type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized_type, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, self.def.parser_type);
 
        format!(
 
            "StructField{{ name: {}, parser_type: {} }}",
 
            String::from_utf8_lossy(&self.def.field.value), serialized_type
 
        )
 
    }
 
}
 

	
 
pub(crate) struct FunctionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a Function,
 
}
 

	
 
impl<'a> FunctionTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a Function) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn for_variable<F: Fn(VariableTester)>(self, name: &str, f: F) -> Self {
 
        // Find the memory statement in order to find the local
 
        let mem_stmt_id = seek_stmt(
 
            self.ctx.heap, self.def.body,
 
            &|stmt| {
 
                if let Statement::Local(local) = stmt {
 
                    if let LocalStatement::Memory(memory) = local {
 
                        let local = &self.ctx.heap[memory.variable];
 
                        if local.identifier.value == name.as_bytes() {
 
                            return true;
 
                        }
 
                    }
 
                }
 

	
 
                false
 
            }
 
        );
 

	
 
        assert!(
 
            mem_stmt_id.is_some(), "[{}] Failed to find variable '{}' in {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 

	
 
        let mem_stmt_id = mem_stmt_id.unwrap();
 
        let local_id = self.ctx.heap[mem_stmt_id].as_memory().variable;
 
        let local = &self.ctx.heap[local_id];
 

	
 
        // Find the assignment expression that follows it
 
        let assignment_id = seek_expr_in_stmt(
 
            self.ctx.heap, self.def.body,
 
            &|expr| {
 
                if let Expression::Assignment(assign_expr) = expr {
 
                    if let Expression::Variable(variable_expr) = &self.ctx.heap[assign_expr.left] {
 
                        if variable_expr.position.offset == local.identifier.position.offset {
 
                            return true;
 
                        }
 
                    }
 
                }
 

	
 
                false
 
            }
 
        );
 

	
 
        assert!(
 
            assignment_id.is_some(), "[{}] Failed to find assignment to variable '{}' in {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 

	
 
        let assignment = &self.ctx.heap[assignment_id.unwrap()];
 

	
 
        // Construct tester and pass to tester function
 
        let tester = VariableTester::new(
 
            self.ctx, self.def.this.upcast(), local, 
 
            assignment.as_assignment()
 
        );
 
        f(tester);
 

	
 
        self
 
    }
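    // Illustrative usage (variable name and types hypothetical): inspect the
    // declaration of local `x` and the type of its first assignment:
    //
    //     func_tester.for_variable("x", |var| {
    //         var.assert_parser_type("auto").assert_concrete_type("int");
    //     });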
 

	
 
    /// Finds a specific expression within a function. There are two matchers:
 
    /// one outer matcher (to find a rough indication of the expression) and an
 
    /// inner matcher to find the exact expression. 
 
    ///
 
    /// The reason is that, for example, a function's body might be littered
    /// with addition symbols, so we first match on "some_var + some_other_var"
    /// and then match exactly on the "+".
 
    pub(crate) fn for_expression_by_source<F: Fn(ExpressionTester)>(self, outer_match: &str, inner_match: &str, f: F) -> Self {
 
        // Seek the expression in the source code
 
        assert!(outer_match.contains(inner_match), "improper testing code");
 

	
 
        let module = seek_def_in_modules(
 
            &self.ctx.heap, &self.ctx.modules, self.def.this.upcast()
 
        ).unwrap();
 

	
 
        // Find the first occurrence of the expression after the definition of
 
        // the function, we'll check that it is included in the body later.
 
        let mut outer_match_idx = self.def.position.offset;
 
        while outer_match_idx < module.source.input.len() {
 
            if module.source.input[outer_match_idx..].starts_with(outer_match.as_bytes()) {
 
                break;
 
            }
 
            outer_match_idx += 1
 
        }
 

	
 
        assert!(
 
            outer_match_idx < module.source.input.len(),
 
            "[{}] Failed to find '{}' within the source that contains {}",
 
            self.ctx.test_name, outer_match, self.assert_postfix()
 
        );
 
        let inner_match_idx = outer_match_idx + outer_match.find(inner_match).unwrap();
 

	
 
        // Use the inner match index to find the expression
 
        let expr_id = seek_expr_in_stmt(
 
            &self.ctx.heap, self.def.body,
 
            &|expr| expr.position().offset == inner_match_idx
 
        );
 
        assert!(
 
            expr_id.is_some(),
 
            "[{}] Failed to find '{}' within the source that contains {} \
 
            (note: the expression was found, but not within the specified function)",
 
            self.ctx.test_name, outer_match, self.assert_postfix()
 
        );
 
        let expr_id = expr_id.unwrap();
 

	
 
        // We have the expression, call the testing function
 
        let tester = ExpressionTester::new(
 
            self.ctx, self.def.this.upcast(), &self.ctx.heap[expr_id]
 
        );
 
        f(tester);
 

	
 
        self
 
    }
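    // Illustrative usage (source fragment hypothetical): locate the rough
    // fragment first, then the exact operator within it:
    //
    //     func_tester.for_expression_by_source("a + b", "+", |expr| {
    //         expr.assert_concrete_type("int");
    //     });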
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Function{{ name: {} }}",
 
            &String::from_utf8_lossy(&self.def.identifier.value)
 
        )
 
    }
 
}
 

	
 
pub(crate) struct VariableTester<'a> {
 
    ctx: TestCtx<'a>,
 
    definition_id: DefinitionId,
 
    local: &'a Local,
 
    assignment: &'a AssignmentExpression,
 
}
 

	
 
impl<'a> VariableTester<'a> {
 
    fn new(
 
        ctx: TestCtx<'a>, definition_id: DefinitionId, local: &'a Local, assignment: &'a AssignmentExpression
 
    ) -> Self {
 
        Self{ ctx, definition_id, local, assignment }
 
    }
 

	
 
    pub(crate) fn assert_parser_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_parser_type(&mut serialized, self.ctx.heap, self.local.parser_type);
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected parser type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_concrete_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_concrete_type(
 
            &mut serialized, self.ctx.heap, self.definition_id, 
 
            &self.assignment.concrete_type
 
        );
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected concrete type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Variable{{ name: {} }}",
 
            &String::from_utf8_lossy(&self.local.identifier.value)
 
        )
 
    }
 
}
 

	
 
pub(crate) struct ExpressionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    definition_id: DefinitionId, // of the enclosing function/component
 
    expr: &'a Expression
 
}
 

	
 
impl<'a> ExpressionTester<'a> {
 
    fn new(
 
        ctx: TestCtx<'a>, definition_id: DefinitionId, expr: &'a Expression
 
    ) -> Self {
 
        Self{ ctx, definition_id, expr }
 
    }
 

	
 
    pub(crate) fn assert_concrete_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_concrete_type(
 
            &mut serialized, self.ctx.heap, self.definition_id,
 
            self.expr.get_type()
 
        );
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected concrete type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Expression{{ debug: {:?} }}",
 
            self.expr
 
        )
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct AstErrTester {
 
    test_name: String,
 
    error: ParseError2,
 
    error: ParseError,
 
}
 

	
 
impl AstErrTester {
 
    fn new(test_name: String, error: ParseError2) -> Self {
 
    fn new(test_name: String, error: ParseError) -> Self {
 
        Self{ test_name, error }
 
    }
 

	
 
    pub(crate) fn error<F: Fn(ErrorTester)>(&self, f: F) {
 
        // Maybe multiple errors will be supported in the future
 
        let tester = ErrorTester{ test_name: &self.test_name, error: &self.error };
 
        f(tester)
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Utilities for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct ErrorTester<'a> {
 
    test_name: &'a str,
 
    error: &'a ParseError2,
 
    error: &'a ParseError,
 
}
 

	
 
impl<'a> ErrorTester<'a> {
 
    pub(crate) fn assert_num(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.error.statements.len(),
 
            "[{}] expected error to consist of '{}' parts, but encountered '{}' for {}",
 
            self.test_name, num, self.error.statements.len(), self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_ctx_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].context.contains(msg),
 
            "[{}] expected error statement {}'s context to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_msg_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].message.contains(msg),
 
            "[{}] expected error statement {}'s message to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    /// Seeks the index of the pattern in the context message, then checks if
 
    /// the input position corresponds to that index.
 
    pub (crate) fn assert_occurs_at(self, idx: usize, pattern: &str) -> Self {
 
        let pos = self.error.statements[idx].context.find(pattern);
 
        assert!(
 
            pos.is_some(),
 
            "[{}] incorrect occurs_at: '{}' could not be found in the context for {}",
 
            self.test_name, pattern, self.assert_postfix()
 
        );
 
        let pos = pos.unwrap();
 
        let col = self.error.statements[idx].position.col();
 
        assert_eq!(
 
            pos + 1, col,
 
            "[{}] Expected error to occur at column {}, but found it at {} for {}",
 
            self.test_name, pos + 1, col, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("error: [");
 
        for (idx, stmt) in self.error.statements.iter().enumerate() {
 
            if idx != 0 {
 
                v.push_str(", ");
 
            }
 

	
 
            v.push_str(&format!("{{ context: {}, message: {} }}", &stmt.context, stmt.message));
 
        }
 
        v.push(']');
 
        v
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Generic utilities
 
//------------------------------------------------------------------------------
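// serialize_parser_type renders a ParserType back into PDL-like source
// syntax, e.g. (hypothetically) "in<msg>", "auto[]", or "Pair<byte,int>" for
// a symbolic type with polymorphic arguments.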
 

	
 
fn serialize_parser_type(buffer: &mut String, heap: &Heap, id: ParserTypeId) {
 
    use ParserTypeVariant as PTV;
 

	
 
    let p = &heap[id];
 
    match &p.variant {
 
        PTV::Message => buffer.push_str("msg"),
 
        PTV::Bool => buffer.push_str("bool"),
 
        PTV::Byte => buffer.push_str("byte"),
 
        PTV::Short => buffer.push_str("short"),
 
        PTV::Int => buffer.push_str("int"),
 
        PTV::Long => buffer.push_str("long"),
 
        PTV::String => buffer.push_str("string"),
 
        PTV::IntegerLiteral => buffer.push_str("intlit"),
 
        PTV::Inferred => buffer.push_str("auto"),
 
        PTV::Array(sub_id) => {
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push_str("[]");
 
        },
 
        PTV::Input(sub_id) => {
 
            buffer.push_str("in<");
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push('>');
 
        },
 
        PTV::Output(sub_id) => {
 
            buffer.push_str("out<");
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push('>');
 
        },
 
        PTV::Symbolic(symbolic) => {
 
            buffer.push_str(&String::from_utf8_lossy(&symbolic.identifier.value));
 
            if symbolic.poly_args2.len() > 0 {
 
                buffer.push('<');
 
                for (poly_idx, poly_arg) in symbolic.poly_args2.iter().enumerate() {
 
                    if poly_idx != 0 { buffer.push(','); }
 
                    serialize_parser_type(buffer, heap, *poly_arg);
 
                }
 
                buffer.push('>');
 
            }
 
        }
 
    }
 
}
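// serialize_concrete_type does the same for fully inferred types. A
// ConcreteType stores its parts as a flattened pre-order array, so a
// hypothetical parts list [Instance(Pair, 2), Byte, Array, Long] serializes
// to "Pair<byte,long[]>".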
 

	
 
fn serialize_concrete_type(buffer: &mut String, heap: &Heap, def: DefinitionId, concrete: &ConcreteType) {
 
    // Retrieve polymorphic variables, if present (since we're dealing with a
 
    // concrete type we only expect function, component and struct definitions)
 
    let poly_vars = match &heap[def] {
 
        Definition::Function(definition) => &definition.poly_vars,
 
        Definition::Component(definition) => &definition.poly_vars,
 
        Definition::Struct(definition) => &definition.poly_vars,
 
        _ => unreachable!("Error in testing utility: unexpected type for concrete type serialization"),
 
    };
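    // Walks the type tree rooted at parts[idx], appending its textual form to
    // the buffer, and returns the index of the last part it consumed so the
    // caller can continue with the next sibling at that index plus one.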
 

	
 
    fn serialize_recursive(
 
        buffer: &mut String, heap: &Heap, poly_vars: &Vec<Identifier>, concrete: &ConcreteType, mut idx: usize
 
    ) -> usize {
 
        use ConcreteTypePart as CTP;
 

	
 
        let part = &concrete.parts[idx];
 
        match part {
 
            CTP::Marker(poly_idx) => {
 
                buffer.push_str(&String::from_utf8_lossy(&poly_vars[*poly_idx].value));
 
            },
 
            CTP::Void => buffer.push_str("void"),
 
            CTP::Message => buffer.push_str("msg"),
 
            CTP::Bool => buffer.push_str("bool"),
 
            CTP::Byte => buffer.push_str("byte"),
 
            CTP::Short => buffer.push_str("short"),
 
            CTP::Int => buffer.push_str("int"),
 
            CTP::Long => buffer.push_str("long"),
 
            CTP::String => buffer.push_str("string"),
 
            CTP::Array => {
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push_str("[]");
 
            },
 
            CTP::Slice => {
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push_str("[..]");
 
            },
 
            CTP::Input => {
 
                buffer.push_str("in<");
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push('>');
 
            },
 
            CTP::Output => {
 
                buffer.push_str("out<");
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push('>');
 
            },
 
            CTP::Instance(definition_id, num_sub) => {
 
                let definition_name = heap[*definition_id].identifier();
 
                buffer.push_str(&String::from_utf8_lossy(&definition_name.value));
 
                buffer.push('<');
 
                for sub_idx in 0..*num_sub {
 
                    if sub_idx != 0 { buffer.push(','); }
 
                    idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                }
 
                buffer.push('>');
 
            }
 
        }
 

	
 
        idx
 
    }
 

	
 
    serialize_recursive(buffer, heap, poly_vars, concrete, 0);
 
}
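// Finds the module whose root declares the given definition, if any.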
 

	
 
fn seek_def_in_modules<'a>(heap: &Heap, modules: &'a [LexedModule], def_id: DefinitionId) -> Option<&'a LexedModule> {
 
    for module in modules {
 
        let root = &heap.protocol_descriptions[module.root_id];
 
        for definition in &root.definitions {
 
            if *definition == def_id {
 
                return Some(module)
 
            }
 
        }
 
    }
 

	
 
    None
 
}
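// Depth-first search over a statement tree for the first statement matching
// the predicate. A hypothetical lookup of the first while-statement (here
// `body_id` stands in for some StatementId obtained elsewhere):
//     let first_while = seek_stmt(heap, body_id, &|s| matches!(s, Statement::While(_)));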
 

	
 
fn seek_stmt<F: Fn(&Statement) -> bool>(heap: &Heap, start: StatementId, f: &F) -> Option<StatementId> {
 
    let stmt = &heap[start];
 
    if f(stmt) { return Some(start); }
 

	
 
    // This statement wasn't it, try to recurse
 
    let matched = match stmt {
 
        Statement::Block(block) => {
 
            for sub_id in &block.statements {
 
                if let Some(id) = seek_stmt(heap, *sub_id, f) {
 
                    return Some(id);
 
                }
 
            }
 

	
 
            None
 
        },
 
        Statement::Labeled(stmt) => seek_stmt(heap, stmt.body, f),
 
        Statement::If(stmt) => {
 
            if let Some(id) = seek_stmt(heap, stmt.true_body, f) {
 
                return Some(id);
 
            } else if let Some(id) = seek_stmt(heap, stmt.false_body, f) {
 
                return Some(id);
 
            }
 
            None
 
        },
 
        Statement::While(stmt) => seek_stmt(heap, stmt.body, f),
 
        Statement::Synchronous(stmt) => seek_stmt(heap, stmt.body, f),
 
        _ => None
 
    };
 

	
 
    matched
 
}
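// Recursively searches an expression and its sub-expressions for the first
// one matching the predicate, returning its id.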
 

	
 
fn seek_expr_in_expr<F: Fn(&Expression) -> bool>(heap: &Heap, start: ExpressionId, f: &F) -> Option<ExpressionId> {
 
    let expr = &heap[start];
 
    if f(expr) { return Some(start); }
 

	
 
    match expr {
 
        Expression::Assignment(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.left, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.right, f))
 
        },
 
        Expression::Conditional(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.test, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.true_expression, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.false_expression, f))
 
        },
 
        Expression::Binary(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.left, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.right, f))
 
        },
 
        Expression::Unary(expr) => {
 
            seek_expr_in_expr(heap, expr.expression, f)
 
        },
 
        Expression::Indexing(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.subject, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.index, f))
 
        },
 
        Expression::Slicing(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.subject, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.from_index, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.to_index, f))
 
        },
 
        Expression::Select(expr) => {
 
            seek_expr_in_expr(heap, expr.subject, f)
 
        },
 
        Expression::Array(expr) => {
 
            for element in &expr.elements {
 
                if let Some(id) = seek_expr_in_expr(heap, *element, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Expression::Literal(expr) => {
 
            if let Literal::Struct(lit) = &expr.value {
 
                for field in &lit.fields {
 
                    if let Some(id) = seek_expr_in_expr(heap, field.value, f) {
 
                        return Some(id)
 
                    }
 
                }
 
            }
 
            None
 
        },
 
        Expression::Call(expr) => {
 
            for arg in &expr.arguments {
 
                if let Some(id) = seek_expr_in_expr(heap, *arg, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Expression::Variable(_) => {
 
            None
 
        }
 
    }
 
}
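// Like seek_expr_in_expr, but starts from a statement: it descends through
// nested statements and applies the expression search to every expression
// they contain.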
 

	
 
fn seek_expr_in_stmt<F: Fn(&Expression) -> bool>(heap: &Heap, start: StatementId, f: &F) -> Option<ExpressionId> {
 
    let stmt = &heap[start];
 

	
 
    match stmt {
 
        Statement::Block(stmt) => {
 
            for stmt_id in &stmt.statements {
 
                if let Some(id) = seek_expr_in_stmt(heap, *stmt_id, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Statement::Labeled(stmt) => {
 
            seek_expr_in_stmt(heap, stmt.body, f)
 
        },
 
        Statement::If(stmt) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, stmt.test, f))
 
            .or_else(|| seek_expr_in_stmt(heap, stmt.true_body, f))
 
            .or_else(|| seek_expr_in_stmt(heap, stmt.false_body, f))
 
        },
 
        Statement::While(stmt) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, stmt.test, f))
 
            .or_else(|| seek_expr_in_stmt(heap, stmt.body, f))
 
        },
 
        Statement::Synchronous(stmt) => {
 
            seek_expr_in_stmt(heap, stmt.body, f)
 
        },
 
        Statement::Return(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression, f)
 
        },
 
        Statement::Assert(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression, f)
 
        },
 
        Statement::New(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression.upcast(), f)
 
        },
 
        Statement::Expression(stmt) => {
 
            seek_expr_in_expr(heap, stmt.expression, f)
 
        },
 
        _ => None
 
    }
 
}
 
\ No newline at end of file