Changeset - e406c61b1158
[Not reviewed]
MH - 4 years ago 2021-03-29 15:31:30
contact@maxhenger.nl
start on enum literals, extended some tests
10 files changed with 543 insertions and 45 deletions:
0 comments (0 inline, 0 general)
src/macros.rs
 
macro_rules! enabled_debug_print {
 
    (false, $name:literal, $format:literal) => {};
 
    (false, $name:literal, $format:literal, $($args:expr),*) => {};
 
    (true, $name:literal, $format:literal) => {
 
        println!("[{}] {}", $name, $format)
 
    };
 
    (true, $name:literal, $format:literal, $($args:expr),*) => {
 
        println!("[{}] {}", $name, format!($format, $($args),*))
 
    };
 
}
 

	
 
/*
 
Change the definition of these macros to control the logging level statically
 
*/
 

	
 
macro_rules! log {
 
    (@ENDPT, $logger:expr, $($arg:tt)*) => {{
 
        // if let Some(w) = $logger.line_writer() {
 
        //     let _ = writeln!(w, $($arg)*);
 
        // }
 
    }};
 
    ($logger:expr, $($arg:tt)*) => {{
 
        #[cfg(not(feature = "no_logging"))]
 
        if let Some(w) = $logger.line_writer() {
 
            let _ = writeln!(w, $($arg)*);
 
        }
 
    }};
 
}
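For illustration, a few hypothetical call sites for these macros; `logger`, `connector_id` and `round` are made-up names, and `log!` assumes a value whose `line_writer()` returns an `Option` of something implementing `std::io::Write`:

// Illustrative sketch only. Prints "[lexer] consumed keyword 'struct'" because the first argument is `true`.
enabled_debug_print!(true, "lexer", "consumed keyword '{}'", "struct");
// Expands to nothing: the `false` arms above discard the message and its arguments.
enabled_debug_print!(false, "lexer", "this call is compiled out");
// Writes one line through the hypothetical `logger`, unless the "no_logging" feature is enabled.
log!(logger, "connector {} entered sync round {}", connector_id, round);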
src/protocol/ast.rs
 
@@ -911,219 +911,236 @@ impl Display for Type {
 
            }
 
            PrimitiveType::Output => {
 
                write!(f, "out")?;
 
            }
 
            PrimitiveType::Message => {
 
                write!(f, "msg")?;
 
            }
 
            PrimitiveType::Boolean => {
 
                write!(f, "boolean")?;
 
            }
 
            PrimitiveType::Byte => {
 
                write!(f, "byte")?;
 
            }
 
            PrimitiveType::Short => {
 
                write!(f, "short")?;
 
            }
 
            PrimitiveType::Int => {
 
                write!(f, "int")?;
 
            }
 
            PrimitiveType::Long => {
 
                write!(f, "long")?;
 
            }
 
            PrimitiveType::Symbolic(data) => {
 
                // Type data is in ASCII range.
 
                if let Some(id) = &data.definition {
 
                    write!(
 
                        f, "Symbolic({}, id: {})", 
 
                        String::from_utf8_lossy(&data.identifier.value),
 
                        id.index
 
                    )?;
 
                } else {
 
                    write!(
 
                        f, "Symbolic({}, id: Unresolved)",
 
                        String::from_utf8_lossy(&data.identifier.value)
 
                    )?;
 
                }
 
            }
 
        }
 
        if self.array {
 
            write!(f, "[]")
 
        } else {
 
            Ok(())
 
        }
 
    }
 
}
 

	
 
type LiteralCharacter = Vec<u8>;
 
type LiteralInteger = i64; // TODO: @int_literal
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Literal {
 
    Null, // message
 
    True,
 
    False,
 
    Character(LiteralCharacter),
 
    Integer(LiteralInteger),
 
    Struct(LiteralStruct),
 
}
 

	
 
impl Literal {
 
    pub(crate) fn as_struct(&self) -> &LiteralStruct {
 
        if let Literal::Struct(literal) = self{
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Struct", self)
 
        }
 
    }
 

	
 
    pub(crate) fn as_struct_mut(&mut self) -> &mut LiteralStruct {
 
        if let Literal::Struct(literal) = self{
 
            literal
 
        } else {
 
            unreachable!("Attempted to obtain {:?} as Literal::Struct", self)
 
        }
 
    }
 
}
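A hypothetical call site for these accessors: a later compiler phase that has already checked the expression kind can use the panicking accessor directly, relying on the `unreachable!` to flag phase-ordering bugs. `link_struct_literal` is an illustrative name, not a function in this crate.

// Illustrative sketch only.
fn link_struct_literal(lit: &mut Literal, definition: DefinitionId) {
    let struct_lit = lit.as_struct_mut(); // panics if `lit` is not a struct literal
    struct_lit.definition = Some(definition);
}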
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LiteralStructField {
 
    // Phase 1: parser
 
    pub(crate) identifier: Identifier,
 
    pub(crate) value: ExpressionId,
 
    // Phase 2: linker
 
    pub(crate) field_idx: usize, // in struct definition
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LiteralStruct {
 
    // Phase 1: parser
 
    pub(crate) identifier: NamespacedIdentifier,
 
    pub(crate) poly_args: Vec<ParserTypeId>,
 
    pub(crate) fields: Vec<LiteralStructField>,
 
    // Phase 2: linker
 
    pub(crate) definition: Option<DefinitionId>
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LiteralEnum {
 
    // Phase 1: parser
 
    pub(crate) identifier: NamespacedIdentifier,
 
    pub(crate) poly_args: Vec<ParserTypeId>,
 
    // Phase 2: linker
 
    pub(crate) definition: Option<DefinitionId>,
 
    pub(crate) variant_idx: usize,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Method {
 
    Get,
 
    Put,
 
    Fires,
 
    Create,
 
    Symbolic(MethodSymbolic)
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct MethodSymbolic {
 
    pub(crate) identifier: NamespacedIdentifier,
 
    pub(crate) definition: Option<DefinitionId>
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Field {
 
    Length,
 
    Symbolic(FieldSymbolic),
 
}
 
impl Field {
 
    pub fn is_length(&self) -> bool {
 
        match self {
 
            Field::Length => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    pub fn as_symbolic(&self) -> &FieldSymbolic {
 
        match self {
 
            Field::Symbolic(v) => v,
 
            _ => unreachable!("attempted to get Field::Symbolic from {:?}", self)
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct FieldSymbolic {
 
    // Phase 1: Parser
 
    pub(crate) identifier: Identifier,
 
    // Phase 3: Typing
 
    pub(crate) definition: Option<DefinitionId>,
 
    pub(crate) field_idx: usize,
 
}
 

	
 
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
 
pub enum Scope {
 
    Definition(DefinitionId),
 
    Regular(BlockStatementId),
 
    Synchronous((SynchronousStatementId, BlockStatementId)),
 
}
 

	
 
impl Scope {
 
    pub fn is_block(&self) -> bool {
 
        match &self {
 
            Scope::Definition(_) => false,
 
            Scope::Regular(_) => true,
 
            Scope::Synchronous(_) => true,
 
        }
 
    }
 
    pub fn to_block(&self) -> BlockStatementId {
 
        match &self {
 
            Scope::Regular(id) => *id,
 
            Scope::Synchronous((_, id)) => *id,
 
            _ => panic!("unable to get BlockStatement from Scope")
 
        }
 
    }
 
}
 

	
 
pub trait VariableScope {
 
    fn parent_scope(&self, h: &Heap) -> Option<Scope>;
 
    fn get_variable(&self, h: &Heap, id: &Identifier) -> Option<VariableId>;
 
}
 

	
 
impl VariableScope for Scope {
 
    fn parent_scope(&self, h: &Heap) -> Option<Scope> {
 
        match self {
 
            Scope::Definition(def) => h[*def].parent_scope(h),
 
            Scope::Regular(stmt) => h[*stmt].parent_scope(h),
 
            Scope::Synchronous((stmt, _)) => h[*stmt].parent_scope(h),
 
        }
 
    }
 
    fn get_variable(&self, h: &Heap, id: &Identifier) -> Option<VariableId> {
 
        match self {
 
            Scope::Definition(def) => h[*def].get_variable(h, id),
 
            Scope::Regular(stmt) => h[*stmt].get_variable(h, id),
 
            Scope::Synchronous((stmt, _)) => h[*stmt].get_variable(h, id),
 
        }
 
    }
 
}
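A minimal sketch of how a lookup can use the trait above to walk enclosing scopes until a match is found or the definition root is reached (`lookup_variable` is a hypothetical helper):

// Illustrative sketch only.
fn lookup_variable(h: &Heap, mut scope: Scope, id: &Identifier) -> Option<VariableId> {
    loop {
        // Try the current scope first, then climb to its parent.
        if let Some(var_id) = scope.get_variable(h, id) {
            return Some(var_id);
        }
        match scope.parent_scope(h) {
            Some(parent) => scope = parent,
            None => return None, // no enclosing scope left
        }
    }
}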
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Variable {
 
    Parameter(Parameter),
 
    Local(Local),
 
}
 

	
 
impl Variable {
 
    pub fn identifier(&self) -> &Identifier {
 
        match self {
 
            Variable::Parameter(var) => &var.identifier,
 
            Variable::Local(var) => &var.identifier,
 
        }
 
    }
 
    pub fn is_parameter(&self) -> bool {
 
        match self {
 
            Variable::Parameter(_) => true,
 
            _ => false,
 
        }
 
    }
 
    pub fn as_parameter(&self) -> &Parameter {
 
        match self {
 
            Variable::Parameter(result) => result,
 
            _ => panic!("Unable to cast `Variable` to `Parameter`"),
 
        }
 
    }
 
    pub fn as_local(&self) -> &Local {
 
        match self {
 
            Variable::Local(result) => result,
 
            _ => panic!("Unable to cast `Variable` to `Local`"),
 
        }
 
    }
 
    pub fn as_local_mut(&mut self) -> &mut Local {
 
        match self {
 
            Variable::Local(result) => result,
 
            _ => panic!("Unable to cast 'Variable' to 'Local'"),
 
        }
 
    }
 
}
 

	
src/protocol/lexer.rs
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 

	
 
const MAX_LEVEL: usize = 128;
 
const MAX_NAMESPACES: u8 = 8; // only three levels are supported at the moment
 

	
 
macro_rules! debug_log {
 
    ($format:literal) => {
 
        enabled_debug_print!(false, "lexer", $format);
 
    };
 
    ($format:literal, $($args:expr),*) => {
 
        enabled_debug_print!(false, "lexer", $format, $($args),*);
 
    };
 
}
 

	
 
macro_rules! debug_line {
 
    ($source:expr) => {
 
        {
 
            let mut buffer = String::with_capacity(128);
 
            for idx in 0..buffer.capacity() {
 
                let next = $source.lookahead(idx);
 
                if next.is_none() || Some(b'\n') == next { break; }
 
                buffer.push(next.unwrap() as char);
 
            }
 
            buffer
 
        }
 
    };
 
}
 
fn is_vchar(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= 0x21 && c <= 0x7E
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_wsp(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c == b' ' || c == b'\t'
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_ident_start(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= b'A' && c <= b'Z' || c >= b'a' && c <= b'z'
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_ident_rest(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= b'A' && c <= b'Z' || c >= b'a' && c <= b'z' || c >= b'0' && c <= b'9' || c == b'_'
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_constant(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= b'0' && c <= b'9' || c == b'\''
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_integer_start(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= b'0' && c <= b'9'
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn is_integer_rest(x: Option<u8>) -> bool {
 
    if let Some(c) = x {
 
        c >= b'0' && c <= b'9'
 
            || c >= b'a' && c <= b'f'
 
            || c >= b'A' && c <= b'F'
 
            || c == b'x'
 
            || c == b'o'
 
    } else {
 
        false
 
    }
 
}
 

	
 
fn lowercase(x: u8) -> u8 {
 
    if x >= b'A' && x <= b'Z' {
 
        x - b'A' + b'a'
 
    } else {
 
        x
 
    }
 
}
 

	
 
pub struct Lexer<'a> {
 
    source: &'a mut InputSource,
 
    level: usize,
 
}
 

	
 
impl Lexer<'_> {
 
    pub fn new(source: &mut InputSource) -> Lexer {
 
        Lexer { source, level: 0 }
 
    }
 
    fn error_at_pos(&self, msg: &str) -> ParseError2 {
 
        ParseError2::new_error(self.source, self.source.pos(), msg)
 
    }
 
    fn consume_line(&mut self) -> Result<Vec<u8>, ParseError2> {
 
        let mut result: Vec<u8> = Vec::new();
 
        let mut next = self.source.next();
 
        while next.is_some() && next != Some(b'\n') && next != Some(b'\r') {
 
            if !(is_vchar(next) || is_wsp(next)) {
 
                return Err(self.error_at_pos("Expected visible character or whitespace"));
 
            }
 
            result.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 
        if next.is_some() {
 
            self.source.consume();
 
        }
 
        if next == Some(b'\r') && self.source.next() == Some(b'\n') {
 
            self.source.consume();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_whitespace(&mut self, expected: bool) -> Result<(), ParseError2> {
 
        let mut found = false;
 
        let mut next = self.source.next();
 
        while next.is_some() {
 
            if next == Some(b' ')
 
                || next == Some(b'\t')
 
                || next == Some(b'\r')
 
                || next == Some(b'\n')
 
            {
 
                self.source.consume();
 
                next = self.source.next();
 
                found = true;
 
                continue;
 
            }
 
            if next == Some(b'/') {
 
                next = self.source.lookahead(1);
 
                if next == Some(b'/') {
 
                    self.source.consume(); // slash
 
                    self.source.consume(); // slash
 
                    self.consume_line()?;
 
                    next = self.source.next();
 
                    found = true;
 
                    continue;
 
                }
 
                if next == Some(b'*') {
 
                    self.source.consume(); // slash
 
                    self.source.consume(); // star
 
                    next = self.source.next();
 
                    while next.is_some() {
 
                        if next == Some(b'*') {
 
                            next = self.source.lookahead(1);
 
                            if next == Some(b'/') {
 
                                self.source.consume(); // star
 
                                self.source.consume(); // slash
 
                                break;
 
                            }
 
                        }
 
                        self.source.consume();
 
                        next = self.source.next();
 
                    }
 
                    next = self.source.next();
 
                    found = true;
 
                    continue;
 
                }
 
            }
 
            break;
 
        }
 
        if expected && !found {
 
            Err(self.error_at_pos("Expected whitespace"))
 
        } else {
 
            Ok(())
 
        }
 
    }
 
    fn consume_any_chars(&mut self) {
 
        if !is_ident_start(self.source.next()) { return }
 
        self.source.consume();
 
        while is_ident_rest(self.source.next()) {
 
            self.source.consume()
 
        }
 
    }
 
    fn has_keyword(&self, keyword: &[u8]) -> bool {
 
        if !self.source.has(keyword) {
 
            return false;
 
        }
 

	
 
        // Word boundary
 
        let next = self.source.lookahead(keyword.len());
 
        if next.is_none() { return true; }
 
        return !is_ident_rest(next);
 
        
 
        if let Some(next) = self.source.lookahead(keyword.len()) {
 
            !(next >= b'A' && next <= b'Z' || next >= b'a' && next <= b'z')
 
        } else {
 
            true
 
        }
 
    }
 
    fn consume_keyword(&mut self, keyword: &[u8]) -> Result<(), ParseError2> {
 
        let len = keyword.len();
 
        for i in 0..len {
 
            let expected = Some(lowercase(keyword[i]));
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected keyword '{}'", String::from_utf8_lossy(keyword))));
 
            }
 
            self.source.consume();
 
        }
 
        if let Some(next) = self.source.next() {
 
            if next >= b'A' && next <= b'Z' || next >= b'a' && next <= b'z' || next >= b'0' && next <= b'9' {
 
                return Err(self.error_at_pos(&format!("Expected word boundary after '{}'", String::from_utf8_lossy(keyword))));
 
            }
 
        }
 
        Ok(())
 
    }
 
    fn has_string(&self, string: &[u8]) -> bool {
 
        self.source.has(string)
 
    }
 
    fn consume_string(&mut self, string: &[u8]) -> Result<(), ParseError2> {
 
        let len = string.len();
 
        for i in 0..len {
 
            let expected = Some(string[i]);
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected {}", String::from_utf8_lossy(string))));
 
            }
 
            self.source.consume();
 
        }
 
        Ok(())
 
    }
 
    /// Generic comma-separated consumer. If opening delimiter is not found then
 
    /// `Ok(None)` will be returned. Otherwise will consume the comma separated
 
    /// values, allowing a trailing comma. If no comma is found and the closing
 
    /// delimiter is not found, then a parse error with `expected_end_msg` is
 
    /// returned.
 
    fn consume_comma_separated<T, F>(
 
        &mut self, h: &mut Heap, open: u8, close: u8, expected_end_msg: &str, func: F
 
    ) -> Result<Option<Vec<T>>, ParseError2>
 
        where F: Fn(&mut Lexer, &mut Heap) -> Result<T, ParseError2>
 
    {
 
        if Some(open) != self.source.next() {
 
            return Ok(None)
 
        }
 

	
 
        self.source.consume();
 
        self.consume_whitespace(false)?;
 
        let mut elements = Vec::new();
 
        let mut had_comma = true;
 

	
 
        loop {
 
            if Some(close) == self.source.next() {
 
                self.source.consume();
 
                break;
 
            } else if !had_comma {
 
                return Err(ParseError2::new_error(
 
                    &self.source, self.source.pos(), expected_end_msg
 
                ));
 
            }
 

	
 
            elements.push(func(self, h)?);
 
            self.consume_whitespace(false)?;
 

	
 
            had_comma = self.source.next() == Some(b',');
 
            if had_comma {
 
                self.source.consume();
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 

	
 
        Ok(Some(elements))
 
    }
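A rough usage sketch, mirroring the parameter-list call site further down: the closure parses one element, and the list may be empty or end in a trailing comma. The `maybe_params` binding is illustrative.

// Illustrative sketch. Accepts "(a, b, c)", "(a, b, c,)" and "()"; returns
// Ok(None) when the next byte is not '(' so the caller can treat the list as optional.
let maybe_params = self.consume_comma_separated(
    h, b'(', b')', "Expected the end of the parameter list",
    |lexer, heap| lexer.consume_parameter(heap),
)?;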
 
    /// Essentially the same as `consume_comma_separated`, but will not allocate
 
    /// memory. Will return `true` and leave the input position at the end of
 
    /// the comma-separated list if well formed. Otherwise returns `false` and
 
    /// leaves the input position at a "random" position.
 
    /// memory. Will return `Ok(true)` and leave the input position at the end
 
    /// of the comma-separated list if well formed and `Ok(false)` if the list is
 
    /// not present. Otherwise returns `Err(())` and leaves the input position 
 
    /// at a "random" position.
 
    fn consume_comma_separated_spilled_without_pos_recovery<F: Fn(&mut Lexer) -> bool>(
 
        &mut self, open: u8, close: u8, func: F
 
    ) -> bool {
 
    ) -> Result<bool, ()> {
 
        if Some(open) != self.source.next() {
 
            return true;
 
            return Ok(false);
 
        }
 

	
 
        self.source.consume();
 
        if self.consume_whitespace(false).is_err() { return false };
 
        if self.consume_whitespace(false).is_err() { return Err(()) };
 
        let mut had_comma = true;
 
        loop {
 
            if Some(close) == self.source.next() {
 
                self.source.consume();
 
                return true;
 
                return Ok(true);
 
            } else if !had_comma {
 
                return false;
 
                return Err(());
 
            }
 

	
 
            if !func(self) { return false; }
 
            if self.consume_whitespace(false).is_err() { return false };
 
            if !func(self) { return Err(()); }
 
            if self.consume_whitespace(false).is_err() { return Err(()) };
 

	
 
            had_comma = self.source.next() == Some(b',');
 
            if had_comma {
 
                self.source.consume();
 
                if self.consume_whitespace(false).is_err() { return false; }
 
                if self.consume_whitespace(false).is_err() { return Err(()); }
 
            }
 
        }
 
    }
 
    fn consume_ident(&mut self) -> Result<Vec<u8>, ParseError2> {
 
        if !self.has_identifier() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let mut result = Vec::new();
 
        let mut next = self.source.next();
 
        result.push(next.unwrap());
 
        self.source.consume();
 
        next = self.source.next();
 
        while is_ident_rest(next) {
 
            result.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 
        Ok(result)
 
    }
 
    fn has_integer(&mut self) -> bool {
 
        is_integer_start(self.source.next())
 
    }
 
    fn consume_integer(&mut self) -> Result<i64, ParseError2> {
 
        let position = self.source.pos();
 
        let mut data = Vec::new();
 
        let mut next = self.source.next();
 
        while is_integer_rest(next) {
 
            data.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 

	
 
        let data_len = data.len();
 
        debug_assert_ne!(data_len, 0);
 
        if data_len == 1 {
 
            debug_assert!(data[0] >= b'0' && data[0] <= b'9');
 
            return Ok((data[0] - b'0') as i64);
 
        } else {
 
            // TODO: Fix, u64 should be supported as well
 
            let parsed = if data[1] == b'b' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 2)
 
            } else if data[1] == b'o' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 8)
 
            } else if data[1] == b'x' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 16)
 
            } else {
 
                // Assume decimal
 
                let data = String::from_utf8_lossy(&data);
 
                i64::from_str_radix(&data, 10)
 
            };
 

	
 
            if let Err(_err) = parsed {
 
                return Err(ParseError2::new_error(&self.source, position, "Invalid integer constant"));
 
            }
 

	
 
            Ok(parsed.unwrap())
 
        }
 
    }
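A standalone sketch of the radix dispatch above (same shape, same `i64` limitation): `0b1010` parses as 10, `0o17` as 15, `0xff` as 255 and `1234` as 1234. `parse_integer_literal` is not the lexer's own helper.

// Illustrative sketch only.
fn parse_integer_literal(data: &[u8]) -> Result<i64, std::num::ParseIntError> {
    match data.get(1) {
        Some(b'b') => i64::from_str_radix(&String::from_utf8_lossy(&data[2..]), 2),
        Some(b'o') => i64::from_str_radix(&String::from_utf8_lossy(&data[2..]), 8),
        Some(b'x') => i64::from_str_radix(&String::from_utf8_lossy(&data[2..]), 16),
        _ => i64::from_str_radix(&String::from_utf8_lossy(data), 10), // assume decimal
    }
}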
 

	
 
    // Statement keywords
 
    // TODO: Clean up these functions
 
    fn has_statement_keyword(&self) -> bool {
 
        self.has_keyword(b"channel")
 
            || self.has_keyword(b"skip")
 
            || self.has_keyword(b"if")
 
            || self.has_keyword(b"while")
 
            || self.has_keyword(b"break")
 
            || self.has_keyword(b"continue")
 
            || self.has_keyword(b"synchronous")
 
            || self.has_keyword(b"return")
 
            || self.has_keyword(b"assert")
 
            || self.has_keyword(b"goto")
 
            || self.has_keyword(b"new")
 
    }
 
    fn has_type_keyword(&self) -> bool {
 
        self.has_keyword(b"in")
 
            || self.has_keyword(b"out")
 
            || self.has_keyword(b"msg")
 
            || self.has_keyword(b"boolean")
 
            || self.has_keyword(b"byte")
 
            || self.has_keyword(b"short")
 
            || self.has_keyword(b"int")
 
            || self.has_keyword(b"long")
 
            || self.has_keyword(b"auto")
 
    }
 
    fn has_builtin_keyword(&self) -> bool {
 
        self.has_keyword(b"get")
 
            || self.has_keyword(b"fires")
 
            || self.has_keyword(b"create")
 
            || self.has_keyword(b"length")
 
    }
 
    fn has_reserved(&self) -> bool {
 
        self.has_statement_keyword()
 
@@ -387,355 +414,363 @@ impl Lexer<'_> {
 

	
 
    fn has_identifier(&self) -> bool {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return false;
 
        }
 
        let next = self.source.next();
 
        is_ident_start(next)
 
    }
 
    fn consume_identifier(&mut self) -> Result<Identifier, ParseError2> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let position = self.source.pos();
 
        let value = self.consume_ident()?;
 
        Ok(Identifier{ position, value })
 
    }
 
    fn consume_identifier_spilled(&mut self) -> Result<(), ParseError2> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        self.consume_ident()?;
 
        Ok(())
 
    }
 
    fn has_namespaced_identifier(&self) -> bool { 
 
        self.has_identifier() 
 
    }
 
    fn consume_namespaced_identifier(&mut self) -> Result<NamespacedIdentifier, ParseError2> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        let position = self.source.pos();
 
        let mut ns_ident = self.consume_ident()?;
 
        let mut num_namespaces = 1;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            if num_namespaces >= MAX_NAMESPACES {
 
                return Err(self.error_at_pos("Too many namespaces in identifier"));
 
            }
 
            let new_ident = self.consume_ident()?;
 
            ns_ident.extend(b"::");
 
            ns_ident.extend(new_ident);
 
            num_namespaces += 1;
 
        }
 

	
 
        Ok(NamespacedIdentifier{
 
            position,
 
            value: ns_ident,
 
            num_namespaces,
 
        })
 
    }
 
    fn consume_namespaced_identifier_spilled(&mut self) -> Result<(), ParseError2> {
 
        // TODO: @performance
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        self.consume_ident()?;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_ident()?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    // Types and type annotations
 

	
 
    /// Consumes a type definition. When called the input position should be at
 
    /// the type specification. When done the input position will be at the end
 
    /// of the type specifications (hence may be at whitespace).
 
    fn consume_type2(&mut self, h: &mut Heap, allow_inference: bool) -> Result<ParserTypeId, ParseError2> {
 
        // Small helper function to convert in/out polymorphic arguments. Not
 
        // pretty, but the returned boolean is true if the error is due to inference
 
        // not being allowed
 
        let reduce_port_poly_args = |
 
            heap: &mut Heap,
 
            port_pos: &InputPosition,
 
            args: Vec<ParserTypeId>,
 
        | -> Result<ParserTypeId, bool> {
 
            match args.len() {
 
                0 => if allow_inference {  
 
                    Ok(heap.alloc_parser_type(|this| ParserType{
 
                        this,
 
                        pos: port_pos.clone(),
 
                        variant: ParserTypeVariant::Inferred
 
                    }))
 
                } else {
 
                    Err(true)
 
                },
 
                1 => Ok(args[0]),
 
                _ => Err(false)
 
            }
 
        };
 

	
 
        // Consume the type
 
        debug_log!("consume_type2: {}", debug_line!(self.source));
 
        let pos = self.source.pos();
 
        let parser_type_variant = if self.has_keyword(b"msg") {
 
            self.consume_keyword(b"msg")?;
 
            ParserTypeVariant::Message
 
        } else if self.has_keyword(b"boolean") {
 
            self.consume_keyword(b"boolean")?;
 
            ParserTypeVariant::Bool
 
        } else if self.has_keyword(b"byte") {
 
            self.consume_keyword(b"byte")?;
 
            ParserTypeVariant::Byte
 
        } else if self.has_keyword(b"short") {
 
            self.consume_keyword(b"short")?;
 
            ParserTypeVariant::Short
 
        } else if self.has_keyword(b"int") {
 
            self.consume_keyword(b"int")?;
 
            ParserTypeVariant::Int
 
        } else if self.has_keyword(b"long") {
 
            self.consume_keyword(b"long")?;
 
            ParserTypeVariant::Long
 
        } else if self.has_keyword(b"str") {
 
            self.consume_keyword(b"str")?;
 
            ParserTypeVariant::String
 
        } else if self.has_keyword(b"auto") {
 
            if !allow_inference {
 
                return Err(ParseError2::new_error(
 
                        &self.source, pos,
 
                        "Type inference is not allowed here"
 
                ));
 
            }
 

	
 
            self.consume_keyword(b"auto")?;
 
            ParserTypeVariant::Inferred
 
        } else if self.has_keyword(b"in") {
 
            // TODO: @cleanup: not particularly neat to have this special case
 
            //  where we enforce polyargs in the parser-phase
 
            self.consume_keyword(b"in")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?;
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error|  {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'in' only allows for 1 polymorphic argument"
 
                    };
 
                    ParseError2::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Input(poly_arg)
 
        } else if self.has_keyword(b"out") {
 
            self.consume_keyword(b"out")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?;
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error| {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'out' only allows for 1 polymorphic argument, but {} were specified"
 
                    };
 
                    ParseError2::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Output(poly_arg)
 
        } else {
 
            // Must be a symbolic type
 
            let identifier = self.consume_namespaced_identifier()?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?;
 
            ParserTypeVariant::Symbolic(SymbolicParserType{identifier, poly_args, variant: None})
 
        };
 

	
 
        // If the type was a basic type (not supporting polymorphic type
 
        // arguments), then we make sure the user did not specify any of them.
 
        let mut backup_pos = self.source.pos();
 
        if !parser_type_variant.supports_polymorphic_args() {
 
            self.consume_whitespace(false)?;
 
            if let Some(b'<') = self.source.next() {
 
                return Err(ParseError2::new_error(
 
                    &self.source, self.source.pos(),
 
                    "This type does not allow polymorphic arguments"
 
                ));
 
            }
 

	
 
            self.source.seek(backup_pos);
 
        }
 

	
 
        let mut parser_type_id = h.alloc_parser_type(|this| ParserType{
 
            this, pos, variant: parser_type_variant
 
        });
 

	
 
        // If we're dealing with arrays, then we need to wrap the currently
 
        // parsed type in array types
 
        self.consume_whitespace(false)?;
 
        while let Some(b'[') = self.source.next() {
 
            let pos = self.source.pos();
 
            self.source.consume();
 
            self.consume_whitespace(false)?;
 
            if let Some(b']') = self.source.next() {
 
                // Type is wrapped in an array
 
                self.source.consume();
 
                parser_type_id = h.alloc_parser_type(|this| ParserType{
 
                    this, pos, variant: ParserTypeVariant::Array(parser_type_id)
 
                });
 
                backup_pos = self.source.pos();
 

	
 
                // In case we're dealing with another array
 
                self.consume_whitespace(false)?;
 
            } else {
 
                return Err(ParseError2::new_error(
 
                    &self.source, pos,
 
                    "Expected a closing ']'"
 
                ));
 
            }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(parser_type_id)
 
    }
 

	
 
    /// Attempts to consume a type without returning it. If it doesn't encounter
 
    /// a well-formed type, then the input position is left at a "random"
 
    /// position.
 
    fn maybe_consume_type_spilled_without_pos_recovery(&mut self) -> bool {
 
        // Consume type identifier
 
        debug_log!("maybe_consume_type_spilled_...: {}", debug_line!(self.source));
 
        if self.has_type_keyword() {
 
            self.consume_any_chars();
 
        } else {
 
            let ident = self.consume_namespaced_identifier();
 
            if ident.is_err() { return false; }
 
        }
 

	
 
        // Consume any polymorphic arguments that follow the type identifier
 
        let mut backup_pos = self.source.pos();
 
        if self.consume_whitespace(false).is_err() { return false; }
 
        if !self.maybe_consume_poly_args_spilled_without_pos_recovery() { return false; }
 
        match self.maybe_consume_poly_args_spilled_without_pos_recovery() {
 
            Ok(true) => backup_pos = self.source.pos(),
 
            Ok(false) => {},
 
            Err(()) => return false
 
        }
 
        
 
        // Consume any array specifiers. Make sure we always leave the input
 
        // position at the end of the last array specifier if we do find a
 
        // valid type
 

	
 
        if self.consume_whitespace(false).is_err() { return false; }
 
        while let Some(b'[') = self.source.next() {
 
            self.source.consume();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
            if self.source.next() != Some(b']') { return false; }
 
            self.source.consume();
 
            backup_pos = self.source.pos();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return true;
 
    }
 

	
 
    fn maybe_consume_type_spilled(&mut self) -> bool {
 
        let backup_pos = self.source.pos();
 
        if !self.maybe_consume_type_spilled_without_pos_recovery() {
 
            self.source.seek(backup_pos);
 
            return false;
 
        }
 

	
 
        return true;
 
    }
 

	
 
    /// Attempts to consume polymorphic arguments without returning them. If it
 
    /// doesn't encounter well-formed polymorphic arguments, then the input
 
    /// position is left at a "random" position.
 
    fn maybe_consume_poly_args_spilled_without_pos_recovery(&mut self) -> bool {
 
    /// position is left at a "random" position. Returns a boolean indicating if
 
    /// the poly_args list was present.
 
    fn maybe_consume_poly_args_spilled_without_pos_recovery(&mut self) -> Result<bool, ()> {
 
        debug_log!("maybe_consume_poly_args_spilled_...: {}", debug_line!(self.source));
 
        self.consume_comma_separated_spilled_without_pos_recovery(
 
            b'<', b'>', |lexer| {
 
                lexer.maybe_consume_type_spilled_without_pos_recovery()
 
            })
 
    }
 

	
 
    /// Consumes polymorphic arguments and their delimiters if specified. If
 
    /// polyargs are present then the args are consumed and the input position
 
    /// will be placed after the polyarg list. If polyargs are not present then
 
    /// the input position will remain unmodified and an empty vector will be
 
    /// returned.
 
    ///
 
    /// Polymorphic arguments represent the specification of the parametric
 
    /// types of a polymorphic type: they specify the value of the polymorphic
 
    /// type's polymorphic variables.
 
    fn consume_polymorphic_args(&mut self, h: &mut Heap, allow_inference: bool) -> Result<Vec<ParserTypeId>, ParseError2> {
 
        let backup_pos = self.source.pos();
 
        match self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic argument list",
 
            |lexer, heap| lexer.consume_type2(heap, allow_inference)
 
        )? {
 
            Some(poly_args) => Ok(poly_args),
 
            None => {
 
                self.source.seek(backup_pos);
 
                Ok(vec![])
 
            }
 
        }
 
    }
 

	
 
    /// Consumes polymorphic variables. These are identifiers that are used
 
    /// within polymorphic types. The input position may be at whitespace. If
 
    /// polymorphic variables are present then the whitespace, wrapping
 
    /// delimiters and the polymorphic variables are consumed. Otherwise the
 
    /// input position will stay where it is. If no polymorphic variables are
 
    /// present then an empty vector will be returned.
 
    fn consume_polymorphic_vars(&mut self, h: &mut Heap) -> Result<Vec<Identifier>, ParseError2> {
 
        let backup_pos = self.source.pos();
 
        match self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic variable list",
 
            |lexer, _heap| lexer.consume_identifier()
 
        )? {
 
            Some(poly_vars) => Ok(poly_vars),
 
            None => {
 
                self.source.seek(backup_pos);
 
                Ok(vec!())
 
            }
 
        }
 
    }
 

	
 
    // Parameters
 

	
 
    fn consume_parameter(&mut self, h: &mut Heap) -> Result<ParameterId, ParseError2> {
 
        let parser_type = self.consume_type2(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let position = self.source.pos();
 
        let identifier = self.consume_identifier()?;
 
        let id =
 
            h.alloc_parameter(|this| Parameter { this, position, parser_type, identifier });
 
        Ok(id)
 
    }
 
    fn consume_parameters(&mut self, h: &mut Heap) -> Result<Vec<ParameterId>, ParseError2> {
 
        match self.consume_comma_separated(
 
            h, b'(', b')', "Expected the end of the parameter list",
 
            |lexer, heap| lexer.consume_parameter(heap)
 
        )? {
 
            Some(params) => Ok(params),
 
            None => {
 
                Err(ParseError2::new_error(
 
                    &self.source, self.source.pos(),
 
                    "Expected a parameter list"
 
                ))
 
            }
 
        }
 
    }
 

	
 
    // ====================
 
    // Expressions
 
    // ====================
 

	
 
    fn consume_paren_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        let result = self.consume_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b")")?;
 
        Ok(result)
 
    }
 
    fn consume_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested expression"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_assignment_expression(h);
 
        self.level -= 1;
 
        result
 
    }
 
@@ -1337,326 +1372,329 @@ impl Lexer<'_> {
 
        }
 
        if self.has_builtin_literal() {
 
            return Ok(self.consume_builtin_literal_expression(h)?.upcast());
 
        }
 
        if self.has_struct_literal() {
 
            return Ok(self.consume_struct_literal_expression(h)?.upcast());
 
        }
 
        if self.has_call_expression() {
 
            return Ok(self.consume_call_expression(h)?.upcast());
 
        }
 
        Ok(self.consume_variable_expression(h)?.upcast())
 
    }
 
    fn consume_array_expression(&mut self, h: &mut Heap) -> Result<ArrayExpressionId, ParseError2> {
 
        let position = self.source.pos();
 
        let mut elements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b"}") {
 
            while self.source.next().is_some() {
 
                elements.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b"}") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 
        self.consume_string(b"}")?;
 
        Ok(h.alloc_array_expression(|this| ArrayExpression {
 
            this,
 
            position,
 
            elements,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn has_builtin_literal(&self) -> bool {
 
        is_constant(self.source.next())
 
            || self.has_keyword(b"null")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
    }
 
    fn consume_builtin_literal_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<LiteralExpressionId, ParseError2> {
 
        let position = self.source.pos();
 
        let value;
 
        if self.has_keyword(b"null") {
 
            self.consume_keyword(b"null")?;
 
            value = Literal::Null;
 
        } else if self.has_keyword(b"true") {
 
            self.consume_keyword(b"true")?;
 
            value = Literal::True;
 
        } else if self.has_keyword(b"false") {
 
            self.consume_keyword(b"false")?;
 
            value = Literal::False;
 
        } else if self.source.next() == Some(b'\'') {
 
            self.source.consume();
 
            let mut data = Vec::new();
 
            let mut next = self.source.next();
 
            while next != Some(b'\'') && (is_vchar(next) || next == Some(b' ')) {
 
                data.push(next.unwrap());
 
                self.source.consume();
 
                next = self.source.next();
 
            }
 
            if next != Some(b'\'') || data.is_empty() {
 
                return Err(self.error_at_pos("Expected character constant"));
 
            }
 
            self.source.consume();
 
            value = Literal::Character(data);
 
        } else {
 
            if !self.has_integer() {
 
                return Err(self.error_at_pos("Expected integer constant"));
 
            }
 

	
 
            value = Literal::Integer(self.consume_integer()?);
 
        }
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression {
 
            this,
 
            position,
 
            value,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    fn has_struct_literal(&mut self) -> bool {
 
        // A struct literal is written as:
 
        //      namespace::StructName<maybe_one_of_these, auto>{ field: expr }
 
        // We will parse up until the opening brace to see if we're dealing with
 
        // a struct literal.
 
        let backup_pos = self.source.pos();
 
        let result = self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.maybe_consume_poly_args_spilled_without_pos_recovery() &&
 
            self.maybe_consume_poly_args_spilled_without_pos_recovery().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'{');
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
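The `has_*` lookahead functions all follow the same save-and-restore pattern; a self-contained sketch with a simplified cursor (not the real `InputSource`) shows the idea:

// Illustrative sketch only.
struct Cursor<'a> { data: &'a [u8], pos: usize }

impl<'a> Cursor<'a> {
    fn next(&self) -> Option<u8> { self.data.get(self.pos).copied() }
    fn consume(&mut self) { self.pos += 1; }
    fn pos(&self) -> usize { self.pos }
    fn seek(&mut self, pos: usize) { self.pos = pos; }
}

// Speculatively scan an identifier and report whether a '{' follows, always
// restoring the cursor so the real parse starts from the original position.
fn peeks_brace_after_ident(c: &mut Cursor) -> bool {
    let backup = c.pos();
    while c.next().map_or(false, |b| b.is_ascii_alphanumeric() || b == b'_') {
        c.consume();
    }
    let found = c.next() == Some(b'{');
    c.seek(backup);
    found
}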
 

	
 
    fn consume_struct_literal_expression(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError2> {
 
        // Consume identifier and polymorphic arguments
 
        debug_log!("consume_struct_literal_expression: {}", debug_line!(self.source));
 
        let position = self.source.pos();
 
        let identifier = self.consume_namespaced_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_args = self.consume_polymorphic_args(h, true)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume fields
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let identifier = lexer.consume_identifier()?;
 
                lexer.consume_whitespace(false)?;
 
                lexer.consume_string(b":")?;
 
                lexer.consume_whitespace(false)?;
 
                let value = lexer.consume_expression(heap)?;
 

	
 
                Ok(LiteralStructField{ identifier, value, field_idx: 0 })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError2::new_error(
 
                self.source, self.source.pos(),
 
                "A struct literal must be followed by its field values"
 
            ))
 
        };
 

	
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression{
 
            this,
 
            position,
 
            value: Literal::Struct(LiteralStruct{
 
                identifier,
 
                poly_args,
 
                fields,
 
                definition: None,
 
            }),
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        }))
 
    }
 

	
 
    fn has_call_expression(&mut self) -> bool {
 
        // We need to prevent ambiguity with various operators (because we may
 
        // be specifying polymorphic variables) and variables.
 
        if self.has_builtin_keyword() {
 
            return true;
 
        }
 

	
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 

	
 
        if self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.maybe_consume_poly_args_spilled_without_pos_recovery() &&
 
            self.maybe_consume_poly_args_spilled_without_pos_recovery().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'(') {
 
            // Seems like we have a function call or an enum literal
 
            result = true;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_call_expression(&mut self, h: &mut Heap) -> Result<CallExpressionId, ParseError2> {
 
        let position = self.source.pos();
 

	
 
        // Consume method identifier
 
        debug_log!("consume_call_expression: {}", debug_line!(self.source));
 
        let method;
 
        if self.has_keyword(b"get") {
 
            self.consume_keyword(b"get")?;
 
            method = Method::Get;
 
        } else if self.has_keyword(b"put") {
 
            self.consume_keyword(b"put")?;
 
            method = Method::Put;
 
        } else if self.has_keyword(b"fires") {
 
            self.consume_keyword(b"fires")?;
 
            method = Method::Fires;
 
        } else if self.has_keyword(b"create") {
 
            self.consume_keyword(b"create")?;
 
            method = Method::Create;
 
        } else {
 
            let identifier = self.consume_namespaced_identifier()?;
 
            method = Method::Symbolic(MethodSymbolic{
 
                identifier,
 
                definition: None
 
            })
 
        }
 

	
 
        // Consume polymorphic arguments
 
        self.consume_whitespace(false)?;
 
        let poly_args = self.consume_polymorphic_args(h, true)?;
 

	
 
        // Consume arguments to call
 
        self.consume_whitespace(false)?;
 
        let mut arguments = Vec::new();
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b")") {
 
            // TODO: allow trailing comma
 
            while self.source.next().is_some() {
 
                arguments.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b")") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?
 
            }
 
        }
 
        self.consume_string(b")")?;
 
        Ok(h.alloc_call_expression(|this| CallExpression {
 
            this,
 
            position,
 
            method,
 
            arguments,
 
            poly_args,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn consume_variable_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<VariableExpressionId, ParseError2> {
 
        let position = self.source.pos();
 
        debug_log!("consume_variable_expression: {}", debug_line!(self.source));
 
        let identifier = self.consume_namespaced_identifier()?;
 
        Ok(h.alloc_variable_expression(|this| VariableExpression {
 
            this,
 
            position,
 
            identifier,
 
            declaration: None,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    // ====================
 
    // Statements
 
    // ====================
 

	
 
    /// Consumes any kind of statement from the source and will error if it
 
    /// did not encounter a statement. Will also return an error if the
 
    /// statement is nested too deeply.
 
    ///
 
    /// `wrap_in_block` may be set to true to ensure that the parsed statement
 
    /// will be wrapped in a block statement if it is not already a block
 
    /// statement. This is used to ensure that all `if`, `while` and `sync`
 
    /// statements have a block statement as body.
 
    fn consume_statement(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError2> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested statement"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_statement_impl(h, wrap_in_block);
 
        self.level -= 1;
 
        result
 
    }
 
    fn has_label(&mut self) -> bool {
 
        // To prevent ambiguity with expression statements consisting only of an
 
        // identifier or a namespaced identifier, we look ahead and match on the
 
        // *single* colon that signals a labeled statement.
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 
        if self.consume_identifier_spilled().is_ok() {
 
            // next character is ':', second character is NOT ':'
 
            result = Some(b':') == self.source.next() && Some(b':') != self.source.lookahead(1)
 
        }
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_statement_impl(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError2> {
 
        // Parse and allocate statement
 
        let mut must_wrap = true;
 
        let mut stmt_id = if self.has_string(b"{") {
 
            must_wrap = false;
 
            self.consume_block_statement(h)?
 
        } else if self.has_keyword(b"skip") {
 
            must_wrap = false;
 
            self.consume_skip_statement(h)?.upcast()
 
        } else if self.has_keyword(b"if") {
 
            self.consume_if_statement(h)?.upcast()
 
        } else if self.has_keyword(b"while") {
 
            self.consume_while_statement(h)?.upcast()
 
        } else if self.has_keyword(b"break") {
 
            self.consume_break_statement(h)?.upcast()
 
        } else if self.has_keyword(b"continue") {
 
            self.consume_continue_statement(h)?.upcast()
 
        } else if self.has_keyword(b"synchronous") {
 
            self.consume_synchronous_statement(h)?.upcast()
 
        } else if self.has_keyword(b"return") {
 
            self.consume_return_statement(h)?.upcast()
 
        } else if self.has_keyword(b"assert") {
 
            self.consume_assert_statement(h)?.upcast()
 
        } else if self.has_keyword(b"goto") {
 
            self.consume_goto_statement(h)?.upcast()
 
        } else if self.has_keyword(b"new") {
 
            self.consume_new_statement(h)?.upcast()
 
        } else if self.has_label() {
 
            self.consume_labeled_statement(h)?.upcast()
 
        } else {
 
            self.consume_expression_statement(h)?.upcast()
 
        };
 

	
 
        // Wrap if desired and if needed
 
        if must_wrap && wrap_in_block {
 
            let position = h[stmt_id].position();
 
            let block_wrapper = h.alloc_block_statement(|this| BlockStatement{
 
                this,
 
                position,
 
                statements: vec![stmt_id],
 
                parent_scope: None,
 
                relative_pos_in_parent: 0,
 
                locals: Vec::new(),
 
                labels: Vec::new()
 
            });
 

	
 
            stmt_id = block_wrapper.upcast();
 
        }
 

	
 
        Ok(stmt_id)
 
    }
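Conceptually, the wrapping step above does the following (sketch with a simplified statement type, not the real AST): any statement that is not already a block becomes a single-element block, so `if`/`while`/`synchronous` bodies are always blocks.

// Illustrative sketch only; the real code also exempts `skip` from wrapping.
enum SimpleStmt { Block(Vec<SimpleStmt>), Other(&'static str) }

fn ensure_block(stmt: SimpleStmt) -> SimpleStmt {
    match stmt {
        block @ SimpleStmt::Block(_) => block,   // already a block: leave as-is
        other => SimpleStmt::Block(vec![other]), // wrap everything else
    }
}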
 
@@ -2301,149 +2339,173 @@ impl Lexer<'_> {
 

	
 
    fn has_import(&self) -> bool {
 
        self.has_keyword(b"import")
 
    }
 
    fn consume_import(&mut self, h: &mut Heap) -> Result<ImportId, ParseError2> {
 
        // Parse the word "import" and the name of the module
 
        let position = self.source.pos();
 
        self.consume_keyword(b"import")?;
 
        self.consume_whitespace(true)?;
 
        let mut value = Vec::new();
 
        let mut ident = self.consume_ident()?;
 
        value.append(&mut ident);
 
        let mut last_ident_start = 0;
 

	
 
        while self.has_string(b".") {
 
            self.consume_string(b".")?;
 
            value.push(b'.');
 
            ident = self.consume_ident()?;
 
            last_ident_start = value.len();
 
            value.append(&mut ident);
 
        }
 

	
 

	
 
        self.consume_whitespace(false)?;
 

	
 
        // Check for the potential aliasing or specific module imports
 
        let import = if self.has_string(b"as") {
 
            self.consume_string(b"as")?;
 
            self.consume_whitespace(true)?;
 
            let alias = self.consume_ident()?;
 

	
 
            h.alloc_import(|this| Import::Module(ImportModule{
 
                this,
 
                position,
 
                module_name: value,
 
                alias,
 
                module_id: None,
 
            }))
 
        } else if self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 

	
 
            let next = self.source.next();
 
            if Some(b'{') == next {
 
                let symbols = match self.consume_comma_separated(
 
                    h, b'{', b'}', "Expected end of import list",
 
                    |lexer, _heap| {
 
                        // Symbol name
 
                        let position = lexer.source.pos();
 
                        let name = lexer.consume_ident()?;
 
                        lexer.consume_whitespace(false)?;
 

	
 
                        // Symbol alias
 
                        if lexer.has_string(b"as") {
 
                            // With alias
 
                            lexer.consume_string(b"as")?;
 
                            lexer.consume_whitespace(true)?;
 
                            let alias = lexer.consume_ident()?;
 

	
 
                            Ok(AliasedSymbol{
 
                                position,
 
                                name,
 
                                alias,
 
                                definition_id: None
 
                            })
 
                        } else {
 
                            // Without alias
 
                            Ok(AliasedSymbol{
 
                                position,
 
                                name: name.clone(),
 
                                alias: name,
 
                                definition_id: None
 
                            })
 
                        }
 
                    }
 
                )? {
 
                    Some(symbols) => symbols,
 
                    None => unreachable!(), // because we checked for opening '{'
 
                };
 

	
 
                h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    position,
 
                    module_name: value,
 
                    module_id: None,
 
                    symbols,
 
                }))
 
            } else if Some(b'*') == next {
 
                self.source.consume();
 
                h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    position,
 
                    module_name: value,
 
                    module_id: None,
 
                    symbols: Vec::new()
 
                }))
 
            } else if self.has_identifier() {
 
                let position = self.source.pos();
 
                let name = self.consume_ident()?;
 
                self.consume_whitespace(false)?;
 
                let alias = if self.has_string(b"as") {
 
                    self.consume_string(b"as")?;
 
                    self.consume_whitespace(true)?;
 
                    self.consume_ident()?
 
                } else {
 
                    name.clone()
 
                };
 

	
 
                h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    position,
 
                    module_name: value,
 
                    module_id: None,
 
                    symbols: vec![AliasedSymbol{
 
                        position,
 
                        name,
 
                        alias,
 
                        definition_id: None
 
                    }]
 
                }))
 
            } else {
 
                return Err(self.error_at_pos("Expected '*' or '{'"));
 
                return Err(self.error_at_pos("Expected '*', '{' or a symbol name"));
 
            }
 
        } else {
 
            // No explicit alias or subimports, so implicit alias
 
            let alias = Vec::from(&value[last_ident_start..]);
 
            h.alloc_import(|this| Import::Module(ImportModule{
 
                this,
 
                position,
 
                module_name: value,
 
                alias,
 
                module_id: None,
 
            }))
 
        };
 

	
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        Ok(import)
 
    }
 
    pub fn consume_protocol_description(&mut self, h: &mut Heap) -> Result<RootId, ParseError2> {
 
        let position = self.source.pos();
 
        let mut pragmas = Vec::new();
 
        let mut imports = Vec::new();
 
        let mut definitions = Vec::new();
 
        self.consume_whitespace(false)?;
 
        while self.has_pragma() {
 
            let pragma = self.consume_pragma(h)?;
 
            pragmas.push(pragma);
 
            self.consume_whitespace(false)?;
 
        }
 
        while self.has_import() {
 
            let import = self.consume_import(h)?;
 
            imports.push(import);
 
            self.consume_whitespace(false)?;
 
        }
 
        while self.has_symbol_definition() {
 
            let def = self.consume_symbol_definition(h)?;
 
            definitions.push(def);
 
            self.consume_whitespace(false)?;
 
        }
 
        // end of file
 
        if !self.source.is_eof() {
 
            return Err(self.error_at_pos("Expected end of file"));
 
        }
 
        Ok(h.alloc_protocol_description(|this| Root {
 
            this,
 
            position,
 
            pragmas,
 
            imports,
 
            definitions,
 
        }))
 
    }
 
}
src/protocol/parser/mod.rs
Show inline comments
 
@@ -82,181 +82,183 @@ impl Parser {
 

	
 
                    module_version_pos = version.position.clone();
 
                    module_version = Some(version.version);
 
                },
 
            }
 
        }
 

	
 
        // Add module to list of modules and prevent naming conflicts
 
        let cur_module_idx = self.modules.len();
 
        if let Some(prev_module_idx) = self.module_lookup.get(&module_name) {
 
            // Find `#module` statement in other module again
 
            let prev_module = &self.modules[*prev_module_idx];
 
            let prev_module_pos = self.heap[prev_module.root_id].pragmas
 
                .iter()
 
                .find_map(|p| {
 
                    match &self.heap[*p] {
 
                        Pragma::Module(module) => Some(module.position.clone()),
 
                        _ => None
 
                    }
 
                })
 
                .unwrap_or(InputPosition::default());
 

	
 
            let module_name_msg = if module_name.is_empty() {
 
                format!("a nameless module")
 
            } else {
 
                format!("module '{}'", String::from_utf8_lossy(&module_name))
 
            };
 

	
 
            return Err(
 
                ParseError2::new_error(&source, module_name_pos, &format!("Double definition of {} across files", module_name_msg))
 
                    .with_postfixed_info(&prev_module.source, prev_module_pos, "Other definition was here")
 
            );
 
        }
 

	
 
        self.modules.push(LexedModule{
 
            source,
 
            module_name: module_name.clone(),
 
            version: module_version,
 
            root_id: pd
 
        });
 
        self.module_lookup.insert(module_name, cur_module_idx);
 
        Ok(pd)
 
    }
 

	
 
    fn resolve_symbols_and_types(&mut self) -> Result<(), ParseError2> {
 
        // Construct the symbol table to resolve any imports and/or definitions,
 
        // then use the symbol table to actually annotate all of the imports.
 
        // If the symbol table is constructed correctly then all imports MUST be
 
        // resolvable.
 
        self.symbol_table.build(&self.heap, &self.modules)?;
 

	
 
        // Not pretty, but we need to work around rust's borrowing rules, it is
 
        // totally safe to mutate the contents of an AST element that we are
 
        // not borrowing anywhere else.
 
        let mut module_index = 0;
 
        let mut import_index = 0;
 
        loop {
 
            if module_index >= self.modules.len() {
 
                break;
 
            }
 

	
 
            let module_root_id = self.modules[module_index].root_id;
 
            let import_id = {
 
                let root = &self.heap[module_root_id];
 
                if import_index >= root.imports.len() {
 
                    module_index += 1;
 
                    import_index = 0;
 
                    continue
 
                }
 
                root.imports[import_index]
 
            };
 

	
 
            let import = &mut self.heap[import_id];
 
            match import {
 
                Import::Module(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module import already resolved");
 
                    let target_module_id = self.symbol_table.resolve_module(&import.module_name)
 
                        .expect("module import is resolved by symbol table");
 
                    import.module_id = Some(target_module_id)
 
                },
 
                Import::Symbols(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module of symbol import already resolved");
 
                    let target_module_id = self.symbol_table.resolve_module(&import.module_name)
 
                        .expect("symbol import's module is resolved by symbol table");
 
                    import.module_id = Some(target_module_id);
 

	
 
                    for symbol in &mut import.symbols {
 
                        debug_assert!(symbol.definition_id.is_none(), "symbol import already resolved");
 
                        let (_, target_definition_id) = self.symbol_table.resolve_symbol(module_root_id, &symbol.alias)
 
                            .expect("symbol import is resolved by symbol table")
 
                            .as_definition()
 
                            .expect("symbol import does not resolve to namespace symbol");
 
                        symbol.definition_id = Some(target_definition_id);
 
                    }
 
                }
 
            }
 

	
 
            import_index += 1;
 
        }
 

	
 
        // All imports in the AST are now annotated. We now use the symbol table
 
        // to construct the type table.
 
        let mut type_ctx = TypeCtx::new(&self.symbol_table, &mut self.heap, &self.modules);
 
        self.type_table.build_base_types(&mut type_ctx)?;
 

	
 
        Ok(())
 
    }
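    // --- Editorial sketch (not part of this changeset) ----------------------
    // The index-based loop above copies the import id out of a short-lived
    // immutable borrow before taking the mutable borrow that annotates it, so
    // the two borrows of the heap never overlap. Below is a minimal
    // stand-alone version of the same pattern, using simplified stand-in
    // types rather than the real AST heap.
    #[allow(dead_code)]
    fn borrow_workaround_sketch() {
        struct TinyHeap { roots: Vec<Vec<usize>>, imports: Vec<Option<usize>> }
        let mut heap = TinyHeap {
            roots: vec![vec![0, 1], vec![2]],
            imports: vec![None, None, None],
        };

        let (mut module_index, mut import_index) = (0, 0);
        loop {
            if module_index >= heap.roots.len() { break; }
            // Copy the id out; once this block ends the immutable borrow of
            // `heap` ends with it, so the mutable access below is allowed.
            let import_id = {
                let root = &heap.roots[module_index];
                if import_index >= root.len() {
                    module_index += 1;
                    import_index = 0;
                    continue;
                }
                root[import_index]
            };
            heap.imports[import_id] = Some(module_index);
            import_index += 1;
        }
        assert_eq!(heap.imports, vec![Some(0), Some(0), Some(1)]);
    }
    // -------------------------------------------------------------------------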
 

	
 
    pub fn parse(&mut self) -> Result<(), ParseError2> {
 
        self.resolve_symbols_and_types()?;
 

	
 
        // Validate and link all modules
 
        let mut visit = ValidityAndLinkerVisitor::new();
 
        for module in &self.modules {
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.visit_module(&mut ctx)?;
 
        }
 

	
 
        // Perform typechecking on all modules
 
        let mut visit = TypeResolvingVisitor::new();
 
        let mut queue = ResolveQueue::new();
 
        for module in &self.modules {
 
            let ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module,
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            TypeResolvingVisitor::queue_module_definitions(&ctx, &mut queue);   
 
        };
 
        while !queue.is_empty() {
 
            let top = queue.pop().unwrap();
 
            let mut ctx = visitor::Ctx{
 
                heap: &mut self.heap,
 
                module: &self.modules[top.root_id.index as usize],
 
                symbols: &mut self.symbol_table,
 
                types: &mut self.type_table,
 
            };
 
            visit.handle_module_definition(&mut ctx, &mut queue, top)?;
 
        }
 

	
 
        // Perform remaining steps
 
        // TODO: Phase out at some point
 
        for module in &self.modules {
 
            let root_id = module.root_id;
 
            if let Err((position, message)) = Self::parse_inner(&mut self.heap, root_id) {
 
                return Err(ParseError2::new_error(&module.source, position, &message))
 
            }
 
        }
 

	
 
        // let mut writer = ASTWriter::new();
 
        // let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap();
 
        // writer.write_ast(&mut file, &self.heap);
 

	
 
        Ok(())
 
    }
 

	
 
    pub fn parse_inner(h: &mut Heap, pd: RootId) -> VisitorResult {
 
        // TODO: @cleanup, slowly phasing out old compiler
 
        // NestedSynchronousStatements::new().visit_protocol_description(h, pd)?;
 
        // ChannelStatementOccurrences::new().visit_protocol_description(h, pd)?;
 
        // FunctionStatementReturns::new().visit_protocol_description(h, pd)?;
 
        // ComponentStatementReturnNew::new().visit_protocol_description(h, pd)?;
 
        // CheckBuiltinOccurrences::new().visit_protocol_description(h, pd)?;
 
        // BuildSymbolDeclarations::new().visit_protocol_description(h, pd)?;
 
        // LinkCallExpressions::new().visit_protocol_description(h, pd)?;
 
        // BuildScope::new().visit_protocol_description(h, pd)?;
 
        // ResolveVariables::new().visit_protocol_description(h, pd)?;
 
        LinkStatements::new().visit_protocol_description(h, pd)?;
 
        // BuildLabels::new().visit_protocol_description(h, pd)?;
 
        // ResolveLabels::new().visit_protocol_description(h, pd)?;
 
        AssignableExpressions::new().visit_protocol_description(h, pd)?;
 
        IndexableExpressions::new().visit_protocol_description(h, pd)?;
 
        SelectableExpressions::new().visit_protocol_description(h, pd)?;
 

	
 
        Ok(())
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/type_resolver.rs
Show inline comments
 
/// type_resolver.rs
 
///
 
/// Performs type inference and type checking. Type inference is implemented by
 
/// applying constraints on (sub)trees of types. During this process the
 
/// resolver takes the `ParserType` structs (the representation of the types
 
/// written by the programmer), converts them to `InferenceType` structs (the
 
/// temporary data structure used during type inference) and attempts to arrive
 
/// at `ConcreteType` structs (the representation of a fully checked and
 
/// validated type).
 
///
 
/// The resolver will visit every statement and expression relevant to the
 
/// procedure and insert and determine its initial type based on context (e.g. a
 
/// return statement's expression must match the function's return type, an
 
/// if statement's test expression must evaluate to a boolean). When all are
 
/// visited we attempt to make progress in evaluating the types. Whenever a type
 
/// is progressed we queue the related expressions for further type progression.
 
/// Once no more expressions are in the queue the algorithm is finished. At this
 
/// point either all types are inferred (or can be trivially implicitly
 
/// determined), or we have incomplete types. In the latter case we return an
 
/// error.
 
///
 
/// Inference may be applied on non-polymorphic procedures and on polymorphic
 
/// procedures. When dealing with a non-polymorphic procedure we apply the type
 
/// resolver and annotate the AST with the `ConcreteType`s. When dealing with
 
/// polymorphic procedures we will only annotate the AST once, preserving
 
/// references to polymorphic variables. Any later pass will perform just the
 
/// type checking.
 
///
 
/// TODO: Needs a thorough rewrite:
 
///  0. polymorph_progress is intentionally broken at the moment.
 
///  1. For polymorphic type inference we need to have an extra datastructure
 
///     for progressing the polymorphic variables and mapping them back to each
 
///     signature type that uses that polymorphic type. The two types of markers
 
///     became somewhat of a mess.
 
///  2. We're doing a lot of extra work. It seems better to apply the initial
 
///     type based on expression parents, then to apply forced constraints (arg
 
///     to a fires() call must be port-like), only then to start progressing the
 
///     types.
 
///     Furthermore, queueing of expressions can be more intelligent, currently
 
///     every child/parent of an expression is inferred again when queued. Hence
 
///     we need to queue only specific children/parents of expressions.
 
///  3. Remove the `msg` type?
 
///  4. Disallow certain types in certain operations (e.g. `Void`).
 
///  5. Implement implicit and explicit casting.
 
///  6. Investigate different ways of performing the type-on-type inference,
 
///     maybe there is a better way then flattened trees + markers?
 

	
 
macro_rules! enabled_debug_print {
 
    (false, $name:literal, $format:literal) => {};
 
    (false, $name:literal, $format:literal, $($args:expr),*) => {};
 
    (true, $name:literal, $format:literal) => {
 
        println!("[{}] {}", $name, $format)
 
    };
 
    (true, $name:literal, $format:literal, $($args:expr),*) => {
 
        println!("[{}] {}", $name, format!($format, $($args),*))
 
    };
 
}
 

	
 
macro_rules! debug_log {
 
    ($format:literal) => {
 
        enabled_debug_print!(true, "types", $format);
 
    };
 
    ($format:literal, $($args:expr),*) => {
 
        enabled_debug_print!(true, "types", $format, $($args),*);
 
    };
 
}
 

	
 
use std::collections::{HashMap, HashSet, VecDeque};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::parser::type_table::*;
 
use super::visitor::{
 
    STMT_BUFFER_INIT_CAPACITY,
 
    EXPR_BUFFER_INIT_CAPACITY,
 
    Ctx,
 
    Visitor2,
 
    VisitorResult
 
};
 
use std::collections::hash_map::Entry;
 

	
 
const MESSAGE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Message, InferenceTypePart::Byte ];
 
const BOOL_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Bool ];
 
const NUMBERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::NumberLike ];
 
const INTEGERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::IntegerLike ];
 
const ARRAY_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Array, InferenceTypePart::Unknown ];
 
const ARRAYLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::ArrayLike, InferenceTypePart::Unknown ];
 

	
 
/// TODO: @performance Turn into PartialOrd+Ord to simplify checks
 
/// TODO: @types Remove the Message -> Byte hack at some point...
 
#[derive(Debug, Clone, Eq, PartialEq)]
 
pub(crate) enum InferenceTypePart {
 
    // A marker with an identifier which we can use to retrieve the type subtree
 
    // that follows the marker. This is used to perform type inference on
 
    // polymorphs: an expression may determine the polymorphs type, after we
 
    // need to apply that information to all other places where the polymorph is
 
    // used.
 
    MarkerDefinition(usize), // marker for polymorph types on a procedure's definition
 
    MarkerBody(usize), // marker for polymorph types within a procedure body
 
    // Completely unknown type, needs to be inferred
 
    Unknown,
 
    // Partially known type, may be inferred to be the appropriate related
 
    // type.
 
    // IndexLike,      // index into array/slice
 
    NumberLike,     // any kind of integer/float
 
    IntegerLike,    // any kind of integer
 
    ArrayLike,      // array or slice. Note that this must have a subtype
 
    PortLike,       // input or output port
 
    // Special types that cannot be instantiated by the user
 
    Void, // For builtin functions that do not return anything
 
    // Concrete types without subtypes
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // One subtype
 
    Message,
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // A user-defined type with any number of subtypes
 
    Instance(DefinitionId, usize)
 
}
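// --- Editorial sketch (not part of this changeset) --------------------------
// An `InferenceType` stores a type tree as a flat, pre-order `Vec` of the
// parts above, each part followed by its subtrees. For example, the return
// type that `insert_initial_struct_polymorph_data` (further down in this file)
// builds for a literal of a struct `Pair<T>` with `T` not yet specified is the
// vector sketched below; the `DefinitionId` argument is a stand-in value.
#[cfg(any())] // illustrative only; never compiled
fn _flattened_type_example(pair_definition_id: DefinitionId) -> Vec<InferenceTypePart> {
    use InferenceTypePart as ITP;
    vec![
        ITP::Instance(pair_definition_id, 1), // Pair with one polymorphic argument
        ITP::MarkerBody(0),                   // marker: the subtree after it is polyvar 0
        ITP::Unknown,                         // T itself still has to be inferred
    ]
}
// -----------------------------------------------------------------------------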
 

	
 
impl InferenceTypePart {
 
    fn is_marker(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        match self {
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if the type is concrete; markers are interpreted as concrete
 
    /// types.
 
    fn is_concrete(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | 
 
            ITP::ArrayLike | ITP::PortLike => false,
 
            _ => true
 
        }
 
    }
 

	
 
    fn is_concrete_number(&self) -> bool {
 
        // TODO: @float
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            _ => false,
 
@@ -1896,231 +1886,237 @@ impl TypeResolvingVisitor {
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 

	
 
                // Reapply progress in polymorphic variables to struct's type
 
                let signature_type: *mut _ = &mut poly_data.embedded[0];
 
                let subject_type: *mut _ = self.expr_types.get_mut(&subject_id).unwrap();
 
                
 
                let progress_subject = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    poly_data, &poly_progress, signature_type, subject_type
 
                );
 

	
 
                let signature_type: *mut _ = &mut poly_data.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    poly_data, &poly_progress, signature_type, expr_type
 
                );
 

	
 
                (progress_subject, progress_expr)
 
            }
 
        };
 

	
 
        if progress_subject { self.queue_expr(subject_id); }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Subject type [{}]: {}", progress_subject, self.expr_types.get(&subject_id).unwrap().display_name(&ctx.heap));
 
        debug_log!("   - Expr    type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> Result<(), ParseError2> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let expr_elements = expr.elements.clone(); // TODO: @performance
 

	
 
        debug_log!("Array expr ({} elements): {}", expr_elements.len(), upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // All elements should have an equal type
 
        let progress = self.apply_equal_n_constraint(ctx, upcast_id, &expr_elements)?;
 
        for (progress_arg, arg_id) in progress.iter().zip(expr_elements.iter()) {
 
            if *progress_arg {
 
                self.queue_expr(*arg_id);
 
            }
 
        }
 

	
 
        // And the output should be an array of the element types
 
        let mut expr_progress = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
        if !expr_elements.is_empty() {
 
            let first_arg_id = expr_elements[0];
 
            let (inner_expr_progress, arg_progress) = self.apply_equal2_constraint(
 
                ctx, upcast_id, upcast_id, 1, first_arg_id, 0
 
            )?;
 

	
 
            expr_progress = expr_progress || inner_expr_progress;
 

	
 
            // Note that if the array type progressed the type of the arguments,
 
            // then we should enqueue this progression function again
 
            // TODO: @fix Make apply_equal_n accept a start idx as well
 
            if arg_progress { self.queue_expr(upcast_id); }
 
        }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type [{}]: {}", expr_progress, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if expr_progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> Result<(), ParseError2> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 

	
 
        debug_log!("Literal expr: {}", upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        let progress_expr = match &expr.value {
 
            Literal::Null => {
 
                self.apply_forced_constraint(ctx, upcast_id, &MESSAGE_TEMPLATE)?
 
            },
 
            Literal::Integer(_) => {
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?
 
            },
 
            Literal::True | Literal::False => {
 
                self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?
 
            },
 
            Literal::Character(_) => todo!("character literals"),
 
            Literal::Struct(data) => {
 
                let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 
                for poly in &extra.poly_vars {
 
                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
 
                }
 
                let mut poly_progress = HashSet::new();
 
                debug_assert_eq!(extra.embedded.len(), data.fields.len());
 

	
 
                debug_log!(" * During (inferring types from fields and struct type):");
 

	
 
                // Mutually infer field signature/expression types
 
                for (field_idx, field) in data.fields.iter().enumerate() {
 
                    let field_expr_id = field.value;
 
                    let signature_type: *mut _ = &mut extra.embedded[field_idx];
 
                    let field_type: *mut _ = self.expr_types.get_mut(&field_expr_id).unwrap();
 
                    let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
                        ctx, upcast_id, Some(field_expr_id), extra, &mut poly_progress,
 
                        signature_type, 0, field_type, 0
 
                    )?;
 

	
 
                    debug_log!(
 
                        "   - Field {} type | sig: {}, field: {}", field_idx,
 
                        unsafe{&*signature_type}.display_name(&ctx.heap),
 
                        unsafe{&*field_type}.display_name(&ctx.heap)
 
                    );
 

	
 
                    if progress_arg {
 
                        self.expr_queued.insert(field_expr_id);
 
                    }
 
                }
 

	
 
                debug_log!("   - Field poly progress | {:?}", poly_progress);
 

	
 
                // Same for the type of the struct itself
 
                let signature_type: *mut _ = &mut extra.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
                let (_, progress_expr) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, None, extra, &mut poly_progress,
 
                    signature_type, 0, expr_type, 0
 
                )?;
 

	
 
                debug_log!(
 
                    "   - Ret type | sig: {}, expr: {}",
 
                    unsafe{&*signature_type}.display_name(&ctx.heap),
 
                    unsafe{&*expr_type}.display_name(&ctx.heap)
 
                );
 
                debug_log!("   - Ret poly progress | {:?}", poly_progress);
 

	
 
                if progress_expr {
 
                    // TODO: @cleanup, cannot call utility self.queue_parent thingo
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 

	
 
                // Check which expressions use the polymorphic arguments. If the
 
                // polymorphic variables have been progressed then we try to 
 
                // progress them inside the expression as well.
 
                debug_log!(" * During (reinferring from progressed polyvars):");
 

	
 
                // For all field expressions
 
                for field_idx in 0..extra.embedded.len() {
 
                    debug_assert_eq!(field_idx, data.fields[field_idx].field_idx, "confusing, innit?");
 
                    let signature_type: *mut _ = &mut extra.embedded[field_idx];
 
                    let field_expr_id = data.fields[field_idx].value;
 
                    let field_type: *mut _ = self.expr_types.get_mut(&field_expr_id).unwrap();
 

	
 
                    let progress_arg = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                        extra, &poly_progress, signature_type, field_type
 
                    );
 

	
 
                    debug_log!(
 
                        "   - Field {} type | sig: {}, field: {}", field_idx,
 
                        unsafe{&*signature_type}.display_name(&ctx.heap),
 
                        unsafe{&*field_type}.display_name(&ctx.heap)
 
                    );
 
                    if progress_arg {
 
                        self.expr_queued.insert(field_expr_id);
 
                    }
 
                }
 
                
 
                // For the return type
 
                let signature_type: *mut _ = &mut extra.returned;
 
                let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
                let progress_expr = Self::apply_equal2_polyvar_constraint(&ctx.heap,
 
                    extra, &poly_progress, signature_type, expr_type
 
                );
 

	
 
                progress_expr
 
            }
 
        };
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        // TODO: FIX!!!!
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    // TODO: @cleanup, see how this can be cleaned up once I implement
 
    //  polymorphic struct/enum/union literals. These likely follow the same
 
    //  pattern as here.
 
    fn progress_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> Result<(), ParseError2> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 

	
 
        debug_log!("Call expr '{}': {}", match &expr.method {
 
            Method::Create => String::from("create"),
 
            Method::Fires => String::from("fires"),
 
            Method::Get => String::from("get"),
 
            Method::Put => String::from("put"),
 
            Method::Symbolic(method) => String::from_utf8_lossy(&method.identifier.value).to_string()
 
        },upcast_id.index);
 
        debug_log!(" * Before:");
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 
        debug_log!(" * During (inferring types from arguments and return type):");
 

	
 
        // Check if we can make progress using the arguments and/or return types
 
        // while keeping track of the polyvars we've extended
 
        let mut poly_progress = HashSet::new();
 
        debug_assert_eq!(extra.embedded.len(), expr.arguments.len());
 

	
 
        for (arg_idx, arg_id) in expr.arguments.clone().into_iter().enumerate() {
 
            let signature_type: *mut _ = &mut extra.embedded[arg_idx];
 
            let argument_type: *mut _ = self.expr_types.get_mut(&arg_id).unwrap();
 
            let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
                ctx, upcast_id, Some(arg_id), extra, &mut poly_progress,
 
                signature_type, 0, argument_type, 0
 
            )?;
 

	
 
            debug_log!(
 
                "   - Arg {} type | sig: {}, arg: {}", arg_idx,
 
                unsafe{&*signature_type}.display_name(&ctx.heap), 
 
                unsafe{&*argument_type}.display_name(&ctx.heap));
 

	
 
            if progress_arg {
 
                // Progressed argument expression
 
                self.expr_queued.insert(arg_id);
 
            }
 
@@ -2359,219 +2355,219 @@ impl TypeResolvingVisitor {
 
            arg2_type, arg2_start_idx
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        Ok((infer_res.modified_lhs(), infer_res.modified_rhs()))
 
    }
 

	
 
    /// Applies an equal2 constraint between a signature type (e.g. a function
 
    /// argument or struct field) and an expression whose type should match that
 
    /// expression. If we make progress on the signature, then we try to see if
 
    /// any of the embedded polymorphic types can be progressed.
 
    ///
 
    /// `outer_expr_id` is the main expression we're progressing (e.g. a 
 
    /// function call), while `expr_id` is the embedded expression we're 
 
    /// matching against the signature. `expression_type` and 
 
    /// `expression_start_idx` belong to `expr_id`.
 
    fn apply_equal2_signature_constraint(
 
        ctx: &Ctx, outer_expr_id: ExpressionId, expr_id: Option<ExpressionId>,
 
        polymorph_data: &mut ExtraData, polymorph_progress: &mut HashSet<usize>,
 
        signature_type: *mut InferenceType, signature_start_idx: usize,
 
        expression_type: *mut InferenceType, expression_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError2> {
 
        // Safety: cannot mutually infer the same types
 
        //         polymorph_data containers may not be modified
 
        debug_assert_ptrs_distinct!(signature_type, expression_type);
 

	
 
        // Infer the signature and expression type
 
        let infer_res = unsafe { 
 
            InferenceType::infer_subtrees_for_both_types(
 
                signature_type, signature_start_idx,
 
                expression_type, expression_start_idx
 
            ) 
 
        };
 

	
 
        if infer_res == DualInferenceResult::Incompatible {
 
            // TODO: Check if I still need to use this
 
            let outer_position = ctx.heap[outer_expr_id].position();
 
            let (position_name, position) = match expr_id {
 
                Some(expr_id) => ("argument's", ctx.heap[expr_id].position()),
 
                None => ("type's", outer_position)
 
            };
 
            let (signature_display_type, expression_display_type) = unsafe { (
 
                (&*signature_type).display_name(&ctx.heap),
 
                (&*expression_type).display_name(&ctx.heap)
 
            ) };
 

	
 
            return Err(ParseError2::new_error(
 
                &ctx.module.source, outer_position,
 
                "Failed to fully resolve the types of this expression"
 
            ).with_postfixed_info(
 
                &ctx.module.source, position,
 
                &format!(
 
                    "Because the {} signature has been resolved to '{}', but the expression has been resolved to '{}'",
 
                    position_name, signature_display_type, expression_display_type
 
                )
 
            ));
 
        }
 

	
 
        // Try to see if we can progress any of the polymorphic variables
 
        let progress_sig = infer_res.modified_lhs();
 
        let progress_expr = infer_res.modified_rhs();
 

	
 
        if progress_sig {
 
            let signature_type = unsafe{&mut *signature_type};
 
            debug_assert!(
 
                signature_type.has_body_marker, 
 
                "made progress on signature type, but it doesn't have a marker"
 
            );
 
            for (poly_idx, poly_section) in signature_type.body_marker_iter() {
 
                let polymorph_type = &mut polymorph_data.poly_vars[poly_idx];
 
                match Self::apply_forced_constraint_types(
 
                    polymorph_type, 0, poly_section, 0
 
                ) {
 
                    Ok(true) => { polymorph_progress.insert(poly_idx); },
 
                    Ok(false) => {},
 
                    Err(()) => { return Err(Self::construct_poly_arg_error(ctx, polymorph_data, outer_expr_id))}
 
                }
 
            }
 
        }
 
        Ok((progress_sig, progress_expr))
 
    }
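    // --- Editorial sketch (not part of this changeset) ----------------------
    // A worked example (values are illustrative, not from a real run) of the
    // two-step flow implemented by `apply_equal2_signature_constraint` and
    // `apply_equal2_polyvar_constraint`, for a call to a hypothetical
    // polymorphic function `T pick(T a, T b)` whose first argument is already
    // known to be an `int[]`.
    #[cfg(any())] // illustrative only; never compiled
    fn _equal2_signature_example() {
        use InferenceTypePart as ITP;
        // Signature type of the first argument before any inference:
        let sig_before = vec![ITP::MarkerBody(0), ITP::Unknown];
        // The argument expression was already inferred to `int[]`:
        let argument = vec![ITP::Array, ITP::Int];
        // Mutual inference specializes the section behind MarkerBody(0): the
        // signature becomes the vector below and polyvar 0 is recorded as
        // having progressed to [Array, Int] in `poly_progress`/`poly_vars`.
        let sig_after = vec![ITP::MarkerBody(0), ITP::Array, ITP::Int];
        // The polyvar constraint pass then rewrites the second argument's
        // signature and the return type the same way, and pushes the result
        // into those expressions' inferred types.
        let _ = (sig_before, argument, sig_after);
    }
    // -------------------------------------------------------------------------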
 

	
 
    /// Applies equal2 constraints on the signature type for each of the 
 
    /// polymorphic variables. If the signature type is progressed then we 
 
    /// progress the expression type as well.
 
    ///
 
    /// This function assumes that the polymorphic variables have already been
 
    /// progressed as far as possible by calling 
 
    /// `apply_equal2_signature_constraint`. As such, we expect to not encounter
 
    /// any errors.
 
    ///
 
    /// This function returns true if the expression's type has been progressed
 
    fn apply_equal2_polyvar_constraint(
 
        heap: &Heap,
 
        polymorph_data: &ExtraData, polymorph_progress: &HashSet<usize>,
 
        polymorph_data: &ExtraData, _polymorph_progress: &HashSet<usize>,
 
        signature_type: *mut InferenceType, expr_type: *mut InferenceType
 
    ) -> bool {
 
        // Safety: all pointers should be distinct
 
        //         polymorph_data contains may not be modified
 
        debug_assert_ptrs_distinct!(signature_type, expr_type);
 
        let signature_type = unsafe{&mut *signature_type};
 
        let expr_type = unsafe{&mut *expr_type};
 

	
 
        // Iterate through markers in signature type to try and make progress
 
        // on the polymorphic variable        
 
        let mut seek_idx = 0;
 
        let mut modified_sig = false;
 
        
 
        while let Some((poly_idx, start_idx)) = signature_type.find_body_marker(seek_idx) {
 
            let end_idx = InferenceType::find_subtree_end_idx(&signature_type.parts, start_idx);
 
            if polymorph_progress.contains(&poly_idx) {
 
            // if polymorph_progress.contains(&poly_idx) {
 
                // Need to match subtrees
 
                let polymorph_type = &polymorph_data.poly_vars[poly_idx];
 
                debug_log!("   - DEBUG: Applying {} to '{}' from '{}'", polymorph_type.display_name(heap), InferenceType::partial_display_name(heap, &signature_type.parts[start_idx..]), signature_type.display_name(heap));
 
                let modified_at_marker = Self::apply_forced_constraint_types(
 
                    signature_type, start_idx, 
 
                    &polymorph_type.parts, 0
 
                ).expect("no failure when applying polyvar constraints");
 

	
 
                modified_sig = modified_sig || modified_at_marker;
 
            }
 
            // }
 

	
 
            seek_idx = end_idx;
 
        }
 

	
 
        // If we made any progress on the signature's type, then we also need to
 
        // apply it to the expression that is supposed to match the signature.
 
        if modified_sig {
 
            match InferenceType::infer_subtree_for_single_type(
 
                expr_type, 0, &signature_type.parts, 0
 
            ) {
 
                SingleInferenceResult::Modified => true,
 
                SingleInferenceResult::Unmodified => false,
 
                SingleInferenceResult::Incompatible =>
 
                    unreachable!("encountered failure while reapplying modified signature to expression after polyvar inference")
 
            }
 
        } else {
 
            false
 
        }
 
    }
 

	
 
    /// Applies a type constraint that expects all three provided types to be
 
    /// equal. In case we can make progress in inferring the types then we
 
    /// attempt to do so. If the call is successful then the composition of all
 
    /// types is made equal.
 
    fn apply_equal3_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId,
 
        start_idx: usize
 
    ) -> Result<(bool, bool, bool), ParseError2> {
 
        // Safety: all expression IDs are always distinct, and we do not modify
 
        //  the container
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id, arg1_id, arg2_id);
 
        let expr_type: *mut _ = self.expr_types.get_mut(&expr_id).unwrap();
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 

	
 
        let expr_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(expr_type, start_idx, arg1_type, start_idx)
 
        };
 
        if expr_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_expr_type_error(ctx, expr_id, arg1_id));
 
        }
 

	
 
        let args_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(arg1_type, start_idx, arg2_type, start_idx) };
 
        if args_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        // If all types are compatible, but the second call caused the arg1_type
 
        // to be expanded, then we must also assign this to expr_type.
 
        let mut progress_expr = expr_res.modified_lhs();
 
        let mut progress_arg1 = expr_res.modified_rhs();
 
        let progress_arg2 = args_res.modified_rhs();
 

	
 
        if args_res.modified_lhs() { 
 
            unsafe {
 
                let end_idx = InferenceType::find_subtree_end_idx(&(*arg2_type).parts, start_idx);
 
                let subtree = &((*arg2_type).parts[start_idx..end_idx]);
 
                (*expr_type).replace_subtree(start_idx, subtree);
 
            }
 
            progress_expr = true;
 
            progress_arg1 = true;
 
        }
 

	
 
        Ok((progress_expr, progress_arg1, progress_arg2))
 
    }
 

	
 
    // TODO: @optimize Since we only deal with a single type this might be done
 
    //  a lot more efficiently, methinks (disregarding the allocations here)
 
    fn apply_equal_n_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId, args: &[ExpressionId],
 
    ) -> Result<Vec<bool>, ParseError2> {
 
        // Early exit
 
        match args.len() {
 
            0 => return Ok(vec!()),         // nothing to progress
 
            1 => return Ok(vec![false]),    // only one type, so nothing to infer
 
            _ => {}
 
        }
 

	
 
        let mut progress = Vec::new();
 
        progress.resize(args.len(), false);
 

	
 
        // Do pairwise inference, keep track of the last entry we made progress
 
        // on. Once done we need to update everything to the most-inferred type.
 
        let mut arg_iter = args.iter();
 
        let mut last_arg_id = *arg_iter.next().unwrap();
 
        let mut last_lhs_progressed = 0;
 
        let mut lhs_arg_idx = 0;
 

	
 
        while let Some(next_arg_id) = arg_iter.next() {
 
            let arg1_type: *mut _ = self.expr_types.get_mut(&last_arg_id).unwrap();
 
            let arg2_type: *mut _ = self.expr_types.get_mut(next_arg_id).unwrap();
 

	
 
            let res = unsafe {
 
                InferenceType::infer_subtrees_for_both_types(arg1_type, 0, arg2_type, 0)
 
@@ -2745,196 +2741,193 @@ impl TypeResolvingVisitor {
 
                            let param = &ctx.heap[param_id];
 
                            let param_parser_type_id = param.parser_type;
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, param_parser_type_id, false));
 
                        }
 

	
 
                        let return_type = self.determine_inference_type_from_parser_type(ctx, definition.return_type, false);
 
                        (parameter_types, return_type)
 
                    },
 
                    Definition::Struct(_) | Definition::Enum(_) => {
 
                        unreachable!("insert initial polymorph data for struct/enum");
 
                    }
 
                }
 
            }
 
        };
 

	
 
        self.extra_data.insert(call_id.upcast(), ExtraData {
 
            poly_vars,
 
            embedded: embedded_types,
 
            returned: return_type
 
        });
 
    }
 

	
 
    fn insert_initial_struct_polymorph_data(
 
        &mut self, ctx: &mut Ctx, lit_id: LiteralExpressionId,
 
    ) {
 
        use InferenceTypePart as ITP;
 
        let literal = ctx.heap[lit_id].value.as_struct();
 

	
 
        // Handle polymorphic arguments
 
        let mut poly_vars = Vec::with_capacity(literal.poly_args.len());
 
        let mut total_num_poly_parts = 0;
 
        for poly_arg_type_id in literal.poly_args.clone() { // TODO: @performance
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, poly_arg_type_id, true
 
            ); 
 
            total_num_poly_parts += inference_type.parts.len();
 
            poly_vars.push(inference_type);
 
        }
 

	
 
        // Handle parser types on struct definition
 
        let definition = &ctx.heap[literal.definition.unwrap()];
 
        let definition = match definition {
 
            Definition::Struct(definition) => {
 
                debug_assert_eq!(poly_vars.len(), definition.poly_vars.len());
 
                definition
 
            },
 
            _ => unreachable!("definition for struct literal does not point to struct definition")
 
        };
 

	
 
        // Note: programmer is capable of specifying fields in a struct literal
 
        // in a different order than on the definition. We take the programmer-
 
        // specified order to be leading.
 
        let mut embedded_types = Vec::with_capacity(definition.fields.len());
 
        for lit_field in literal.fields.iter() {
 
            let def_field = &definition.fields[lit_field.field_idx];
 
            let inference_type = self.determine_inference_type_from_parser_type(
 
                ctx, def_field.parser_type, false
 
            );
 
            embedded_types.push(inference_type);
 
        }
 

	
 
        // Return type is the struct type itself, with the appropriate 
 
        // polymorphic variables. So:
 
        // - 1 part for definition
 
        // - N_poly_arg marker parts for each polymorphic argument
 
        // - all the parts for the currently known polymorphic arguments 
 
        let parts_reserved = 1 + poly_vars.len() + total_num_poly_parts;
 
        let mut parts = Vec::with_capacity(parts_reserved);
 
        parts.push(ITP::Instance(definition.this.upcast(), poly_vars.len()));
 
        let mut return_type_done = true;
 
        for (poly_var_idx, poly_var) in poly_vars.iter().enumerate() {
 
            if !poly_var.is_done { return_type_done = false; }
 

	
 
            parts.push(ITP::MarkerBody(poly_var_idx));
 
            parts.extend(poly_var.parts.iter().cloned());
 
        }
 

	
 
        debug_assert_eq!(parts.len(), parts_reserved);
 
        let return_type = InferenceType::new(true, return_type_done, parts);
 

	
 
        self.extra_data.insert(lit_id.upcast(), ExtraData{
 
            poly_vars, 
 
            embedded: embedded_types,
 
            returned: return_type,
 
        });
 
    }
 

	
 
    /// Inserts the extra polymorphic data struct. Assumes that the select
 
    /// expression's referenced (definition_id, field_idx) has been resolved.
 
    fn insert_initial_select_polymorph_data(
 
        &mut self, ctx: &Ctx, select_id: SelectExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Retrieve relevant data
 
        let expr = &ctx.heap[select_id];
 
        let field = match &expr.field {
 
            Field::Symbolic(field) => field,
 
            _ => unreachable!(),
 
        };
 
        let field = expr.field.as_symbolic();
 

	
 
        let definition_id = field.definition.unwrap();
 
        let definition = ctx.heap[definition_id].as_struct();
 
        let field_idx = field.field_idx;
 

	
 
        // Generate initial polyvar types and struct type
 
        let num_poly_vars = definition.poly_vars.len();
 
        let mut poly_vars = Vec::with_capacity(num_poly_vars);
 
        let struct_parts_reserved = 1 + 2 * num_poly_vars;
 
        let mut struct_parts = Vec::with_capacity(struct_parts_reserved);
 
        struct_parts.push(ITP::Instance(definition_id, num_poly_vars));        
 

	
 
        for poly_idx in 0..num_poly_vars {
 
            poly_vars.push(InferenceType::new(true, false, vec![
 
                ITP::MarkerBody(poly_idx), ITP::Unknown,
 
            ]));
 
            struct_parts.push(ITP::MarkerBody(poly_idx));
 
            struct_parts.push(ITP::Unknown);
 
        }
 
        debug_assert_eq!(struct_parts.len(), struct_parts_reserved);
 

	
 
        // Generate initial field type
 
        let field_type = self.determine_inference_type_from_parser_type(
 
            ctx, definition.fields[field_idx].parser_type, false
 
        );
 

	
 
        self.extra_data.insert(select_id.upcast(), ExtraData{
 
            poly_vars,
 
            embedded: vec![InferenceType::new(true, false, struct_parts)],
 
            returned: field_type
 
        });
 
    }
 

	
 
    /// Determines the initial InferenceType from the provided ParserType. This
 
    /// may be called with two kinds of intentions:
 
    /// 1. To resolve a ParserType within the body of a function, or on
 
    ///     polymorphic arguments to calls/instantiations within that body. This
 
    ///     means that the polymorphic variables are known and can be replaced
 
    ///     with the monomorph we're instantiating.
 
    /// 2. To resolve a ParserType on a called function's definition or on
 
    ///     an instantiated datatype's members. This means that the polymorphic
 
    ///     arguments inside those ParserTypes refer to the polymorphic
 
    ///     variables in the called/instantiated type's definition.
 
    /// In the second case we place InferenceTypePart::Marker instances such
 
    /// that we can perform type inference on the polymorphic variables.
 
    fn determine_inference_type_from_parser_type(
 
        &mut self, ctx: &Ctx, parser_type_id: ParserTypeId,
 
        parser_type_in_body: bool
 
    ) -> InferenceType {
 
        use ParserTypeVariant as PTV;
 
        use InferenceTypePart as ITP;
 

	
 
        let mut to_consider = VecDeque::with_capacity(16);
 
        to_consider.push_back(parser_type_id);
 

	
 
        let mut infer_type = Vec::new();
 
        let mut has_inferred = false;
 
        let mut has_markers = false;
 

	
 
        while !to_consider.is_empty() {
 
            let parser_type_id = to_consider.pop_front().unwrap();
 
            let parser_type = &ctx.heap[parser_type_id];
 
            match &parser_type.variant {
 
                PTV::Message => {
 
                    // TODO: @types Remove the Message -> Byte hack at some point...
 
                    infer_type.push(ITP::Message);
 
                    infer_type.push(ITP::Byte);
 
                },
 
                PTV::Bool => { infer_type.push(ITP::Bool); },
 
                PTV::Byte => { infer_type.push(ITP::Byte); },
 
                PTV::Short => { infer_type.push(ITP::Short); },
 
                PTV::Int => { infer_type.push(ITP::Int); },
 
                PTV::Long => { infer_type.push(ITP::Long); },
 
                PTV::String => { infer_type.push(ITP::String); },
 
                PTV::IntegerLiteral => { unreachable!("integer literal type on variable type"); },
 
                PTV::Inferred => {
 
                    infer_type.push(ITP::Unknown);
 
                    has_inferred = true;
 
                },
 
                PTV::Array(subtype_id) => {
 
                    infer_type.push(ITP::Array);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Input(subtype_id) => {
 
                    infer_type.push(ITP::Input);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Output(subtype_id) => {
 
                    infer_type.push(ITP::Output);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Symbolic(symbolic) => {
 
                    debug_assert!(symbolic.variant.is_some(), "symbolic variant not yet determined");
 
                    match symbolic.variant.as_ref().unwrap() {
 
                        SymbolicParserTypeVariant::PolyArg(_, arg_idx) => {
 
                            let arg_idx = *arg_idx;
 
@@ -3027,248 +3020,266 @@ impl TypeResolvingVisitor {
 

	
 
        return ParseError2::new_error(
 
            &ctx.module.source, expr.position(),
 
            "Incompatible types: cannot apply this expression"
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg1.position(),
 
            &format!(
 
                "Because this expression has type '{}'",
 
                arg1_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg2.position(),
 
            &format!(
 
                "But this expression has type '{}'",
 
                arg2_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_template_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> ParseError2 {
 
        let expr = &ctx.heap[expr_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: got a '{}' but expected a '{}'",
 
                expr_type.display_name(&ctx.heap), 
 
                InferenceType::partial_display_name(&ctx.heap, template)
 
            )
 
        )
 
    }
 

	
 
    /// Constructs a human interpretable error in the case that type inference
 
    /// on a polymorphic variable to a function call or literal construction 
 
    /// failed. This may only be caused by a pair of inference types (which may 
 
    /// come from arguments or the return type) having two different inferred 
 
    /// values for that polymorphic variable.
 
    ///
 
    /// So we find this pair and construct the error using it.
 
    ///
 
    /// We assume that the expression is a function call or a struct literal,
 
    /// and that an actual error has occurred.
 
    fn construct_poly_arg_error(
 
        ctx: &Ctx, poly_data: &ExtraData, expr_id: ExpressionId
 
    ) -> ParseError2 {
 
        // Helper function to check for polymorph mismatch between two inference
 
        // types.
 
        fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(usize, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            if !type_a.has_body_marker || !type_b.has_body_marker {
 
                return None
 
            }
 

	
 
            for (marker_a, section_a) in type_a.body_marker_iter() {
 
                for (marker_b, section_b) in type_b.body_marker_iter() {
 
                    if marker_a != marker_b {
 
                        // Not the same polymorphic variable
 
                        continue;
 
                    }
 

	
 
                    if !InferenceType::check_subtrees(section_a, 0, section_b, 0) {
 
                        // Not compatible
 
                        return Some((marker_a, section_a, section_b))
 
                    }
 
                }
 
            }
 

	
 
            None
 
        }
 

	
 
        // Helpers function to retrieve polyvar name and definition name
 
        fn get_poly_var_and_func_name(ctx: &Ctx, poly_var_idx: usize, expr: &CallExpression) -> (String, String) {
 
            match &expr.method {
 
                Method::Create => unreachable!(),
 
                Method::Fires => (String::from('T'), String::from("fires")),
 
                Method::Get => (String::from('T'), String::from("get")),
 
                Method::Put => (String::from('T'), String::from("put")),
 
                Method::Symbolic(symbolic) => {
 
                    let definition = &ctx.heap[symbolic.definition.unwrap()];
 
                    let poly_var = match definition {
 
                        Definition::Struct(_) | Definition::Enum(_) => unreachable!(),
 
                        Definition::Function(definition) => {
 
                            String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string()
 
                        },
 
                        Definition::Component(definition) => {
 
                            String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string()
 
                        }
 
                    };
 
                    let func_name = String::from_utf8_lossy(&symbolic.identifier.value).to_string();
 
                    (poly_var, func_name)
 
                }
 
            }
 
        }
 

	
 
        fn get_poly_var_and_literal_name(ctx: &Ctx, poly_var_idx: usize, expr: &LiteralExpression) -> (String, String) {
 
            let expr = expr.value.as_struct();
 
            let definition = &ctx.heap[expr.definition.unwrap()];
 
        fn get_poly_var_and_type_name(ctx: &Ctx, poly_var_idx: usize, definition_id: DefinitionId) -> (String, String) {
 
            let definition = &ctx.heap[definition_id];
 
            match definition {
 
                Definition::Enum(_) | Definition::Function(_) | Definition::Component(_) =>
 
                    unreachable!(),
 
                    unreachable!("get_poly_var_and_type_name called on non-struct value"),
 
                Definition::Struct(definition) => (
 
                    String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string(),
 
                    String::from_utf8_lossy(&definition.identifier.value).to_string()
 
                ),
 
            }
 
        }
 

	
 
        // Helper function to construct initial error
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &Expression) -> ParseError2 {
 
            match expr {
 
                Expression::Call(expr) => {
 
                    let (poly_var, func_name) = get_poly_var_and_func_name(ctx, poly_var_idx, expr);
 
                    return ParseError2::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' of '{}'",
 
                            poly_var, func_name
 
                        )
 
                    )
 
                },
 
                Expression::Literal(expr) => {
 
                    let (poly_var, struct_name) = get_poly_var_and_literal_name(ctx, poly_var_idx, expr);
 
                    let lit_struct = expr.value.as_struct();
 
                    let (poly_var, struct_name) = get_poly_var_and_type_name(ctx, poly_var_idx, lit_struct.definition.unwrap());
 
                    return ParseError2::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' of instantiation of '{}'",
 
                            poly_var, struct_name
 
                        )
 
                    )
 
                },
 
                Expression::Select(expr) => {
 
                    let field = expr.field.as_symbolic();
 
                    let (poly_var, struct_name) = get_poly_var_and_type_name(ctx, poly_var_idx, field.definition.unwrap());
 
                    return ParseError2::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Conflicting type for polymorphic variable '{}' while accessing field '{}' of '{}'",
 
                            poly_var, &String::from_utf8_lossy(&field.identifier.value), struct_name
 
                        )
 
                    )
 
                }
 
                _ => unreachable!("called construct_poly_arg_error without a call/literal expression")
 
            }
 
        }
 

	
 
        // Actual checking
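        // For each supported expression kind, collect the sub-expressions whose
        // polymorphic inference data participates (call arguments, struct literal
        // fields, or the select subject), plus a display name for the expression's
        // own result.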
 
        let expr = &ctx.heap[expr_id];
 
        let (expr_args, expr_return_name) = match expr {
 
            Expression::Call(expr) => 
 
                (
 
                    expr.arguments.clone(),
 
                    "return type"
 
                ),
 
            Expression::Literal(expr) => 
 
                (
 
                    expr.value.as_struct().fields
 
                        .iter()
 
                        .map(|f| f.value)
 
                        .collect(),
 
                    "literal"
 
                ),
 
            Expression::Select(expr) =>
 
                // Select expression uses the polymorphic variables of the 
 
                // struct it is accessing, so get the subject expression.
 
                (
 
                    vec![expr.subject],
 
                    "selected field"
 
                ),
 
            _ => unreachable!(),
 
        };
 

	
 
        // - check return type with itself
 
        if let Some((poly_idx, section_a, section_b)) = has_poly_mismatch(
 
            &poly_data.returned, &poly_data.returned
 
        ) {
 
            return construct_main_error(ctx, poly_idx, expr)
 
                .with_postfixed_info(
 
                    &ctx.module.source, expr.position(),
 
                    &format!(
 
                        "The {} inferred the conflicting types '{}' and '{}'",
 
                        expr_return_name,
 
                        InferenceType::partial_display_name(&ctx.heap, section_a),
 
                        InferenceType::partial_display_name(&ctx.heap, section_b)
 
                    )
 
                )
 
        }
 

	
 
        // - check arguments with each other argument and with return type
 
        for (arg_a_idx, arg_a) in poly_data.embedded.iter().enumerate() {
 
            for (arg_b_idx, arg_b) in poly_data.embedded.iter().enumerate() {
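                // Only pairs with arg_b_idx <= arg_a_idx are visited: each unordered
                // pair of arguments is checked once, and every argument is also
                // checked against itself.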
 
                if arg_b_idx > arg_a_idx {
 
                    break;
 
                }
 

	
 
                if let Some((poly_idx, section_a, section_b)) = has_poly_mismatch(&arg_a, &arg_b) {
 
                    let error = construct_main_error(ctx, poly_idx, expr);
 
                    if arg_a_idx == arg_b_idx {
 
                        // Same argument
 
                        let arg = &ctx.heap[expr_args[arg_a_idx]];
 
                        return error.with_postfixed_info(
 
                            &ctx.module.source, arg.position(),
 
                            &format!(
 
                                "This argument inferred the conflicting types '{}' and '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_a),
 
                                InferenceType::partial_display_name(&ctx.heap, section_b)
 
                            )
 
                        )
 
                    } else {
 
                        let arg_a = &ctx.heap[expr_args[arg_a_idx]];
 
                        let arg_b = &ctx.heap[expr_args[arg_b_idx]];
 
                        return error.with_postfixed_info(
 
                            &ctx.module.source, arg_a.position(),
 
                            &format!(
 
                                "This argument inferred it to '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_a)
 
                            )
 
                        ).with_postfixed_info(
 
                            &ctx.module.source, arg_b.position(),
 
                            &format!(
 
                                "While this argument inferred it to '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_b)
 
                            )
 
                        )
 
                    }
 
                }
 
            }
 

	
 
            // Check with return type
 
            if let Some((poly_idx, section_arg, section_ret)) = has_poly_mismatch(arg_a, &poly_data.returned) {
 
                let arg = &ctx.heap[expr_args[arg_a_idx]];
 
                return construct_main_error(ctx, poly_idx, expr)
 
                    .with_postfixed_info(
 
                        &ctx.module.source, arg.position(),
 
                        &format!(
 
                            "This argument inferred it to '{}'",
 
                            InferenceType::partial_display_name(&ctx.heap, section_arg)
 
                        )
 
                    )
 
                    .with_postfixed_info(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "While the {} inferred it to '{}'",
 
                            expr_return_name,
 
                            InferenceType::partial_display_name(&ctx.heap, section_ret)
 
                        )
 
                    )
 
            }
 
        }
 

	
 
        unreachable!("construct_poly_arg_error without actual error found?")
 
    }
 
}
 

	
 
#[cfg(test)]
 
mod tests {
 
    use super::*;
 
    use crate::protocol::arena::Id;
 
    use InferenceTypePart as ITP;
 
    use InferenceType as IT;
 

	
 
    #[test]
 
    fn test_single_part_inference() {
 
        // lhs argument inferred from rhs
 
        let pairs = [
src/protocol/tests/mod.rs
Show inline comments
 
mod utils;
 
mod lexer;
 
mod parser_validation;
 
mod parser_inference;
 
mod parser_monomorphs;
 
mod parser_imports;
 

	
 
pub(crate) use utils::{Tester};
 
\ No newline at end of file
src/protocol/tests/parser_imports.rs
Show inline comments
 
new file 100644
 
/// parser_imports.rs
 
///
 
/// Simple import tests
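///
/// The cases below cover plain module imports (`import external;`), multi-domain
/// and aliased module imports, single-symbol imports (optionally aliased), and
/// multi-symbol imports (`import external::{Foo, Bar}`). Glob imports stay
/// disabled until the standard library is in place.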
 

	
 
use super::*;
 

	
 
#[test]
 
fn test_module_import() {
 
    Tester::new("single domain name")
 
        .with_source("
 
        #module external
 
        struct Foo { int field }
 
        ")
 
        .with_source("
 
        import external;
 
        int caller() {
 
            auto a = external::Foo{ field: 0 };
 
            return a.field;
 
        }
 
        ")
 
        .compile()
 
        .expect_ok();
 

	
 
    Tester::new("multi domain name")
 
        .with_source("
 
        #module external.domain
 
        struct Foo { int field }
 
        ")
 
        .with_source("
 
        import external.domain;
 
        int caller() {
 
            auto a = domain::Foo{ field: 0 };
 
            return a.field;
 
        }
 
        ")
 
        .compile()
 
        .expect_ok();
 

	
 
    Tester::new("aliased domain name")
 
        .with_source("
 
        #module external
 
        struct Foo { int field }
 
        ")
 
        .with_source("
 
        import external as aliased;
 
        int caller() {
 
            auto a = aliased::Foo{ field: 0 };
 
            return a.field;
 
        }
 
        ")
 
        .compile()
 
        .expect_ok();
 
}
 

	
 
#[test]
 
fn test_single_symbol_import() {
 
    Tester::new("specific symbol")
 
        .with_source("
 
        #module external
 
        struct Foo { int field }
 
        ")
 
        .with_source("
 
        import external::Foo;
 
        int caller() {
 
            auto a = Foo{ field: 1 };
 
            auto b = Foo{ field: 2 };
 
            return a.field + b.field;
 
        }")
 
        .compile()
 
        .expect_ok();
 

	
 
    Tester::new("specific aliased symbol")
 
        .with_source("
 
        #module external
 
        struct Foo { int field }
 
        ")
 
        .with_source("
 
        import external::Foo as Bar;
 
        int caller() {
 
            return Bar{ field: 0 }.field;
 
        }
 
        ")
 
        .compile()
 
        .expect_ok();
 

	
 
    // TODO: Re-enable once std lib is properly implemented
 
    // Tester::new("import all")
 
    //     .with_source("
 
    //     #module external
 
    //     struct Foo { int field }
 
    //     ")
 
    //     .with_source("
 
    //     import external::*;
 
    //     int caller() { return Foo{field:0}.field; }
 
    //     ")
 
    //     .compile()
 
    //     .expect_ok();
 
}
 

	
 
#[test]
 
fn test_multi_symbol_import() {
 
    Tester::new("specific symbols")
 
        .with_source("
 
        #module external
 
        struct Foo { byte f }
 
        struct Bar { byte b }
 
        ")
 
        .with_source("
 
        import external::{Foo, Bar};
 
        byte caller() {
 
            return Foo{f:0}.f + Bar{b:1}.b;
 
        }
 
        ")
 
        .compile()
 
        .expect_ok();
 

	
 
    Tester::new("aliased symbols")
 
        .with_source("
 
        #module external
 
        struct Foo { byte in_foo }
 
        struct Bar { byte in_bar }
 
        ")
 
        .with_source("
 
        import external::{Foo as Bar, Bar as Foo};
 
        byte caller() {
 
            return Foo{in_bar:0}.in_bar + Bar{in_foo:0}.in_foo;    
 
        }")
 
        .compile()
 
        .expect_ok();
 

	
 
    // TODO: Re-enable once std lib is properly implemented
 
    // Tester::new("import all")
 
    //     .with_source("
 
    //     #module external
 
    //     struct Foo { byte f };
 
    //     struct Bar { byte b };
 
    //     ")
 
    //     .with_source("
 
    //     import external::*;
 
    //     byte caller() {
 
    //         auto f = Foo{f:0};
 
    //         auto b = Bar{b:0};
 
    //         return f.f + b.b;
 
    //     }
 
    //     ")
 
    //     .compile()
 
    //     .expect_ok();
 
}
 
\ No newline at end of file
src/protocol/tests/parser_inference.rs
Show inline comments
 
/// parser_inference.rs
 
///
 
/// Simple tests for the type inferences
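///
/// Each case declares variables as `auto` and then asserts that the parser type
/// remains `auto` while the inferred concrete type matches the expected one.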
 

	
 
use super::*;
 

	
 
#[test]
 
fn test_integer_inference() {
 
    Tester::new_single_source_expect_ok(
 
        "by arguments",
 
        "
 
        int call(byte b, short s, int i, long l) {
 
            auto b2 = b;
 
            auto s2 = s;
 
            auto i2 = i;
 
            auto l2 = l;
 
            return i2;
 
        }
 
        "
 
    ).for_function("call", |f| { f
 
        .for_variable("b2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("byte");
 
        })
 
        .for_variable("s2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("short");
 
        })
 
        .for_variable("i2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("int");
 
        })
 
        .for_variable("l2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("long");
 
        });
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "by assignment",
 
        "
 
        int call() {
 
            byte b1 = 0; short s1 = 0; int i1 = 0; long l1 = 0;
 
            auto b2 = b1;
 
            auto s2 = s1;
 
            auto i2 = i1;
 
            auto l2 = l1;
 
            return 0;
 
        }"
 
    ).for_function("call", |f| { f
 
        .for_variable("b2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("byte");
 
        })
 
        .for_variable("s2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("short");
 
        })
 
        .for_variable("i2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("int");
 
        })
 
        .for_variable("l2", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("long");
 
        });
 
    });
 
}
 

	
 
#[test]
 
fn test_binary_expr_inference() {
 
    Tester::new_single_source_expect_ok(
 
        "compatible types",
 
        "int call() {
 
            byte b0 = 0;
 
            byte b1 = 1;
 
            short s0 = 0;
 
            short s1 = 1;
 
            int i0 = 0;
 
            int i1 = 1;
 
            long l0 = 0;
 
            long l1 = 1;
 
            auto b = b0 + b1;
 
            auto s = s0 + s1;
 
            auto i = i0 + i1;
 
            auto l = l0 + l1;
 
            return i;
 
        }"
 
    ).for_function("call", |f| { f
 
        .for_expression_by_source(
 
            "b0 + b1", "+", 
 
            |e| { e.assert_concrete_type("byte"); }
 
        )
 
        .for_expression_by_source(
 
            "s0 + s1", "+", 
 
            |e| { e.assert_concrete_type("short"); }
 
        )
 
        .for_expression_by_source(
 
            "i0 + i1", "+", 
 
            |e| { e.assert_concrete_type("int"); }
 
        )
 
        .for_expression_by_source(
 
            "l0 + l1", "+", 
 
            |e| { e.assert_concrete_type("long"); }
 
        );
 
    });
 

	
 
    Tester::new_single_source_expect_err(
 
        "incompatible types", 
 
        "int call() {
 
            byte b = 0;
 
            long l = 1;
 
            auto r = b + l;
 
            return 0;
 
        }"
 
    ).error(|e| { e
 
        .assert_ctx_has(0, "b + l")
 
        .assert_msg_has(0, "cannot apply")
 
        .assert_occurs_at(0, "+")
 
        .assert_msg_has(1, "has type 'byte'")
 
        .assert_msg_has(2, "has type 'long'");
 
    });
 
}
 

	
 

	
 

	
 
#[test]
 
fn test_struct_inference() {
 
    Tester::new_single_source_expect_ok(
 
        "by function calls",
 
        "
 
        struct Pair<T1, T2>{ T1 first, T2 second }
 
        Pair<T1, T2> construct<T1, T2>(T1 first, T2 second) { 
 
            return Pair{ first: first, second: second };
 
        }
 
        int fix_t1<T2>(Pair<byte, T2> arg) { return 0; }
 
        int fix_t2<T1>(Pair<T1, int> arg) { return 0; }
 
        int test() {
 
            auto first = 0;
 
            auto second = 1;
 
            auto pair = construct(first, second);
 
            fix_t1(pair);
 
            fix_t2(pair);
 
            return 0;
 
        }
 
        "
 
    ).for_function("test", |f| { f
 
        .for_variable("first", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("byte");
 
        })
 
        .for_variable("second", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("int");
 
        })
 
        .for_variable("pair", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("Pair<byte,int>");
 
        });
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "by field access",
 
        "
 
        struct Pair<T1, T2>{ T1 first, T2 second }
 
        Pair<T1, T2> construct<T1, T2>(T1 first, T2 second) {
 
            return Pair{ first: first, second: second };
 
        }
 
        int test() {
 
            auto first = 0;
 
            auto second = 1;
 
            auto pair = construct(first, second);
 
            byte assign_first = 0;
 
            long assign_second = 1;
 
            pair.first = assign_first;
 
            pair.second = assign_second;
 
            return 0;
 
        }
 
        "
 
    ).for_function("test", |f| { f
 
        .for_variable("first", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("byte");
 
        })
 
        .for_variable("second", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("long");
 
        })
 
        .for_variable("pair", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("Pair<byte,long>");
 
        });
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "by nested field access",
 
        "
 
        struct Node<T1, T2>{ T1 l, T2 r }
 
        Node<T1, T2> construct<T1, T2>(T1 l, T2 r) { return Node{ l: l, r: r }; }
 
        int fix_poly<T>(Node<T, T> a) { return 0; }
 
        int test() {
 
            byte assigned = 0;
 
            auto thing = construct(assigned, construct(0, 1));
 
            fix_poly(thing.r);
 
            thing.r.r = assigned;
 
            return 0;
 
        }
 
        ",
 
    ).for_function("test", |f| { f
 
        .for_variable("thing", |v| { v
 
            .assert_parser_type("auto")
 
            .assert_concrete_type("Node<byte,Node<byte,byte>>");
 
        });
 
    });
 
}
 

	
 
#[test]
 
fn test_failed_polymorph_inference() {
 
    Tester::new_single_source_expect_err(
 
        "function call inference mismatch",
 
        "
 
        int poly<T>(T a, T b) { return 0; }
 
        int call() {
 
            byte first_arg = 5;
 
            long second_arg = 2;
 
            return poly(first_arg, second_arg);
 
        }
 
        "
 
    ).error(|e| { e
 
        .assert_num(3)
 
        .assert_ctx_has(0, "poly(first_arg, second_arg)")
 
        .assert_occurs_at(0, "poly")
 
        .assert_msg_has(0, "Conflicting type for polymorphic variable 'T'")
 
        .assert_occurs_at(1, "second_arg")
 
        .assert_msg_has(1, "inferred it to 'long'")
 
        .assert_occurs_at(2, "first_arg")
 
        .assert_msg_has(2, "inferred it to 'byte'");
 
    });
 

	
 
    Tester::new_single_source_expect_err(
 
        "struct literal inference mismatch",
 
        "
 
        struct Pair<T>{ T first, T second }
 
        int call() {
 
            byte first_arg = 5;
 
            long second_arg = 2;
 
            auto pair = Pair{ first: first_arg, second: second_arg };
 
            return 3;
 
        }
 
        "
 
    ).error(|e| { e
 
        .assert_num(3)
 
        .assert_ctx_has(0, "Pair{ first: first_arg, second: second_arg }")
 
        .assert_occurs_at(0, "Pair{")
 
        .assert_msg_has(0, "Conflicting type for polymorphic variable 'T'")
 
        .assert_occurs_at(1, "second_arg")
 
        .assert_msg_has(1, "inferred it to 'long'")
 
        .assert_occurs_at(2, "first_arg")
 
        .assert_msg_has(2, "inferred it to 'byte'");
 
    });
 

	
 
    Tester::new_single_source_expect_err(
 
        "field access inference mismatch",
 
        "
 
        struct Holder<Shazam>{ Shazam a }
 
        int call() {
 
            byte to_hold = 0;
 
            auto holder = Holder{ a: to_hold };
 
            return holder.a;
 
        }
 
        "
 
    ).error(|e| { e
 
        .assert_num(3)
 
        .assert_ctx_has(0, "holder.a")
 
        .assert_occurs_at(0, ".")
 
        .assert_msg_has(0, "Conflicting type for polymorphic variable 'Shazam'")
 
        .assert_msg_has(1, "inferred it to 'byte'")
 
        .assert_msg_has(2, "inferred it to 'int'");
 
    });
 

	
 
    // TODO: Needs better error messages anyway, but this failed before
 
    Tester::new_single_source_expect_err(
 
        "by nested field access",
 
        "
 
        struct Node<T1, T2>{ T1 l, T2 r }
 
        Node<T1, T2> construct<T1, T2>(T1 l, T2 r) { return Node{ l: l, r: r }; }
 
        int fix_poly<T>(Node<T, T> a) { return 0; }
 
        int test() {
 
            byte assigned = 0;
 
            long another = 1;
 
            auto thing = construct(assigned, construct(another, 1));
 
            fix_poly(thing.r);
 
            thing.r.r = assigned;
 
            return 0;
 
        }
 
        ",
 
    );
 
}
 
\ No newline at end of file
src/protocol/tests/parser_monomorphs.rs
Show inline comments
 
/// parser_monomorphs.rs
 
///
 
/// Simple tests to make sure that all of the appropriate monomorphs are 
 
/// instantiated
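///
/// Nested instantiations count as well: `Number<Number<short>>` below is expected
/// to register both `short` and `Number<short>` as monomorphs of `Number`.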
 

	
 
use super::*;
 

	
 
#[test]
 
fn test_struct_monomorphs() {
 
    Tester::new_single_source_expect_ok(
 
        "no polymorph",
 
        "struct Integer{ int field }"
 
    ).for_struct("Integer", |s| { s
 
        .assert_num_monomorphs(0);
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "single polymorph",
 
        "
 
        struct Number<T>{ T number }
 
        int instantiator() {
 
            auto a = Number<byte>{ number: 0 };
 
            auto b = Number<byte>{ number: 1 };
 
            auto c = Number<int>{ number: 2 };
 
            auto d = Number<long>{ number: 3 };
 
            auto e = Number<Number<short>>{ number: Number{ number: 4 }};
 
            return 0;
 
        }
 
        "
 
    ).for_struct("Number", |s| { s
 
        .assert_has_monomorph("byte")
 
        .assert_has_monomorph("short")
 
        .assert_has_monomorph("int")
 
        .assert_has_monomorph("long")
 
        .assert_has_monomorph("Number<short>")
 
        .assert_num_monomorphs(5);
 
    }).for_function("instantiator", |f| { f
 
        .for_variable("a", |v| {v.assert_concrete_type("Number<byte>");} )
 
        .for_variable("e", |v| {v.assert_concrete_type("Number<Number<short>>");} );
 
    });
 
}
 
\ No newline at end of file
src/protocol/tests/utils.rs
Show inline comments
 
@@ -305,460 +305,563 @@ pub(crate) struct StructFieldTester<'a> {
 
impl<'a> StructFieldTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a StructFieldDefinition) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn assert_parser_type(self, expected: &str) -> Self {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, self.def.parser_type);
 
        assert_eq!(
 
            expected, &serialized_type,
 
            "[{}] Expected type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized_type, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.ctx.heap, self.def.parser_type);
 
        format!(
 
            "StructField{{ name: {}, parser_type: {} }}",
 
            String::from_utf8_lossy(&self.def.field.value), serialized_type
 
        )
 
    }
 
}
 

	
 
pub(crate) struct FunctionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    def: &'a Function,
 
}
 

	
 
impl<'a> FunctionTester<'a> {
 
    fn new(ctx: TestCtx<'a>, def: &'a Function) -> Self {
 
        Self{ ctx, def }
 
    }
 

	
 
    pub(crate) fn for_variable<F: Fn(VariableTester)>(self, name: &str, f: F) -> Self {
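        // A variable is located in two steps: first the memory statement that
        // declares it (yielding the Local), then the assignment expression that
        // initializes it (yielding the inferred concrete type).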
 
        // Find the memory statement in order to find the local
 
        let mem_stmt_id = seek_stmt(
 
            self.ctx.heap, self.def.body,
 
            &|stmt| {
 
                if let Statement::Local(local) = stmt {
 
                    if let LocalStatement::Memory(memory) = local {
 
                        let local = &self.ctx.heap[memory.variable];
 
                        if local.identifier.value == name.as_bytes() {
 
                            return true;
 
                        }
 
                    }
 
                }
 

	
 
                false
 
            }
 
        );
 

	
 
        assert!(
 
            mem_stmt_id.is_some(), "[{}] Failed to find variable '{}' in {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 

	
 
        let mem_stmt_id = mem_stmt_id.unwrap();
 
        let local_id = self.ctx.heap[mem_stmt_id].as_memory().variable;
 
        let local = &self.ctx.heap[local_id];
 

	
 
        // Find the assignment expression that follows it
 
        let assignment_id = seek_expr_in_stmt(
 
            self.ctx.heap, self.def.body,
 
            &|expr| {
 
                if let Expression::Assignment(assign_expr) = expr {
 
                    if let Expression::Variable(variable_expr) = &self.ctx.heap[assign_expr.left] {
 
                        if variable_expr.position.offset == local.identifier.position.offset {
 
                            return true;
 
                        }
 
                    }
 
                }
 

	
 
                false
 
            }
 
        );
 

	
 
        assert!(
 
            assignment_id.is_some(), "[{}] Failed to find assignment to variable '{}' in {}",
 
            self.ctx.test_name, name, self.assert_postfix()
 
        );
 

	
 
        let assignment = &self.ctx.heap[assignment_id.unwrap()];
 

	
 
        // Construct tester and pass to tester function
 
        let tester = VariableTester::new(
 
            self.ctx, self.def.this.upcast(), local, 
 
            assignment.as_assignment()
 
        );
 
        f(tester);
 

	
 
        self
 
    }
 

	
 
    /// Finds a specific expression within a function. There are two matchers:
 
    /// one outer matcher (to find a rough indication of the expression) and an
 
    /// inner matcher to find the exact expression. 
 
    ///
 
    /// The reason being that, for example, a function's body might be littered
 
    /// with addition symbols, so we first match on "some_var + some_other_var",
 
    /// and then match exactly on "+".
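    ///
    /// An illustrative use, mirroring the inference tests, for a body containing
    /// `b0 + b1`:
    ///
    ///     .for_expression_by_source("b0 + b1", "+", |e| { e.assert_concrete_type("byte"); })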
 
    pub(crate) fn for_expression_by_source<F: Fn(ExpressionTester)>(self, outer_match: &str, inner_match: &str, f: F) -> Self {
 
        // Seek the expression in the source code
 
        assert!(outer_match.contains(inner_match), "improper testing code");
 

	
 
        let module = seek_def_in_modules(
 
            &self.ctx.heap, &self.ctx.modules, self.def.this.upcast()
 
        ).unwrap();
 

	
 
        // Find the first occurrence of the expression after the definition of
 
        // the function; we'll check later that it is included in the body.
 
        let mut outer_match_idx = self.def.position.offset;
 
        while outer_match_idx < module.source.input.len() {
 
            if module.source.input[outer_match_idx..].starts_with(outer_match.as_bytes()) {
 
                break;
 
            }
 
            outer_match_idx += 1;
 
        }
 

	
 
        assert!(
 
            outer_match_idx < module.source.input.len(),
 
            "[{}] Failed to find '{}' within the source that contains {}",
 
            self.ctx.test_name, outer_match, self.assert_postfix()
 
        );
 
        let inner_match_idx = outer_match_idx + outer_match.find(inner_match).unwrap();
 

	
 
        // Use the inner match index to find the expression
 
        let expr_id = seek_expr_in_stmt(
 
            &self.ctx.heap, self.def.body,
 
            &|expr| expr.position().offset == inner_match_idx
 
        );
 
        assert!(
 
            expr_id.is_some(),
 
            "[{}] Failed to find '{}' within the source that contains {} \
 
            (note: expression was found, but not within the specified function)",
 
            self.ctx.test_name, outer_match, self.assert_postfix()
 
        );
 
        let expr_id = expr_id.unwrap();
 

	
 
        // We have the expression; call the testing function
 
        let tester = ExpressionTester::new(
 
            self.ctx, self.def.this.upcast(), &self.ctx.heap[expr_id]
 
        );
 
        f(tester);
 

	
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Function{{ name: {} }}",
 
            &String::from_utf8_lossy(&self.def.identifier.value)
 
        )
 
    }
 
}
 

	
 

	
 
pub(crate) struct VariableTester<'a> {
 
    ctx: TestCtx<'a>,
 
    definition_id: DefinitionId,
 
    local: &'a Local,
 
    assignment: &'a AssignmentExpression,
 
}
 

	
 
impl<'a> VariableTester<'a> {
 
    fn new(
 
        ctx: TestCtx<'a>, definition_id: DefinitionId, local: &'a Local, assignment: &'a AssignmentExpression
 
    ) -> Self {
 
        Self{ ctx, definition_id, local, assignment }
 
    }
 

	
 
    pub(crate) fn assert_parser_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_parser_type(&mut serialized, self.ctx.heap, self.local.parser_type);
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected parser type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn assert_concrete_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_concrete_type(
 
            &mut serialized, self.ctx.heap, self.definition_id, 
 
            &self.assignment.concrete_type
 
        );
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected concrete type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Variable{{ name: {} }}",
 
            &String::from_utf8_lossy(&self.local.identifier.value)
 
        )
 
    }
 
}
 

	
 
pub(crate) struct ExpressionTester<'a> {
 
    ctx: TestCtx<'a>,
 
    definition_id: DefinitionId, // of the enclosing function/component
 
    expr: &'a Expression
 
}
 

	
 
impl<'a> ExpressionTester<'a> {
 
    fn new(
 
        ctx: TestCtx<'a>, definition_id: DefinitionId, expr: &'a Expression
 
    ) -> Self {
 
        Self{ ctx, definition_id, expr }
 
    }
 

	
 
    pub(crate) fn assert_concrete_type(self, expected: &str) -> Self {
 
        let mut serialized = String::new();
 
        serialize_concrete_type(
 
            &mut serialized, self.ctx.heap, self.definition_id,
 
            self.expr.get_type()
 
        );
 

	
 
        assert_eq!(
 
            expected, &serialized,
 
            "[{}] Expected concrete type '{}', but got '{}' for {}",
 
            self.ctx.test_name, expected, &serialized, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        format!(
 
            "Expression{{ debug: {:?} }}",
 
            self.expr
 
        )
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct AstErrTester {
 
    test_name: String,
 
    error: ParseError2,
 
}
 

	
 
impl AstErrTester {
 
    fn new(test_name: String, error: ParseError2) -> Self {
 
        Self{ test_name, error }
 
    }
 

	
 
    pub(crate) fn error<F: Fn(ErrorTester)>(&self, f: F) {
 
        // Maybe multiple errors will be supported in the future
 
        let tester = ErrorTester{ test_name: &self.test_name, error: &self.error };
 
        f(tester)
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Utilities for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct ErrorTester<'a> {
 
    test_name: &'a str,
 
    error: &'a ParseError2,
 
}
 

	
 
impl<'a> ErrorTester<'a> {
 
    pub(crate) fn assert_num(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.error.statements.len(),
 
            "[{}] expected error to consist of '{}' parts, but encountered '{}' for {}",
 
            self.test_name, num, self.error.statements.len(), self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_ctx_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].context.contains(msg),
 
            "[{}] expected error statement {}'s context to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_msg_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].message.contains(msg),
 
            "[{}] expected error statement {}'s message to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    /// Seeks the index of the pattern in the context message, then checks if
 
    /// the input position corresponds to that index.
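    ///
    /// For example, `assert_occurs_at(0, "+")` asserts that the first error
    /// statement's reported position points at the `+` within its context line.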
 
    pub (crate) fn assert_occurs_at(self, idx: usize, pattern: &str) -> Self {
 
        let pos = self.error.statements[idx].context.find(pattern);
 
        assert!(
 
            pos.is_some(),
 
            "[{}] incorrect occurs_at: '{}' could not be found in the context for {}",
 
            self.test_name, pattern, self.assert_postfix()
 
        );
 
        let pos = pos.unwrap();
 
        let col = self.error.statements[idx].position.col();
 
        assert_eq!(
 
            pos + 1, col,
 
            "[{}] Expected error to occur at column {}, but found it at {} for {}",
 
            self.test_name, pos + 1, col, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("error: [");
 
        for (idx, stmt) in self.error.statements.iter().enumerate() {
 
            if idx != 0 {
 
                v.push_str(", ");
 
            }
 

	
 
            v.push_str(&format!("{{ context: {}, message: {} }}", &stmt.context, stmt.message));
 
        }
 
        v.push(']');
 
        v
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Generic utilities
 
//------------------------------------------------------------------------------
 

	
 
fn serialize_parser_type(buffer: &mut String, heap: &Heap, id: ParserTypeId) {
 
    use ParserTypeVariant as PTV;
 

	
 
    let p = &heap[id];
 
    match &p.variant {
 
        PTV::Message => buffer.push_str("msg"),
 
        PTV::Bool => buffer.push_str("bool"),
 
        PTV::Byte => buffer.push_str("byte"),
 
        PTV::Short => buffer.push_str("short"),
 
        PTV::Int => buffer.push_str("int"),
 
        PTV::Long => buffer.push_str("long"),
 
        PTV::String => buffer.push_str("string"),
 
        PTV::IntegerLiteral => buffer.push_str("intlit"),
 
        PTV::Inferred => buffer.push_str("auto"),
 
        PTV::Array(sub_id) => {
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push_str("[]");
 
        },
 
        PTV::Input(sub_id) => {
 
            buffer.push_str("in<");
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push('>');
 
        },
 
        PTV::Output(sub_id) => {
 
            buffer.push_str("out<");
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push('>');
 
        },
 
        PTV::Symbolic(symbolic) => {
 
            buffer.push_str(&String::from_utf8_lossy(&symbolic.identifier.value));
 
            if symbolic.poly_args.len() > 0 {
 
                buffer.push('<');
 
                for (poly_idx, poly_arg) in symbolic.poly_args.iter().enumerate() {
 
                    if poly_idx != 0 { buffer.push(','); }
 
                    serialize_parser_type(buffer, heap, *poly_arg);
 
                }
 
                buffer.push('>');
 
            }
 
        }
 
    }
 
}
 

	
 
fn serialize_concrete_type(buffer: &mut String, heap: &Heap, def: DefinitionId, concrete: &ConcreteType) {
 
    // Retrieve polymorphic variables, if present (since we're dealing with a 
 
    // concrete type we expect procedure or struct definitions)
 
    let poly_vars = match &heap[def] {
 
        Definition::Function(func) => &func.poly_vars,
 
        Definition::Component(comp) => &comp.poly_vars,
 
        _ => unreachable!("Error in testing utility: did not expect non-procedure type for concrete type serialization"),
 
        Definition::Function(definition) => &definition.poly_vars,
 
        Definition::Component(definition) => &definition.poly_vars,
 
        Definition::Struct(definition) => &definition.poly_vars,
 
        _ => unreachable!("Error in testing utility: unexpected type for concrete type serialization"),
 
    };
 

	
 
    fn serialize_recursive(
 
        buffer: &mut String, heap: &Heap, poly_vars: &Vec<Identifier>, concrete: &ConcreteType, mut idx: usize
 
    ) -> usize {
 
        use ConcreteTypePart as CTP;
 

	
 
        let part = &concrete.parts[idx];
 
        match part {
 
            CTP::Marker(poly_idx) => {
 
                buffer.push_str(&String::from_utf8_lossy(&poly_vars[*poly_idx].value));
 
            },
 
            CTP::Void => buffer.push_str("void"),
 
            CTP::Message => buffer.push_str("msg"),
 
            CTP::Bool => buffer.push_str("bool"),
 
            CTP::Byte => buffer.push_str("byte"),
 
            CTP::Short => buffer.push_str("short"),
 
            CTP::Int => buffer.push_str("int"),
 
            CTP::Long => buffer.push_str("long"),
 
            CTP::String => buffer.push_str("string"),
 
            CTP::Array => {
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push_str("[]");
 
                idx += 1;
 
            },
 
            CTP::Slice => {
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push_str("[..]");
 
                idx += 1;
 
            },
 
            CTP::Input => {
 
                buffer.push_str("in<");
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push('>');
 
                idx += 1;
 
            },
 
            CTP::Output => {
 
                buffer.push_str("out<");
 
                idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                buffer.push('>');
 
                idx += 1;
 
            },
 
            CTP::Instance(definition_id, num_sub) => {
 
                let definition_name = heap[*definition_id].identifier();
 
                buffer.push_str(&String::from_utf8_lossy(&definition_name.value));
 
                buffer.push('<');
 
                for sub_idx in 0..*num_sub {
 
                    if sub_idx != 0 { buffer.push(','); }
 
                    idx = serialize_recursive(buffer, heap, poly_vars, concrete, idx + 1);
 
                }
 
                buffer.push('>');
 
                idx += 1;
 
            }
 
        }
 

	
 
        idx
 
    }
 

	
 
    serialize_recursive(buffer, heap, poly_vars, concrete, 0);
 
}
 

	
 
fn seek_def_in_modules<'a>(heap: &Heap, modules: &'a [LexedModule], def_id: DefinitionId) -> Option<&'a LexedModule> {
 
    for module in modules {
 
        let root = &heap.protocol_descriptions[module.root_id];
 
        for definition in &root.definitions {
 
            if *definition == def_id {
 
                return Some(module)
 
            }
 
        }
 
    }
 

	
 
    None
 
}
 

	
 
fn seek_stmt<F: Fn(&Statement) -> bool>(heap: &Heap, start: StatementId, f: &F) -> Option<StatementId> {
 
    let stmt = &heap[start];
 
    if f(stmt) { return Some(start); }
 

	
 
    // This statement wasn't it; try to recurse into its sub-statements
 
    let matched = match stmt {
 
        Statement::Block(block) => {
 
            for sub_id in &block.statements {
 
                if let Some(id) = seek_stmt(heap, *sub_id, f) {
 
                    return Some(id);
 
                }
 
            }
 

	
 
            None
 
        },
 
        Statement::Labeled(stmt) => seek_stmt(heap, stmt.body, f),
 
        Statement::If(stmt) => {
 
            if let Some(id) = seek_stmt(heap,stmt.true_body, f) {
 
                return Some(id);
 
            } else if let Some(id) = seek_stmt(heap, stmt.false_body, f) {
 
                return Some(id);
 
            }
 
            None
 
        },
 
        Statement::While(stmt) => seek_stmt(heap, stmt.body, f),
 
        Statement::Synchronous(stmt) => seek_stmt(heap, stmt.body, f),
 
        _ => None
 
    };
 

	
 
    matched
 
}
 

	
 
fn seek_expr_in_expr<F: Fn(&Expression) -> bool>(heap: &Heap, start: ExpressionId, f: &F) -> Option<ExpressionId> {
 
    let expr = &heap[start];
 
    if f(expr) { return Some(start); }
 

	
 
    match expr {
 
        Expression::Assignment(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.left, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.right, f))
 
        },
 
        Expression::Conditional(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.test, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.true_expression, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.false_expression, f))
 
        },
 
        Expression::Binary(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.left, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.right, f))
 
        },
 
        Expression::Unary(expr) => {
 
            seek_expr_in_expr(heap, expr.expression, f)
 
        },
 
        Expression::Indexing(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.subject, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.index, f))
 
        },
 
        Expression::Slicing(expr) => {
 
            None
 
            .or_else(|| seek_expr_in_expr(heap, expr.subject, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.from_index, f))
 
            .or_else(|| seek_expr_in_expr(heap, expr.to_index, f))
 
        },
 
        Expression::Select(expr) => {
 
            seek_expr_in_expr(heap, expr.subject, f)
 
        },
 
        Expression::Array(expr) => {
 
            for element in &expr.elements {
 
                if let Some(id) = seek_expr_in_expr(heap, *element, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
 
        Expression::Literal(expr) => {
 
            if let Literal::Struct(lit) = &expr.value {
 
                for field in &lit.fields {
 
                    if let Some(id) = seek_expr_in_expr(heap, field.value, f) {
 
                        return Some(id)
 
                    }
 
                }
 
            }
 
            None
 
        },
 
        Expression::Call(expr) => {
 
            for arg in &expr.arguments {
 
                if let Some(id) = seek_expr_in_expr(heap, *arg, f) {
 
                    return Some(id)
 
                }
 
            }
 
            None
 
        },
0 comments (0 inline, 0 general)