Changeset - d8148185e205
[Not reviewed]
MH - 4 years ago 2021-03-24 15:17:42
contact@maxhenger.nl
finished initial type inferencer/checker, tests work again
9 files changed with 484 insertions and 662 deletions:
0 comments (0 inline, 0 general)
src/protocol/ast.rs
 
@@ -695,24 +695,25 @@ impl<'a> Iterator for NamespacedIdentifierIter<'a> {
 
            return Some(&self.value[start..]);
 
        }
 
    }
 
}
 

	
 
impl Display for Identifier {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        // A source identifier is in ASCII range.
 
        write!(f, "{}", String::from_utf8_lossy(&self.value))
 
    }
 
}
 

	
 
/// TODO: @types Remove the Message -> Byte hack at some point...
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum ParserTypeVariant {
 
    // Basic builtin
 
    Message,
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // Literals (need to get concrete builtin type during typechecking)
 
    IntegerLiteral,
 
@@ -767,56 +768,70 @@ pub struct SymbolicParserType {
 
/// Specifies whether the symbolic type points to an actual user-defined type,
 
/// or whether it points to a polymorphic argument within the definition (e.g.
 
/// a defined variable `T var` within a function `int func<T>()`).
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum SymbolicParserTypeVariant {
 
    Definition(DefinitionId),
 
    // TODO: figure out if I need the DefinitionId here
 
    PolyArg(DefinitionId, usize), // index of polyarg in the definition
 
}
 

	
 
/// ConcreteType is the representation of a type after resolving symbolic types
 
/// and performing type inference
 
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
 
#[derive(Debug, Clone, Copy, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
 
pub enum ConcreteTypePart {
 
    // Markers for the use of polymorphic types within a procedure's body that
 
    // refer to polymorphic variables on the procedure's definition. Different
 
    // from markers in the `InferenceType`, these will not contain nested types.
 
    Marker(usize),
 
    // Special types (cannot be explicitly constructed by the programmer)
 
    Void,
 
    // Builtin types without nested types
 
    Message,
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // Builtin types with one nested type
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // User defined type with any number of nested types
 
    Instance(DefinitionId, usize),
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
#[derive(Debug, Clone, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
 
pub struct ConcreteType {
 
    pub(crate) parts: Vec<ConcreteTypePart>
 
}
 

	
 
impl Default for ConcreteType {
 
    fn default() -> Self {
 
        Self{ parts: Vec::new() }
 
    }
 
}
 

	
 
impl ConcreteType {
 
    pub(crate) fn has_marker(&self) -> bool {
 
        self.parts
 
            .iter()
 
            .any(|p| {
 
                if let ConcreteTypePart::Marker(_) = p { true } else { false }
 
            })
 
    }
 
}
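As an aside for readers of this changeset: `ConcreteType::parts` stores the type tree flattened in pre-order, so a part with nested types (Array, Slice, Input, Output, Instance) is immediately followed by the encodings of its subtrees. A minimal, self-contained sketch of that layout, using simplified stand-in parts and a made-up definition id rather than the project's actual types:

// Sketch only: simplified stand-in for ConcreteTypePart.
#[derive(Debug)]
enum Part {
    Byte,
    Int,
    Array,                 // one nested type follows
    Input,                 // one nested type follows
    Instance(u32, usize),  // made-up id, plus the number of nested types
}

fn main() {
    // A hypothetical `Foo<byte[], in<int>>` flattens, in pre-order, to:
    let parts = vec![
        Part::Instance(0, 2),    // Foo with two type arguments
        Part::Array, Part::Byte, // first argument: byte[]
        Part::Input, Part::Int,  // second argument: in<int>
    ];
    assert_eq!(parts.len(), 5);
    println!("{:?}", parts);
}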
 

	
 
// TODO: Remove at some point
 
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
 
pub enum PrimitiveType {
 
    Unassigned,
 
    Input,
 
    Output,
 
    Message,
 
    Boolean,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
src/protocol/ast_printer.rs
 
@@ -73,50 +73,50 @@ impl<'a> KV<'a> {
 
    }
 

	
 
    fn with_id(mut self, prefix: &'static str, id: u32) -> Self {
 
        self.prefix = Some((prefix, id));
 
        self
 
    }
 

	
 
    fn with_s_key(self, key: &str) -> Self {
 
        self.temp_key.push_str(key);
 
        self
 
    }
 

	
 
    fn with_d_key<D: Display>(mut self, key: &D) -> Self {
 
    fn with_d_key<D: Display>(self, key: &D) -> Self {
 
        self.temp_key.push_str(&key.to_string());
 
        self
 
    }
 

	
 
    fn with_s_val(self, val: &str) -> Self {
 
        self.temp_val.push_str(val);
 
        self
 
    }
 

	
 
    fn with_disp_val<D: Display>(mut self, val: &D) -> Self {
 
    fn with_disp_val<D: Display>(self, val: &D) -> Self {
 
        self.temp_val.push_str(&format!("{}", val));
 
        self
 
    }
 

	
 
    fn with_debug_val<D: Debug>(mut self, val: &D) -> Self {
 
    fn with_debug_val<D: Debug>(self, val: &D) -> Self {
 
        self.temp_val.push_str(&format!("{:?}", val));
 
        self
 
    }
 

	
 
    fn with_ascii_val(self, val: &[u8]) -> Self {
 
        self.temp_val.push_str(&*String::from_utf8_lossy(val));
 
        self
 
    }
 

	
 
    fn with_opt_disp_val<D: Display>(mut self, val: Option<&D>) -> Self {
 
    fn with_opt_disp_val<D: Display>(self, val: Option<&D>) -> Self {
 
        match val {
 
            Some(v) => { self.temp_val.push_str(&format!("Some({})", v)); },
 
            None => { self.temp_val.push_str("None"); }
 
        }
 
        self
 
    }
 

	
 
    fn with_opt_ascii_val(self, val: Option<&[u8]>) -> Self {
 
        match val {
 
            Some(v) => {
 
                self.temp_val.push_str("Some(");
 
                self.temp_val.push_str(&*String::from_utf8_lossy(v));
 
@@ -155,32 +155,34 @@ impl<'a> Drop for KV<'a> {
 
        self.buffer.push_str(self.temp_key);
 
        if self.temp_val.is_empty() {
 
            self.buffer.push(':');
 
        } else {
 
            self.buffer.push_str(": ");
 
            self.buffer.push_str(&self.temp_val);
 
        }
 
        self.buffer.push('\n');
 
    }
 
}
 

	
 
pub(crate) struct ASTWriter {
 
    cur_definition: Option<DefinitionId>,
 
    buffer: String,
 
    temp1: String,
 
    temp2: String,
 
}
 

	
 
impl ASTWriter {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            cur_definition: None,
 
            buffer: String::with_capacity(4096),
 
            temp1: String::with_capacity(256),
 
            temp2: String::with_capacity(256),
 
        }
 
    }
 
    pub(crate) fn write_ast<W: IOWrite>(&mut self, w: &mut W, heap: &Heap) {
 
        for root_id in heap.protocol_descriptions.iter().map(|v| v.this) {
 
            self.write_module(heap, root_id);
 
            w.write_all(self.buffer.as_bytes()).expect("flush buffer");
 
            self.buffer.clear();
 
        }
 
    }
 
@@ -258,24 +260,25 @@ impl ASTWriter {
 
                    self.kv(indent4).with_s_key("Definition")
 
                        .with_opt_disp_val(symbol.definition_id.as_ref().map(|v| &v.index));
 
                }
 
            }
 
        }
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level definition writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_definition(&mut self, heap: &Heap, def_id: DefinitionId, indent: usize) {
 
        self.cur_definition = Some(def_id);
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 
        let indent4 = indent3 + 1;
 

	
 
        match &heap[def_id] {
 
            Definition::Struct(_) => todo!("implement Definition::Struct"),
 
            Definition::Enum(_) => todo!("implement Definition::Enum"),
 
            Definition::Function(def) => {
 
                self.kv(indent).with_id(PREFIX_FUNCTION_ID, def.this.0.index)
 
                    .with_s_key("DefinitionFunction");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
@@ -488,152 +491,153 @@ impl ASTWriter {
 
                    .with_s_key("ExpressionStatement");
 
                self.write_expr(heap, stmt.expression, indent2);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            }
 
        }
 
    }
 

	
 
    fn write_expr(&mut self, heap: &Heap, expr_id: ExpressionId, indent: usize) {
 
        let expr = &heap[expr_id];
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 
        let def_id = self.cur_definition.unwrap();
 

	
 
        match expr {
 
            Expression::Assignment(expr) => {
 
                self.kv(indent).with_id(PREFIX_ASSIGNMENT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("AssignmentExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Conditional(expr) => {
 
                self.kv(indent).with_id(PREFIX_CONDITIONAL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ConditionalExpr");
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, expr.test, indent3);
 
                self.kv(indent2).with_s_key("TrueExpression");
 
                self.write_expr(heap, expr.true_expression, indent3);
 
                self.kv(indent2).with_s_key("FalseExpression");
 
                self.write_expr(heap, expr.false_expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Binary(expr) => {
 
                self.kv(indent).with_id(PREFIX_BINARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("BinaryExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Unary(expr) => {
 
                self.kv(indent).with_id(PREFIX_UNARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("UnaryExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Argument");
 
                self.write_expr(heap, expr.expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Indexing(expr) => {
 
                self.kv(indent).with_id(PREFIX_INDEXING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("IndexingExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("Index");
 
                self.write_expr(heap, expr.index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Slicing(expr) => {
 
                self.kv(indent).with_id(PREFIX_SLICING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SlicingExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("FromIndex");
 
                self.write_expr(heap, expr.from_index, indent3);
 
                self.kv(indent2).with_s_key("ToIndex");
 
                self.write_expr(heap, expr.to_index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Select(expr) => {
 
                self.kv(indent).with_id(PREFIX_SELECT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SelectExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 

	
 
                match &expr.field {
 
                    Field::Length => {
 
                        self.kv(indent2).with_s_key("Field").with_s_val("length");
 
                    },
 
                    Field::Symbolic(field) => {
 
                        self.kv(indent2).with_s_key("Field").with_ascii_val(&field.value);
 
                    }
 
                }
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Array(expr) => {
 
                self.kv(indent).with_id(PREFIX_ARRAY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ArrayExpr");
 
                self.kv(indent2).with_s_key("Elements");
 
                for expr_id in &expr.elements {
 
                    self.write_expr(heap, *expr_id, indent3);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Constant(expr) => {
 
                self.kv(indent).with_id(PREFIX_CONST_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ConstantExpr");
 

	
 
                let val = self.kv(indent2).with_s_key("Value");
 
                match &expr.value {
 
                    Constant::Null => { val.with_s_val("null"); },
 
                    Constant::True => { val.with_s_val("true"); },
 
                    Constant::False => { val.with_s_val("false"); },
 
                    Constant::Character(char) => { val.with_ascii_val(char); },
 
                    Constant::Integer(int) => { val.with_disp_val(int); },
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Call(expr) => {
 
                self.kv(indent).with_id(PREFIX_CALL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("CallExpr");
 

	
 
                // Method
 
                let method = self.kv(indent2).with_s_key("Method");
 
                match &expr.method {
 
                    Method::Get => { method.with_s_val("get"); },
 
                    Method::Put => { method.with_s_val("put"); },
 
                    Method::Fires => { method.with_s_val("fires"); },
 
                    Method::Create => { method.with_s_val("create"); },
 
@@ -646,36 +650,36 @@ impl ASTWriter {
 
                }
 

	
 
                // Arguments
 
                self.kv(indent2).with_s_key("Arguments");
 
                for arg_id in &expr.arguments {
 
                    self.write_expr(heap, *arg_id, indent3);
 
                }
 

	
 
                // Parent
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Variable(expr) => {
 
                self.kv(indent).with_id(PREFIX_VARIABLE_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("VariableExpr");
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&expr.identifier.value);
 
                self.kv(indent2).with_s_key("Definition")
 
                    .with_opt_disp_val(expr.declaration.as_ref().map(|v| &v.index));
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            }
 
        }
 
    }
 

	
 
    fn write_local(&mut self, heap: &Heap, local_id: LocalId, indent: usize) {
 
        let local = &heap[local_id];
 
        let indent2 = indent + 1;
 

	
 
        self.kv(indent).with_id(PREFIX_LOCAL_ID, local_id.0.index)
 
            .with_s_key("Local");
 

	
 
        self.kv(indent2).with_s_key("Name").with_ascii_val(&local.identifier.value);
 
@@ -740,78 +744,88 @@ fn write_parser_type(target: &mut String, heap: &Heap, t: &ParserType) {
 
    };
 

	
 
    if !embedded.is_empty() {
 
        target.push_str("<");
 
        for (idx, embedded_id) in embedded.into_iter().enumerate() {
 
            if idx != 0 { target.push_str(", "); }
 
            write_parser_type(target, heap, &heap[embedded_id]);
 
        }
 
        target.push_str(">");
 
    }
 
}
 

	
 
fn write_concrete_type(target: &mut String, heap: &Heap, t: &ConcreteType) {
 
fn write_concrete_type(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType) {
 
    use ConcreteTypePart as CTP;
 

	
 
    fn write_concrete_part(target: &mut String, heap: &Heap, t: &ConcreteType, mut idx: usize) -> usize {
 
    fn write_concrete_part(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType, mut idx: usize) -> usize {
 
        if idx >= t.parts.len() {
 
            target.push_str("Programmer error: invalid concrete type tree");
 
            return idx;
 
        }
 

	
 
        match &t.parts[idx] {
 
            CTP::Marker(marker) => {
 
                // Marker points to polymorphic variable index
 
                let definition = &heap[def_id];
 
                let poly_var_ident = match definition {
 
                    Definition::Struct(_) | Definition::Enum(_) => unreachable!(),
 
                    Definition::Function(definition) => &definition.poly_vars[*marker].value,
 
                    Definition::Component(definition) => &definition.poly_vars[*marker].value,
 
                };
 
                target.push_str(&String::from_utf8_lossy(&poly_var_ident));
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
            },
 
            CTP::Void => target.push_str("void"),
 
            CTP::Message => target.push_str("msg"),
 
            CTP::Bool => target.push_str("bool"),
 
            CTP::Byte => target.push_str("byte"),
 
            CTP::Short => target.push_str("short"),
 
            CTP::Int => target.push_str("int"),
 
            CTP::Long => target.push_str("long"),
 
            CTP::String => target.push_str("string"),
 
            CTP::Array => {
 
                idx = write_concrete_part(target, heap, t, idx + 1);
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[]");
 
            },
 
            CTP::Slice => {
 
                idx = write_concrete_part(target, heap, t, idx + 1);
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push_str("[..]");
 
            }
 
            CTP::Input => {
 
                target.push_str("in<");
 
                idx = write_concrete_part(target, heap, t, idx + 1);
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>');
 
            },
 
            CTP::Output => {
 
                target.push_str("out<");
 
                idx = write_concrete_part(target, heap, t, idx + 1);
 
                idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                target.push('>')
 
            },
 
            CTP::Instance(definition_id, num_embedded) => {
 
                let identifier = heap[*definition_id].identifier();
 
                target.push_str(&String::from_utf8_lossy(&identifier.value));
 
                target.push('<');
 
                for idx_embedded in 0..*num_embedded {
 
                    if idx_embedded != 0 {
 
                        target.push_str(", ");
 
                    }
 
                    idx = write_concrete_part(target, heap, t, idx + 1);
 
                    idx = write_concrete_part(target, heap, def_id, t, idx + 1);
 
                }
 
                target.push('>');
 
            }
 
        }
 

	
 
        idx + 1
 
    }
 

	
 
    write_concrete_part(target, heap, t, 0);
 
    write_concrete_part(target, heap, def_id, t, 0);
 
}
 

	
 
fn write_expression_parent(target: &mut String, parent: &ExpressionParent) {
 
    use ExpressionParent as EP;
 

	
 
    *target = match parent {
 
        EP::None => String::from("None"),
 
        EP::If(id) => format!("IfStmt({})", id.0.index),
 
        EP::While(id) => format!("WhileStmt({})", id.0.index),
 
        EP::Return(id) => format!("ReturnStmt({})", id.0.index),
 
        EP::Assert(id) => format!("AssertStmt({})", id.0.index),
 
        EP::New(id) => format!("NewStmt({})", id.0.index),
src/protocol/eval.rs
 
@@ -899,43 +899,51 @@ pub struct InputValue(pub PortId);
 

	
 
impl Display for InputValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "#in")
 
    }
 
}
 

	
 
impl ValueImpl for InputValue {
 
    fn exact_type(&self) -> Type {
 
        Type::INPUT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        return if let ParserTypeVariant::Input(_) = t.variant { true } else { false }
 
        use ParserTypeVariant::*;
 
        match &t.variant {
 
            Input(_) | Inferred | Symbolic(_) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct OutputValue(pub PortId);
 

	
 
impl Display for OutputValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "#out")
 
    }
 
}
 

	
 
impl ValueImpl for OutputValue {
 
    fn exact_type(&self) -> Type {
 
        Type::OUTPUT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        return if let ParserTypeVariant::Output(_) = t.variant { true } else { false }
 
        use ParserTypeVariant::*;
 
        match &t.variant {
 
            Output(_) | Inferred | Symbolic(_) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct MessageValue(pub Option<Payload>);
 

	
 
impl Display for MessageValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        match &self.0 {
 
            None => write!(f, "null"),
 
            Some(payload) => {
 
                // when formatting, print at most 10 bytes
 
@@ -945,131 +953,139 @@ impl Display for MessageValue {
 
                }
 
                f.debug_list().entries(slice.iter().copied()).finish()
 
            }
 
        }
 
    }
 
}
 

	
 
impl ValueImpl for MessageValue {
 
    fn exact_type(&self) -> Type {
 
        Type::MESSAGE
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        return if let ParserTypeVariant::Message = t.variant { true } else { false };
 
        use ParserTypeVariant::*;
 
        match &t.variant {
 
            Message | Inferred | Symbolic(_) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct BooleanValue(bool);
 

	
 
impl Display for BooleanValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for BooleanValue {
 
    fn exact_type(&self) -> Type {
 
        Type::BOOLEAN
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.variant {
 
            Bool | Byte | Short | Int | Long => true,
 
            Symbolic(_) | Inferred | Bool | Byte | Short | Int | Long => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ByteValue(i8);
 

	
 
impl Display for ByteValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for ByteValue {
 
    fn exact_type(&self) -> Type {
 
        Type::BYTE
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.variant {
 
            Byte | Short | Int | Long => true,
 
            Symbolic(_) | Inferred | Byte | Short | Int | Long => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ShortValue(i16);
 

	
 
impl Display for ShortValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for ShortValue {
 
    fn exact_type(&self) -> Type {
 
        Type::SHORT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.variant {
 
            Short | Int | Long => true,
 
            Symbolic(_) | Inferred | Short | Int | Long => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct IntValue(i32);
 

	
 
impl Display for IntValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for IntValue {
 
    fn exact_type(&self) -> Type {
 
        Type::INT
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        use ParserTypeVariant::*;
 
        match t.variant {
 
            Int | Long => true,
 
            Symbolic(_) | Inferred | Int | Long => true,
 
            _ => false
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LongValue(i64);
 

	
 
impl Display for LongValue {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}", self.0)
 
    }
 
}
 

	
 
impl ValueImpl for LongValue {
 
    fn exact_type(&self) -> Type {
 
        Type::LONG
 
    }
 
    fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool {
 
        return if let ParserTypeVariant::Long = t.variant { true } else { false }
 
        use ParserTypeVariant::*;
 
        match &t.variant {
 
            Long | Inferred | Symbolic(_) => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
fn get_array_inner(t: &ParserType) -> Option<ParserTypeId> {
 
    match t.variant {
 
        ParserTypeVariant::Array(inner) => Some(inner),
 
        _ => None
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct InputArrayValue(Vec<InputValue>);
 
@@ -1506,31 +1522,28 @@ impl Store {
 
                Method::Get => {
 
                    assert_eq!(1, expr.arguments.len());
 
                    let value = self.eval(h, ctx, expr.arguments[0])?;
 
                    match ctx.get(value.clone()) {
 
                        None => Err(EvalContinuation::BlockGet(value)),
 
                        Some(result) => Ok(result),
 
                    }
 
                }
 
                Method::Put => {
 
                    assert_eq!(2, expr.arguments.len());
 
                    let port_value = self.eval(h, ctx, expr.arguments[0])?;
 
                    let msg_value = self.eval(h, ctx, expr.arguments[1])?;
 
                    println!("DEBUG: Handiling put({:?}, {:?})", port_value, msg_value);
 
                    if ctx.did_put(port_value.clone()) {
 
                        println!("DEBUG: Already put...");
 
                        // Return bogus, replacing this at some point anyway
 
                        Ok(Value::Message(MessageValue(None)))
 
                    } else {
 
                        println!("DEBUG: Did not yet put...");
 
                        Err(EvalContinuation::Put(port_value, msg_value))
 
                    }
 
                }
 
                Method::Fires => {
 
                    assert_eq!(1, expr.arguments.len());
 
                    let value = self.eval(h, ctx, expr.arguments[0])?;
 
                    match ctx.fires(value.clone()) {
 
                        None => Err(EvalContinuation::BlockFires(value)),
 
                        Some(result) => Ok(result),
 
                    }
 
                }
 
                Method::Create => {
src/protocol/lexer.rs
 
@@ -2446,363 +2446,12 @@ impl Lexer<'_> {
 
        if !self.source.is_eof() {
 
            return Err(self.error_at_pos("Expected end of file"));
 
        }
 
        Ok(h.alloc_protocol_description(|this| Root {
 
            this,
 
            position,
 
            pragmas,
 
            imports,
 
            definitions,
 
        }))
 
    }
 
}
 

	
 
#[cfg(test)]
 
mod tests {
 
    use crate::protocol::ast::*;
 
    use crate::protocol::lexer::*;
 
    use crate::protocol::inputsource::*;
 

	
 
    #[derive(Debug, Eq, PartialEq)]
 
    enum ParserTypeClass {
 
        Message, Bool, Byte, Short, Int, Long, String, Array, Nope
 
    }
 
    impl ParserTypeClass {
 
        fn from(v: &ParserType) -> ParserTypeClass {
 
            use ParserTypeVariant as PTV;
 
            use ParserTypeClass as PTC;
 
            match &v.variant {
 
                PTV::Message => PTC::Message,
 
                PTV::Bool => PTC::Bool,
 
                PTV::Byte => PTC::Byte,
 
                PTV::Short => PTC::Short,
 
                PTV::Int => PTC::Int,
 
                PTV::Long => PTC::Long,
 
                PTV::String => PTC::String,
 
                PTV::Array(_) => PTC::Array,
 
                _ => PTC::Nope,
 
            }
 
        }
 
    }
 

	
 
    #[test]
 
    fn test_pragmas() {
 
        let mut h = Heap::new();
 
        let mut input = InputSource::from_string("
 
        #version 0o7777
 
        #module something.dot.separated
 
        ").expect("new InputSource");
 
        let mut lex = Lexer::new(&mut input);
 
        let lexed = lex.consume_protocol_description(&mut h)
 
            .expect("lex input source");
 
        let root = &h[lexed];
 
        assert_eq!(root.pragmas.len(), 2);
 
        let pv = &h[root.pragmas[0]];
 
        let pm = &h[root.pragmas[1]];
 

	
 
        if let Pragma::Version(v) = pv {
 
            assert_eq!(v.version, 0o7777)
 
        } else {
 
            assert!(false, "first pragma not version");
 
        }
 
        if let Pragma::Module(m) = pm {
 
            assert_eq!(m.value, b"something.dot.separated");
 
        } else {
 
            assert!(false, "second pragma not module");
 
        }
 
    }
 

	
 
    #[test]
 
    fn test_import() {
 
        let mut h = Heap::new();
 
        let mut input = InputSource::from_string("
 
        // Module imports, with optional and explicit aliasing
 
        import single_module;
 
        import std.reo;
 
        import something.other as alias;
 
        // Symbol imports
 
        import some_module::*;
 
        import some_module::{Foo as Bar, Qux, Dix as Flu};
 
        import std.reo::{
 
            Foo as Bar, // because thing
 
            Qux as Mox, // more explanations
 
            Dix, /* yesh, import me */
 
        };
 
        ").unwrap();
 
        let mut lex = Lexer::new(&mut input);
 
        let lexed = lex.consume_protocol_description(&mut h).unwrap();
 
        let root = &h[lexed];
 
        assert_eq!(root.imports.len(), 6);
 
        let no_alias_single = h[root.imports[0]].as_module();
 
        let no_alias_multi = h[root.imports[1]].as_module();
 
        let with_alias = h[root.imports[2]].as_module();
 

	
 
        assert_eq!(no_alias_single.module_name, b"single_module");
 
        assert_eq!(no_alias_single.alias, b"single_module");
 
        assert_eq!(no_alias_multi.module_name, b"std.reo");
 
        assert_eq!(no_alias_multi.alias, b"reo");
 
        assert_eq!(with_alias.module_name, b"something.other");
 
        assert_eq!(with_alias.alias, b"alias");
 

	
 
        let all_symbols = h[root.imports[3]].as_symbols();
 
        let single_line_symbols = h[root.imports[4]].as_symbols();
 
        let multi_line_symbols = h[root.imports[5]].as_symbols();
 

	
 
        assert_eq!(all_symbols.module_name, b"some_module");
 
        assert!(all_symbols.symbols.is_empty());
 
        assert_eq!(single_line_symbols.module_name, b"some_module");
 
        assert_eq!(single_line_symbols.symbols.len(), 3);
 
        assert_eq!(single_line_symbols.symbols[0].name, b"Foo");
 
        assert_eq!(single_line_symbols.symbols[0].alias, b"Bar");
 
        assert_eq!(single_line_symbols.symbols[1].name, b"Qux");
 
        assert_eq!(single_line_symbols.symbols[1].alias, b"Qux");
 
        assert_eq!(single_line_symbols.symbols[2].name, b"Dix");
 
        assert_eq!(single_line_symbols.symbols[2].alias, b"Flu");
 
        assert_eq!(multi_line_symbols.module_name, b"std.reo");
 
        assert_eq!(multi_line_symbols.symbols.len(), 3);
 
        assert_eq!(multi_line_symbols.symbols[0].name, b"Foo");
 
        assert_eq!(multi_line_symbols.symbols[0].alias, b"Bar");
 
        assert_eq!(multi_line_symbols.symbols[1].name, b"Qux");
 
        assert_eq!(multi_line_symbols.symbols[1].alias, b"Mox");
 
        assert_eq!(multi_line_symbols.symbols[2].name, b"Dix");
 
        assert_eq!(multi_line_symbols.symbols[2].alias, b"Dix");
 
    }
 

	
 
    #[test]
 
    fn test_struct_definition() {
 
        let mut h = Heap::new();
 
        let mut input = InputSource::from_string("
 
        struct Foo {
 
            byte one,
 
            short two,
 
            Bar three,
 
        }
 
        struct Bar{int[] one, int[] two, Qux[] three}
 
        ").unwrap();
 
        let mut lex = Lexer::new(&mut input);
 
        let lexed = lex.consume_protocol_description(&mut h);
 
        if let Err(err) = &lexed {
 
            println!("{}", err);
 
        }
 
        let lexed = lexed.unwrap();
 
        let root = &h[lexed];
 

	
 
        assert_eq!(root.definitions.len(), 2);
 

	
 
        // let symbolic_type = |v: &PrimitiveType| -> Vec<u8> {
 
        //     if let PrimitiveType::Symbolic(v) = v {
 
        //         v.identifier.value.clone()
 
        //     } else {
 
        //         assert!(false);
 
        //         unreachable!();
 
        //     }
 
        // };
 

	
 
        let foo_def = h[root.definitions[0]].as_struct();
 
        assert_eq!(foo_def.identifier.value, b"Foo");
 
        assert_eq!(foo_def.fields.len(), 3);
 
        assert_eq!(foo_def.fields[0].field.value, b"one");
 
        assert_eq!(ParserTypeClass::from(&h[foo_def.fields[0].parser_type]), ParserTypeClass::Byte);
 
        assert_eq!(foo_def.fields[1].field.value, b"two");
 
        assert_eq!(ParserTypeClass::from(&h[foo_def.fields[1].parser_type]), ParserTypeClass::Short);
 
        assert_eq!(foo_def.fields[2].field.value, b"three");
 
        // assert_eq!(
 
        //     symbolic_type(&h[foo_def.fields[2].the_type].the_type.primitive),
 
        //     Vec::from("Bar".as_bytes())
 
        // );
 

	
 
        let bar_def = h[root.definitions[1]].as_struct();
 
        assert_eq!(bar_def.identifier.value, b"Bar");
 
        assert_eq!(bar_def.fields.len(), 3);
 
        assert_eq!(bar_def.fields[0].field.value, b"one");
 
        assert_eq!(ParserTypeClass::from(&h[bar_def.fields[0].parser_type]), ParserTypeClass::Array);
 
        assert_eq!(bar_def.fields[1].field.value, b"two");
 
        assert_eq!(ParserTypeClass::from(&h[bar_def.fields[1].parser_type]), ParserTypeClass::Array);
 
        assert_eq!(bar_def.fields[2].field.value, b"three");
 
        assert_eq!(ParserTypeClass::from(&h[bar_def.fields[2].parser_type]), ParserTypeClass::Array);
 
        // assert_eq!(
 
        //     symbolic_type(&h[bar_def.fields[2].parser_type].the_type.primitive),
 
        //     Vec::from("Qux".as_bytes())
 
        // );
 
    }
 

	
 
    #[test]
 
    fn test_enum_definition() {
 
        let mut h = Heap::new();
 
        let mut input = InputSource::from_string("
 
        enum Foo {
 
            A = 0,
 
            B = 5,
 
            C,
 
            D = 0xFF,
 
        }
 
        enum Bar { Ayoo, Byoo, Cyoo,}
 
        enum Qux { A(byte[]), B(Bar[]), C(byte)
 
        }
 
        ").unwrap();
 
        let mut lex = Lexer::new(&mut input);
 
        let lexed = lex.consume_protocol_description(&mut h).unwrap();
 
        let root = &h[lexed];
 

	
 
        assert_eq!(root.definitions.len(), 3);
 

	
 
        let foo_def = h[root.definitions[0]].as_enum();
 
        assert_eq!(foo_def.identifier.value, b"Foo");
 
        assert_eq!(foo_def.variants.len(), 4);
 
        assert_eq!(foo_def.variants[0].identifier.value, b"A");
 
        assert_eq!(foo_def.variants[0].value, EnumVariantValue::Integer(0));
 
        assert_eq!(foo_def.variants[1].identifier.value, b"B");
 
        assert_eq!(foo_def.variants[1].value, EnumVariantValue::Integer(5));
 
        assert_eq!(foo_def.variants[2].identifier.value, b"C");
 
        assert_eq!(foo_def.variants[2].value, EnumVariantValue::None);
 
        assert_eq!(foo_def.variants[3].identifier.value, b"D");
 
        assert_eq!(foo_def.variants[3].value, EnumVariantValue::Integer(0xFF));
 

	
 
        let bar_def = h[root.definitions[1]].as_enum();
 
        assert_eq!(bar_def.identifier.value, b"Bar");
 
        assert_eq!(bar_def.variants.len(), 3);
 
        assert_eq!(bar_def.variants[0].identifier.value, b"Ayoo");
 
        assert_eq!(bar_def.variants[0].value, EnumVariantValue::None);
 
        assert_eq!(bar_def.variants[1].identifier.value, b"Byoo");
 
        assert_eq!(bar_def.variants[1].value, EnumVariantValue::None);
 
        assert_eq!(bar_def.variants[2].identifier.value, b"Cyoo");
 
        assert_eq!(bar_def.variants[2].value, EnumVariantValue::None);
 

	
 
        let qux_def = h[root.definitions[2]].as_enum();
 
        let enum_type = |value: &EnumVariantValue| -> &ParserType {
 
            if let EnumVariantValue::Type(t) = value {
 
                &h[*t]
 
            } else {
 
                assert!(false);
 
                unreachable!();
 
            }
 
        };
 
        assert_eq!(qux_def.identifier.value, b"Qux");
 
        assert_eq!(qux_def.variants.len(), 3);
 
        assert_eq!(qux_def.variants[0].identifier.value, b"A");
 
        assert_eq!(ParserTypeClass::from(enum_type(&qux_def.variants[0].value)), ParserTypeClass::Array);
 
        assert_eq!(qux_def.variants[1].identifier.value, b"B");
 
        assert_eq!(ParserTypeClass::from(enum_type(&qux_def.variants[1].value)), ParserTypeClass::Array);
 
        // if let PrimitiveType::Symbolic(t) = &enum_type(&qux_def.variants[1].value).the_type.primitive {
 
        //     assert_eq!(t.identifier.value, Vec::from("Bar".as_bytes()));
 
        // } else { assert!(false) }
 

	
 
        assert_eq!(qux_def.variants[2].identifier.value, b"C");
 
        assert_eq!(ParserTypeClass::from(enum_type(&qux_def.variants[2].value)), ParserTypeClass::Byte);
 
    }
 

	
 
//     #[test]
 
//     fn test_lowercase() {
 
//         assert_eq!(lowercase(b'a'), b'a');
 
//         assert_eq!(lowercase(b'A'), b'a');
 
//         assert_eq!(lowercase(b'z'), b'z');
 
//         assert_eq!(lowercase(b'Z'), b'z');
 
//     }
 

	
 
//     #[test]
 
//     fn test_basic_expression() {
 
//         let mut h = Heap::new();
 
//         let mut is = InputSource::from_string("a+b;").unwrap();
 
//         let mut lex = Lexer::new(&mut is);
 
//         match lex.consume_expression(&mut h) {
 
//             Ok(expr) => {
 
//                 println!("{:?}", expr);
 
//                 if let Binary(bin) = &h[expr] {
 
//                     if let Variable(left) = &h[bin.left] {
 
//                         if let Variable(right) = &h[bin.right] {
 
//                             assert_eq!("a", format!("{}", h[left.identifier]));
 
//                             assert_eq!("b", format!("{}", h[right.identifier]));
 
//                             assert_eq!(Some(b';'), is.next());
 
//                             return;
 
//                         }
 
//                     }
 
//                 }
 
//                 assert!(false);
 
//             }
 
//             Err(err) => {
 
//                 err.print(&is);
 
//                 assert!(false);
 
//             }
 
//         }
 
//     }
 

	
 
//     #[test]
 
//     fn test_paren_expression() {
 
//         let mut h = Heap::new();
 
//         let mut is = InputSource::from_string("(true)").unwrap();
 
//         let mut lex = Lexer::new(&mut is);
 
//         match lex.consume_paren_expression(&mut h) {
 
//             Ok(expr) => {
 
//                 println!("{:#?}", expr);
 
//                 if let Constant(con) = &h[expr] {
 
//                     if let ast::Constant::True = con.value {
 
//                         return;
 
//                     }
 
//                 }
 
//                 assert!(false);
 
//             }
 
//             Err(err) => {
 
//                 err.print(&is);
 
//                 assert!(false);
 
//             }
 
//         }
 
//     }
 

	
 
//     #[test]
 
//     fn test_expression() {
 
//         let mut h = Heap::new();
 
//         let mut is = InputSource::from_string("(x(1+5,get(y))-w[5])+z++\n").unwrap();
 
//         let mut lex = Lexer::new(&mut is);
 
//         match lex.consume_expression(&mut h) {
 
//             Ok(expr) => {
 
//                 println!("{:#?}", expr);
 
//             }
 
//             Err(err) => {
 
//                 err.print(&is);
 
//                 assert!(false);
 
//             }
 
//         }
 
//     }
 

	
 
//     #[test]
 
//     fn test_basic_statement() {
 
//         let mut h = Heap::new();
 
//         let mut is = InputSource::from_string("while (true) { skip; }").unwrap();
 
//         let mut lex = Lexer::new(&mut is);
 
//         match lex.consume_statement(&mut h) {
 
//             Ok(stmt) => {
 
//                 println!("{:#?}", stmt);
 
//                 if let Statement::While(w) = &h[stmt] {
 
//                     if let Expression::Constant(_) = h[w.test] {
 
//                         if let Statement::Block(_) = h[w.body] {
 
//                             return;
 
//                         }
 
//                     }
 
//                 }
 
//                 assert!(false);
 
//             }
 
//             Err(err) => {
 
//                 err.print(&is);
 
//                 assert!(false);
 
//             }
 
//         }
 
//     }
 

	
 
//     #[test]
 
//     fn test_statement() {
 
//         let mut h = Heap::new();
 
//         let mut is = InputSource::from_string(
 
//             "label: while (true) { if (x++ > y[0]) break label; else continue; }\n",
 
//         )
 
//         .unwrap();
 
//         let mut lex = Lexer::new(&mut is);
 
//         match lex.consume_statement(&mut h) {
 
//             Ok(stmt) => {
 
//                 println!("{:#?}", stmt);
 
//             }
 
//             Err(err) => {
 
//                 err.print(&is);
 
//                 assert!(false);
 
//             }
 
//         }
 
//     }
 
}
src/protocol/parser/mod.rs
 
@@ -216,27 +216,27 @@ impl Parser {
 
        let mut queue = ResolveQueue::new();
 
        TypeResolvingVisitor::queue_module_definitions(&ctx, &mut queue);
 
        while !queue.is_empty() {
 
            let top = queue.pop().unwrap();
 
            println!("Resolving root={}, def={}, mono={:?}", top.root_id.index, top.definition_id.index, top.monomorph_types);
 
            type_visit.handle_module_definition(&mut ctx, &mut queue, top)?;
 
        }
 

	
 
        if let Err((position, message)) = Self::parse_inner(&mut self.heap, root_id) {
 
            return Err(ParseError2::new_error(&self.modules[0].source, position, &message))
 
        }
 

	
 
        let mut writer = ASTWriter::new();
 
        let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap();
 
        writer.write_ast(&mut file, &self.heap);
 
        // let mut writer = ASTWriter::new();
 
        // let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap();
 
        // writer.write_ast(&mut file, &self.heap);
 

	
 
        Ok(root_id)
 
    }
 

	
 
    pub fn parse_inner(h: &mut Heap, pd: RootId) -> VisitorResult {
 
        // TODO: @cleanup, slowly phasing out old compiler
 
        // NestedSynchronousStatements::new().visit_protocol_description(h, pd)?;
 
        // ChannelStatementOccurrences::new().visit_protocol_description(h, pd)?;
 
        // FunctionStatementReturns::new().visit_protocol_description(h, pd)?;
 
        // ComponentStatementReturnNew::new().visit_protocol_description(h, pd)?;
 
        // CheckBuiltinOccurrences::new().visit_protocol_description(h, pd)?;
 
        // BuildSymbolDeclarations::new().visit_protocol_description(h, pd)?;
src/protocol/parser/type_resolver.rs
 
/// type_resolver.rs
 
///
 
/// Performs type inference and type checking
 
/// Performs type inference and type checking. Type inference is implemented by
 
/// applying constraints on (sub)trees of types. During this process the
 
/// resolver takes the `ParserType` structs (the representation of the types
 
/// written by the programmer), converts them to `InferenceType` structs (the
 
/// temporary data structure used during type inference) and attempts to arrive
 
/// at `ConcreteType` structs (the representation of a fully checked and
 
/// validated type).
 
///
 
/// The resolver will visit every statement and expression relevant to the
 
/// procedure, inserting and determining its initial type based on context (e.g. a
 
/// return statement's expression must match the function's return type, an
 
/// if statement's test expression must evaluate to a boolean). When all are
 
/// visited we attempt to make progress in evaluating the types. Whenever a type
 
/// is progressed we queue the related expressions for further type progression.
 
/// Once no more expressions are in the queue the algorithm is finished. At this
 
/// point either all types are inferred (or can be trivially implicitly
 
/// determined), or we have incomplete types. In the latter case we return an
 
/// error.
 
///
 
/// Inference may be applied on non-polymorphic procedures and on polymorphic
 
/// procedures. When dealing with a non-polymorphic procedure we apply the type
 
/// resolver and annotate the AST with the `ConcreteType`s. When dealing with
 
/// polymorphic procedures we will only annotate the AST once, preserving
 
/// references to polymorphic variables. Any later pass will perform just the
 
/// type checking.
 
///
 
/// TODO: Needs an optimization pass
 
/// TODO: Needs a cleanup pass
 
/// TODO: Disallow `Void` types in various expressions (and other future types)
 
/// TODO: Maybe remove msg type?
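A rough, self-contained illustration of the worklist shape described in the comment above (toy type lattice and toy expression graph, not the resolver's actual data structures or constraint rules):

// Toy illustration of the "progress -> requeue related expressions" loop.
use std::collections::VecDeque;

#[derive(Clone, Copy, PartialEq, Debug)]
enum Ty { Unknown, IntegerLike, Int }

fn merge(a: Ty, b: Ty) -> Ty {
    // Pick the more specific of two compatible toy types.
    match (a, b) {
        (Ty::Unknown, x) | (x, Ty::Unknown) => x,
        (Ty::IntegerLike, x) | (x, Ty::IntegerLike) => x,
        (x, _) => x,
    }
}

fn main() {
    // Three "expressions"; each edge says two expressions must share a type.
    let mut types = vec![Ty::Unknown, Ty::IntegerLike, Ty::Int];
    let edges = vec![(0usize, 1usize), (1, 2)];
    let mut queue: VecDeque<usize> = (0..types.len()).collect();

    while let Some(expr) = queue.pop_front() {
        for &(a, b) in &edges {
            if a != expr && b != expr { continue; }
            let merged = merge(types[a], types[b]);
            for idx in [a, b] {
                if types[idx] != merged {
                    types[idx] = merged;   // progress was made,
                    queue.push_back(idx);  // so requeue the related expression
                }
            }
        }
    }
    assert!(types.iter().all(|t| *t == Ty::Int));
}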
 

	
 
macro_rules! enabled_debug_print {
 
    (false, $name:literal, $format:literal) => {};
 
    (false, $name:literal, $format:literal, $($args:expr),*) => {};
 
    (true, $name:literal, $format:literal) => {
 
        println!("[{}] {}", $name, $format)
 
    };
 
    (true, $name:literal, $format:literal, $($args:expr),*) => {
 
        println!("[{}] {}", $name, format!($format, $($args),*))
 
    };
 
}
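For context on the macro above: its first argument is a compile-time boolean literal, so disabled call sites expand to nothing and carry no runtime cost. A self-contained copy with a hypothetical call site (the "infer" tag and the messages are made up for illustration):

macro_rules! enabled_debug_print {
    (false, $name:literal, $format:literal) => {};
    (false, $name:literal, $format:literal, $($args:expr),*) => {};
    (true, $name:literal, $format:literal) => {
        println!("[{}] {}", $name, $format)
    };
    (true, $name:literal, $format:literal, $($args:expr),*) => {
        println!("[{}] {}", $name, format!($format, $($args),*))
    };
}

fn main() {
    // Prints "[infer] progressed expr 12 to int".
    enabled_debug_print!(true, "infer", "progressed expr {} to {}", 12, "int");
    // Expands to nothing at all.
    enabled_debug_print!(false, "infer", "this call compiles away entirely");
}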
 

	
 
@@ -32,69 +58,79 @@ use crate::protocol::inputsource::*;
 
use super::type_table::*;
 
use super::symbol_table::*;
 
use super::visitor::{
 
    STMT_BUFFER_INIT_CAPACITY,
 
    EXPR_BUFFER_INIT_CAPACITY,
 
    Ctx,
 
    Visitor2,
 
    VisitorResult
 
};
 
use std::collections::hash_map::Entry;
 
use crate::protocol::parser::type_resolver::InferenceTypePart::IntegerLike;
 

	
 
const MESSAGE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Message ];
 
const MESSAGE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Message, InferenceTypePart::Byte ];
 
const BOOL_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Bool ];
 
const NUMBERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::NumberLike ];
 
const INTEGERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::IntegerLike ];
 
const ARRAY_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Array, InferenceTypePart::Unknown ];
 
const ARRAYLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::ArrayLike, InferenceTypePart::Unknown ];
 
const PORTLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::PortLike, InferenceTypePart::Unknown ];
 

	
 
/// TODO: @performance Turn into PartialOrd+Ord to simplify checks
 
/// TODO: @types Remove the Message -> Byte hack at some point...
 
#[derive(Debug, Clone, Eq, PartialEq)]
 
pub(crate) enum InferenceTypePart {
 
    // A marker with an identifier which we can use to seek subsections of the 
 
    // inferred type
 
    Marker(usize),
 
    // A marker with an identifier which we can use to retrieve the type subtree
 
    // that follows the marker. This is used to perform type inference on
 
    // polymorphs: an expression may determine the polymorph's type, after which
 
    // we need to apply that information to all other places where the polymorph is
 
    // used.
 
    MarkerDefinition(usize), // marker for polymorph types on a procedure's definition
 
    MarkerBody(usize), // marker for polymorph types within a procedure body
 
    // Completely unknown type, needs to be inferred
 
    Unknown,
 
    // Partially known type, may be inferred to be the appropriate related
 
    // type.
 
    // IndexLike,      // index into array/slice
 
    NumberLike,     // any kind of integer/float
 
    IntegerLike,    // any kind of integer
 
    ArrayLike,      // array or slice. Note that this must have a subtype
 
    PortLike,       // input or output port
 
    // Special types that cannot be instantiated by the user
 
    Void, // For builtin functions that do not return anything
 
    // Concrete types without subtypes
 
    Message,
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // One subtype
 
    Message,
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // A user-defined type with any number of subtypes
 
    Instance(DefinitionId, usize)
 
}
 

	
 
impl InferenceTypePart {
 
    fn is_marker(&self) -> bool {
 
        if let InferenceTypePart::Marker(_) = self { true } else { false }
 
        use InferenceTypePart as ITP;
 

	
 
        match self {
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if the type is concrete, markers are interpreted as concrete
 
    /// types.
 
    fn is_concrete(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | 
 
            ITP::ArrayLike | ITP::PortLike => false,
 
            _ => true
 
        }
 
    }
 
@@ -107,190 +143,152 @@ impl InferenceTypePart {
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_integer(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_array_or_slice(&self) -> bool {
 
    fn is_concrete_msg_array_or_slice(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Array | ITP::Slice => true,
 
            ITP::Array | ITP::Slice | ITP::Message => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_port(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Input | ITP::Output => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if a part is less specific than the argument. Only checks for 
 
    /// single-part inference (i.e. not the replacement of an `Unknown` variant 
 
    /// with the argument)
 
    fn may_be_inferred_from(&self, arg: &InferenceTypePart) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        (*self == ITP::IntegerLike && arg.is_concrete_integer()) ||
 
        (*self == ITP::NumberLike && (arg.is_concrete_number() || *arg == ITP::IntegerLike)) ||
 
        (*self == ITP::ArrayLike && arg.is_concrete_array_or_slice()) ||
 
        (*self == ITP::ArrayLike && arg.is_concrete_msg_array_or_slice()) ||
 
        (*self == ITP::PortLike && arg.is_concrete_port())
 
    }
 

	
 
    /// Returns the change in "iteration depth" when traversing this particular
 
    /// part. The iteration depth is used to traverse the tree in a linear 
 
    /// fashion. It is basically `number_of_subtypes - 1`
 
    fn depth_change(&self) -> i32 {
 
        use InferenceTypePart as ITP;
 
        match &self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike |
 
            ITP::Void | ITP::Message | ITP::Bool | 
 
            ITP::Void | ITP::Bool |
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long | 
 
            ITP::String => {
 
                -1
 
            },
 
            ITP::Marker(_) | ITP::ArrayLike | ITP::Array | ITP::Slice | 
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) |
 
            ITP::ArrayLike | ITP::Message | ITP::Array | ITP::Slice |
 
            ITP::PortLike | ITP::Input | ITP::Output => {
 
                // One subtype, so do not modify depth
 
                0
 
            },
 
            ITP::Instance(_, num_args) => {
 
                (*num_args as i32) - 1
 
            }
 
        }
 
    }
 
}
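Since `depth_change` is defined as `number_of_subtypes - 1`, a flattened pre-order type can be traversed linearly: start a counter at 1 and add each part's depth change until the counter reaches 0. A self-contained sketch of that idea (simplified parts, not the resolver's actual `find_subtree_end_idx`):

// Sketch: locating the end of a subtree in a flattened pre-order type list.
#[derive(Debug)]
enum Part { Int, Array, Input, Instance(usize) } // Instance(n) has n subtypes

fn depth_change(p: &Part) -> i32 {
    match p {
        Part::Int => -1,                      // leaf: closes one open slot
        Part::Array | Part::Input => 0,       // exactly one subtype follows
        Part::Instance(n) => (*n as i32) - 1, // n subtypes follow
    }
}

/// Returns the exclusive end index of the subtree starting at `start`.
fn subtree_end(parts: &[Part], start: usize) -> usize {
    let mut depth = 1;
    let mut idx = start;
    while depth > 0 {
        depth += depth_change(&parts[idx]);
        idx += 1;
    }
    idx
}

fn main() {
    // Encodes a hypothetical `Instance<int[], in<int>>` as a flat list.
    let parts = [
        Part::Instance(2),
        Part::Array, Part::Int,
        Part::Input, Part::Int,
    ];
    assert_eq!(subtree_end(&parts, 0), 5); // the whole type
    assert_eq!(subtree_end(&parts, 1), 3); // just the `int[]` subtree
}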
 

	
 
impl From<ConcreteTypePart> for InferenceTypePart {
 
    fn from(v: ConcreteTypePart) -> InferenceTypePart {
 
        use ConcreteTypePart as CTP;
 
        use InferenceTypePart as ITP;
 

	
 
        match v {
 
            CTP::Marker(_) => {
 
                unreachable!("encountered marker while converting concrete type to inferred type");
 
            }
 
            CTP::Void => ITP::Void,
 
            CTP::Message => ITP::Message,
 
            CTP::Bool => ITP::Bool,
 
            CTP::Byte => ITP::Byte,
 
            CTP::Short => ITP::Short,
 
            CTP::Int => ITP::Int,
 
            CTP::Long => ITP::Long,
 
            CTP::String => ITP::String,
 
            CTP::Array => ITP::Array,
 
            CTP::Slice => ITP::Slice,
 
            CTP::Input => ITP::Input,
 
            CTP::Output => ITP::Output,
 
            CTP::Instance(id, num) => ITP::Instance(id, num),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug)]
 
struct InferenceType {
 
    has_marker: bool,
 
    has_body_marker: bool,
 
    is_done: bool,
 
    parts: Vec<InferenceTypePart>,
 
}
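// For illustration: the two flags summarize the `parts` vector. The `input<T>`
// signature used by the builtin `get` further below is built as
//
//     InferenceType::new(true, false, vec![
//         InferenceTypePart::Input, InferenceTypePart::MarkerBody(0), InferenceTypePart::Unknown,
//     ])
//
// i.e. it carries a body marker for polymorphic variable 0 (`has_body_marker`)
// and is not yet fully inferred, whereas
// `InferenceType::new(false, true, vec![InferenceTypePart::Bool])` has no
// markers and is already done.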
 

	
 
impl InferenceType {
 
    fn new(has_marker: bool, is_done: bool, parts: Vec<InferenceTypePart>) -> Self {
 
    fn new(has_body_marker: bool, is_done: bool, parts: Vec<InferenceTypePart>) -> Self {
 
        if cfg!(debug_assertions) {
 
            debug_assert!(!parts.is_empty());
 
            if !has_marker {
 
                debug_assert!(parts.iter().all(|v| !v.is_marker()));
 
            if !has_body_marker {
 
                debug_assert!(parts.iter().all(|v| {
 
                    if let InferenceTypePart::MarkerBody(_) = v { false } else { true }
 
                }));
 
            }
 
            if is_done {
 
                debug_assert!(parts.iter().all(|v| v.is_concrete()));
 
            }
 
        }
 
        Self{ has_marker, is_done, parts }
 
        Self{ has_body_marker, is_done, parts }
 
    }
 

	
 
    fn replace_subtree(&mut self, start_idx: usize, with: &[InferenceTypePart]) {
 
        let end_idx = Self::find_subtree_end_idx(&self.parts, start_idx);
 
        debug_assert_eq!(with.len(), Self::find_subtree_end_idx(with, 0));
 
        self.parts.splice(start_idx..end_idx, with.iter().cloned());
 
        self.recompute_is_done();
 
    }
 

	
 
    // TODO: @performance, might all be done inline in the type inference methods
 
    fn recompute_is_done(&mut self) {
 
        self.is_done = self.parts.iter().all(|v| v.is_concrete());
 
    }
 

	
 
    /// Checks if type is, or may be inferred as, a number
 
    // TODO: @float
 
    fn might_be_number(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        // TODO: @marker?
 
        if self.parts.len() != 1 { return false; }
 
        match self.parts[0] {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike |
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long =>
 
                true,
 
            _ =>
 
                false,
 
        }
 
    }
 

	
 
    /// Checks if type is, or may be inferred as, an integer
 
    fn might_be_integer(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        // TODO: @marker?
 
        if self.parts.len() != 1 { return false; }
 
        match self.parts[0] {
 
            ITP::Unknown | ITP::IntegerLike |
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long =>
 
                true,
 
            _ =>
 
                false,
 
        }
 
    }
 

	
 
    /// Checks if type is, or may be inferred as, a boolean
 
    fn might_be_boolean(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        // TODO: @marker?
 
        if self.parts.len() != 1 { return false; }
 
        match self.parts[0] {
 
            ITP::Unknown | ITP::Bool => true,
 
            _ => false
 
        }
 
    }
 

	
 
    /// Returns an iterator over all markers and the partial type tree that
 
    /// Returns an iterator over all body markers and the partial type tree that
 
    /// follows those markers.
 
    fn marker_iter(&self) -> InferenceTypeMarkerIter {
 
    fn body_marker_iter(&self) -> InferenceTypeMarkerIter {
 
        InferenceTypeMarkerIter::new(&self.parts)
 
    }
 

	
 
    /// Attempts to find a marker with a specific value appearing at or after
 
    /// the specified index. If found then the partial type tree's bounding
 
    /// indices that follow that marker are returned.
 
    fn find_subtree_idx_for_marker(&self, marker: usize, mut idx: usize) -> Option<(usize, usize)> {
 
        // Seek ahead to find a marker
 
        let marker = InferenceTypePart::Marker(marker);
 
    /// Attempts to find a specific type part appearing at or after the
 
    /// specified index. If found then the partial type tree's bounding indices
 
    /// that follow that marker are returned.
 
    fn find_subtree_idx_for_part(&self, part: InferenceTypePart, mut idx: usize) -> Option<(usize, usize)> {
 
        debug_assert!(part.depth_change() >= 0, "cannot find subtree for leaf part");
 
        while idx < self.parts.len() {
 
            if marker == self.parts[idx] {
 
                // Found the marker
 
            if part == self.parts[idx] {
 
                // Found the specified part
 
                let start_idx = idx + 1;
 
                let end_idx = Self::find_subtree_end_idx(&self.parts, start_idx);
 
                return Some((start_idx, end_idx))
 
            }
 

	
 
            idx += 1;
 
        }
 

	
 
        None
 
    }
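// For illustration, the intended use (mirroring the call-expression handling
// further below): the subtree following a `MarkerBody(poly_idx)` part is the
// slice of a signature type that corresponds to polymorphic variable
// `poly_idx`.
//
//     while let Some((start_idx, end_idx)) = sig_type.find_subtree_idx_for_part(
//         InferenceTypePart::MarkerBody(poly_idx), seek_idx
//     ) {
//         // sig_type.parts[start_idx..end_idx] is the currently inferred shape
//         // of that polymorphic variable; continue searching at end_idx.
//         seek_idx = end_idx;
//     }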
 

	
 
    /// Given that the `parts` are a depth-first serialized tree of types, this
 
@@ -325,63 +323,85 @@ impl InferenceType {
 
    /// `to_infer` parts are checked for inference. It might be that this 
 
    /// function returns `None`, but that `template` is still compatible
 
    /// with `to_infer`, e.g. when `template` has an `Unknown` part.
 
    fn infer_part_for_single_type(
 
        to_infer: &mut InferenceType, to_infer_idx: &mut usize,
 
        template_parts: &[InferenceTypePart], template_idx: &mut usize,
 
    ) -> Option<i32> {
 
        use InferenceTypePart as ITP;
 

	
 
        let to_infer_part = &to_infer.parts[*to_infer_idx];
 
        let template_part = &template_parts[*template_idx];
 

	
 
        // TODO: Maybe do this differently?
 
        let mut template_definition_marker = None;
 
        if *template_idx > 0 {
 
            if let ITP::MarkerDefinition(marker) = &template_parts[*template_idx - 1] {
 
                template_definition_marker = Some(*marker)
 
            }
 
        }
 

	
 
        // Check for programmer mistakes
 
        debug_assert_ne!(to_infer_part, template_part);
 
        debug_assert!(!to_infer_part.is_marker(), "marker encountered in 'infer part'");
 
        debug_assert!(!template_part.is_marker(), "marker encountered in 'template part'");
 

	
 
        // Inference of a somewhat-specified type
 
        if to_infer_part.may_be_inferred_from(template_part) {
 
            let depth_change = to_infer_part.depth_change();
 
            debug_assert_eq!(depth_change, template_part.depth_change());
 

	
 
            if let Some(marker) = template_definition_marker {
 
                to_infer.parts.insert(*to_infer_idx, ITP::MarkerDefinition(marker));
 
                *to_infer_idx += 1;
 
            }
 

	
 
            to_infer.parts[*to_infer_idx] = template_part.clone();
 

	
 
            *to_infer_idx += 1;
 
            *template_idx += 1;
 
            return Some(depth_change);
 
        }
 

	
 
        // Inference of a completely unknown type
 
        if *to_infer_part == ITP::Unknown {
 
            // template part is different, so cannot be unknown, hence copy the
 
            // entire subtree
 
            let template_end_idx = Self::find_subtree_end_idx(template_parts, *template_idx);
 
            to_infer.parts[*to_infer_idx] = template_part.clone();
 
            *to_infer_idx += 1;
 
            for insert_idx in (*template_idx + 1)..template_end_idx {
 
                to_infer.parts.insert(*to_infer_idx, template_parts[insert_idx].clone());
 
            let erase_offset = if let Some(marker) = template_definition_marker {
 
                to_infer.parts[*to_infer_idx] = ITP::MarkerDefinition(marker);
 
                *to_infer_idx += 1;
 
            }
 
                0
 
            } else {
 
                1
 
            };
 

	
 
            to_infer.parts.splice(
 
                *to_infer_idx..*to_infer_idx + erase_offset,
 
                template_parts[*template_idx..template_end_idx].iter().cloned()
 
            );
 
            *to_infer_idx += (template_end_idx - *template_idx);
 
            *template_idx = template_end_idx;
 

	
 
            // Note: by definition the LHS was Unknown and the RHS traversed a 
 
            // full subtree.
 
            return Some(-1);
 
        }
 

	
 
        None
 
    }
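// Two worked cases of the above (illustrative): inferring `IntegerLike` from a
// template part `Byte` overwrites the part in place and returns `Some(-1)`,
// since a single leaf was consumed; inferring `Unknown` from a template
// subtree `[Array, Int]` splices in the entire subtree and likewise returns
// `Some(-1)`, because a complete subtree of the template has been traversed.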
 

	
 
    /// Call that checks if the `to_check` part is compatible with the `infer`
 
    /// part. This essentially implements `infer_part_for_single_type` but skips
 
    /// over the matching parts.
 
    /// part. This is essentially a copy of `infer_part_for_single_type`, but
 
    /// without actually copying the type parts.
 
    fn check_part_for_single_type(
 
        to_check_parts: &[InferenceTypePart], to_check_idx: &mut usize,
 
        template_parts: &[InferenceTypePart], template_idx: &mut usize
 
    ) -> Option<i32> {
 
        use InferenceTypePart as ITP;
 

	
 
        let to_check_part = &to_check_parts[*to_check_idx];
 
        let template_part = &template_parts[*template_idx];
 

	
 
        // Checking programmer errors
 
        debug_assert_ne!(to_check_part, template_part);
 
        debug_assert!(!to_check_part.is_marker(), "marker encountered in 'to_check part'");
 
@@ -406,49 +426,49 @@ impl InferenceType {
 
        None
 
    }
 

	
 
    /// Attempts to infer types between two `InferenceType` instances. This 
 
    /// function is unsafe as it accepts pointers to work around Rust's 
 
    /// borrowing rules. The caller must ensure that the pointers are distinct.
 
    unsafe fn infer_subtrees_for_both_types(
 
        type_a: *mut InferenceType, start_idx_a: usize,
 
        type_b: *mut InferenceType, start_idx_b: usize
 
    ) -> DualInferenceResult {
 
        use InferenceTypePart as ITP;
 

	
 
        debug_assert!(!std::ptr::eq(type_a, type_b), "same inference types");
 
        debug_assert!(!std::ptr::eq(type_a, type_b), "encountered pointers to the same inference type");
 
        let type_a = &mut *type_a;
 
        let type_b = &mut *type_b;
 

	
 
        let mut modified_a = false;
 
        let mut modified_b = false;
 
        let mut idx_a = start_idx_a;
 
        let mut idx_b = start_idx_b;
 
        let mut depth = 1;
 

	
 
        while depth > 0 {
 
            // Advance indices if we encounter markers or equal parts
 
            let part_a = &type_a.parts[idx_a];
 
            let part_b = &type_b.parts[idx_b];
 
            
 
            if part_a == part_b {
 
                let depth_change = part_a.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, part_b.depth_change());
 
                idx_a += 1;
 
                idx_b += 1;
 
                continue;
 
            }
 
            if let ITP::Marker(_) = part_a { idx_a += 1; continue; }
 
            if let ITP::Marker(_) = part_b { idx_b += 1; continue; }
 
            if part_a.is_marker() { idx_a += 1; continue; }
 
            if part_b.is_marker() { idx_b += 1; continue; }
 

	
 
            // Types are not equal and are both not markers
 
            if let Some(depth_change) = Self::infer_part_for_single_type(type_a, &mut idx_a, &type_b.parts, &mut idx_b) {
 
                depth += depth_change;
 
                modified_a = true;
 
                continue;
 
            }
 
            if let Some(depth_change) = Self::infer_part_for_single_type(type_b, &mut idx_b, &type_a.parts, &mut idx_a) {
 
                depth += depth_change;
 
                modified_b = true;
 
                continue;
 
            }
 
@@ -567,83 +587,104 @@ impl InferenceType {
 
        }
 

	
 
        true
 
    }
 

	
 
    /// Performs the conversion of the inference type into a concrete type.
 
    /// Before calling this function you must make sure that no unspecified types
 
    /// (e.g. Unknown or IntegerLike) exist in the type.
 
    fn write_concrete_type(&self, concrete_type: &mut ConcreteType) {
 
        use InferenceTypePart as ITP;
 
        use ConcreteTypePart as CTP;
 

	
 
        // Make sure inference type is specified but concrete type is not yet specified
 
        debug_assert!(!self.parts.is_empty());
 
        debug_assert!(concrete_type.parts.is_empty());
 
        concrete_type.parts.reserve(self.parts.len());
 

	
 
        for part in &self.parts {
 
        let mut idx = 0;
 
        while idx < self.parts.len() {
 
            let part = &self.parts[idx];
 
            let converted_part = match part {
 
                ITP::Marker(_) => { continue; },
 
                ITP::MarkerDefinition(marker) => {
 
                    // Outer markers are converted to regular markers; we
 
                    // completely remove the type subtree that follows it
 
                    idx = InferenceType::find_subtree_end_idx(&self.parts, idx + 1);
 
                    concrete_type.parts.push(CTP::Marker(*marker));
 
                    continue;
 
                },
 
                ITP::MarkerBody(_) => {
 
                    // Inner markers are removed when writing to the concrete
 
                    // type.
 
                    idx += 1;
 
                    continue;
 
                },
 
                ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | ITP::ArrayLike | ITP::PortLike => {
 
                    debug_assert!(false, "Attempted to convert inference type part {:?} into concrete type", part);
 
                    unreachable!();
 
                    unreachable!("Attempted to convert inference type part {:?} into concrete type", part);
 
                },
 
                ITP::Void => CTP::Void,
 
                ITP::Message => CTP::Message,
 
                ITP::Bool => CTP::Bool,
 
                ITP::Byte => CTP::Byte,
 
                ITP::Short => CTP::Short,
 
                ITP::Int => CTP::Int,
 
                ITP::Long => CTP::Long,
 
                ITP::String => CTP::String,
 
                ITP::Array => CTP::Array,
 
                ITP::Slice => CTP::Slice,
 
                ITP::Input => CTP::Input,
 
                ITP::Output => CTP::Output,
 
                ITP::Instance(id, num) => CTP::Instance(*id, *num),
 
            };
 

	
 
            concrete_type.parts.push(converted_part);
 
            idx += 1;
 
        }
 
    }
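// Worked example (illustrative): an inferred `[Input, MarkerBody(0), Int]` is
// written as the concrete `[Input, Int]` (body markers are dropped), while
// `[MarkerDefinition(0), Int]` is written as just `[Marker(0)]`: the definition
// marker is kept and the subtree that follows it is omitted.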
 

	
 
    /// Writes a human-readable version of the type to a string. Mostly a
 
    /// function for internal use.
 
    fn write_display_name(
 
        buffer: &mut String, heap: &Heap, parts: &[InferenceTypePart], mut idx: usize
 
    ) -> usize {
 
        use InferenceTypePart as ITP;
 

	
 
        match &parts[idx] {
 
            ITP::Marker(_) => {
 
            ITP::MarkerDefinition(_) | ITP::MarkerBody(_) => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1)
 
            },
 
            ITP::Unknown => buffer.push_str("?"),
 
            ITP::NumberLike => buffer.push_str("num?"),
 
            ITP::IntegerLike => buffer.push_str("int?"),
 
            ITP::ArrayLike => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[?]");
 
            },
 
            ITP::PortLike => {
 
                buffer.push_str("port?<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            }
 
            ITP::Void => buffer.push_str("void"),
 
            ITP::Message => buffer.push_str("msg"),
 
            ITP::Bool => buffer.push_str("bool"),
 
            ITP::Byte => buffer.push_str("byte"),
 
            ITP::Short => buffer.push_str("short"),
 
            ITP::Int => buffer.push_str("int"),
 
            ITP::Long => buffer.push_str("long"),
 
            ITP::String => buffer.push_str("str"),
 
            ITP::Message => {
 
                buffer.push_str("msg<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Array => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[]");
 
            },
 
            ITP::Slice => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[..]");
 
            },
 
            ITP::Input => {
 
                buffer.push_str("in<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
@@ -695,25 +736,25 @@ struct InferenceTypeMarkerIter<'a> {
 
impl<'a> InferenceTypeMarkerIter<'a> {
 
    fn new(parts: &'a [InferenceTypePart]) -> Self {
 
        Self{ parts, idx: 0 }
 
    }
 
}
 

	
 
impl<'a> Iterator for InferenceTypeMarkerIter<'a> {
 
    type Item = (usize, &'a [InferenceTypePart]);
 

	
 
    fn next(&mut self) -> Option<Self::Item> {
 
        // Iterate until we find a marker
 
        while self.idx < self.parts.len() {
 
            if let InferenceTypePart::Marker(marker) = self.parts[self.idx] {
 
            if let InferenceTypePart::MarkerBody(marker) = self.parts[self.idx] {
 
                // Found a marker, find the subtree end
 
                let start_idx = self.idx + 1;
 
                let end_idx = InferenceType::find_subtree_end_idx(self.parts, start_idx);
 

	
 
                // Modify internal index, then return items
 
                self.idx = end_idx;
 
                return Some((marker, &self.parts[start_idx..end_idx]))
 
            }
 

	
 
            self.idx += 1;
 
        }
 

	
 
@@ -794,26 +835,40 @@ pub(crate) struct TypeResolvingVisitor {
 
}
 

	
 
// TODO: @rename used for calls and struct literals, maybe union literals?
 
struct ExtraData {
 
    /// Progression of polymorphic variables (if any)
 
    poly_vars: Vec<InferenceType>,
 
    /// Progression of types of call arguments or struct members
 
    embedded: Vec<InferenceType>,
 
    returned: InferenceType,
 
}
 

	
 
struct VarData {
 
    /// Type of the variable
 
    var_type: InferenceType,
 
    /// VariableExpressions that use the variable
 
    used_at: Vec<ExpressionId>,
 
    /// For channel statements we link to the other variable such that when one
 
    /// channel's interior type is resolved, we can also resolve the other one.
 
    linked_var: Option<VariableId>,
 
}
 

	
 
impl VarData {
 
    fn new_channel(var_type: InferenceType, other_port: VariableId) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: Some(other_port) }
 
    }
 
    fn new_local(var_type: InferenceType) -> Self {
 
        Self{ var_type, used_at: Vec::new(), linked_var: None }
 
    }
 
}
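// Sketch of the intended use (see `visit_local_channel_stmt` below): the two
// port variables declared by a channel statement are registered as linked, so
// that progress on one port's inner type can later be forwarded to the other:
//
//     var_types.insert(from_id, VarData::new_channel(from_type, to_id));
//     var_types.insert(to_id, VarData::new_channel(to_type, from_id));
//
// where `from_id`/`to_id` stand for the statement's two port variables.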
 

	
 
impl TypeResolvingVisitor {
 
    pub(crate) fn new() -> Self {
 
        TypeResolvingVisitor{
 
            definition_type: DefinitionType::None,
 
            poly_vars: Vec::new(),
 
            stmt_buffer: Vec::with_capacity(STMT_BUFFER_INIT_CAPACITY),
 
            expr_buffer: Vec::with_capacity(EXPR_BUFFER_INIT_CAPACITY),
 
            var_types: HashMap::new(),
 
            expr_types: HashMap::new(),
 
            extra_data: HashMap::new(),
 
@@ -879,93 +934,93 @@ impl TypeResolvingVisitor {
 
    }
 
}
 

	
 
impl Visitor2 for TypeResolvingVisitor {
 
    // Definitions
 

	
 
    fn visit_component_definition(&mut self, ctx: &mut Ctx, id: ComponentId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Component(id);
 

	
 
        let comp_def = &ctx.heap[id];
 
        debug_assert_eq!(comp_def.poly_vars.len(), self.poly_vars.len(), "component polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(80));
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting component '{}': {}", &String::from_utf8_lossy(&comp_def.identifier.value), id.0.index);
 
        debug_log!("{}", "-".repeat(80));
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        for param_id in comp_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, param.parser_type, true);
 
            debug_assert!(var_type.is_done, "expected component arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData{ var_type, used_at: Vec::new() });
 
            self.var_types.insert(param_id.upcast(), VarData::new_local(var_type));
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionId) -> VisitorResult {
 
        self.definition_type = DefinitionType::Function(id);
 

	
 
        let func_def = &ctx.heap[id];
 
        debug_assert_eq!(func_def.poly_vars.len(), self.poly_vars.len(), "function polyvars do not match imposed polyvars");
 

	
 
        debug_log!("{}", "-".repeat(80));
 
        debug_log!("{}", "-".repeat(50));
 
        debug_log!("Visiting function '{}': {}", &String::from_utf8_lossy(&func_def.identifier.value), id.0.index);
 
        debug_log!("{}", "-".repeat(80));
 
        debug_log!("{}", "-".repeat(50));
 

	
 
        for param_id in func_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, param.parser_type, true);
 
            debug_assert!(var_type.is_done, "expected function arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData{ var_type, used_at: Vec::new() });
 
            self.var_types.insert(param_id.upcast(), VarData::new_local(var_type));
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    // Statements
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        // Transfer statements for traversal
 
        let block = &ctx.heap[id];
 

	
 
        for stmt_id in block.statements.clone() {
 
            self.visit_stmt(ctx, stmt_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, ctx: &mut Ctx, id: MemoryStatementId) -> VisitorResult {
 
        let memory_stmt = &ctx.heap[id];
 

	
 
        let local = &ctx.heap[memory_stmt.variable];
 
        let var_type = self.determine_inference_type_from_parser_type(ctx, local.parser_type, true);
 
        self.var_types.insert(memory_stmt.variable.upcast(), VarData{ var_type, used_at: Vec::new() });
 
        self.var_types.insert(memory_stmt.variable.upcast(), VarData::new_local(var_type));
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, ctx: &mut Ctx, id: ChannelStatementId) -> VisitorResult {
 
        let channel_stmt = &ctx.heap[id];
 

	
 
        let from_local = &ctx.heap[channel_stmt.from];
 
        let from_var_type = self.determine_inference_type_from_parser_type(ctx, from_local.parser_type, true);
 
        self.var_types.insert(from_local.this.upcast(), VarData{ var_type: from_var_type, used_at: Vec::new() });
 
        self.var_types.insert(from_local.this.upcast(), VarData::new_channel(from_var_type, channel_stmt.to.upcast()));
 

	
 
        let to_local = &ctx.heap[channel_stmt.to];
 
        let to_var_type = self.determine_inference_type_from_parser_type(ctx, to_local.parser_type, true);
 
        self.var_types.insert(to_local.this.upcast(), VarData{ var_type: to_var_type, used_at: Vec::new() });
 
        self.var_types.insert(to_local.this.upcast(), VarData::new_channel(to_var_type, channel_stmt.from.upcast()));
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        let labeled_stmt = &ctx.heap[id];
 
        let substmt_id = labeled_stmt.body;
 
        self.visit_stmt(ctx, substmt_id)
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        let if_stmt = &ctx.heap[id];
 
@@ -1208,48 +1263,53 @@ macro_rules! debug_assert_ptrs_distinct {
 
    };
 
}
 

	
 
impl TypeResolvingVisitor {
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError2> {
 
        // Keep inferring until we can no longer make any progress
 
        while let Some(next_expr_id) = self.expr_queued.iter().next() {
 
            let next_expr_id = *next_expr_id;
 
            self.expr_queued.remove(&next_expr_id);
 
            self.progress_expr(ctx, next_expr_id)?;
 
        }
 

	
 
        // Should have inferred everything
 
        for (expr_id, expr_type) in self.expr_types.iter() {
 
        // Should have inferred everything. Check for this and optionally
 
        // auto-infer the remaining types
 
        for (expr_id, expr_type) in self.expr_types.iter_mut() {
 
            if !expr_type.is_done {
 
                // TODO: Auto-inference of integerlike types
 
                let expr = &ctx.heap[*expr_id];
 
                return Err(ParseError2::new_error(
 
                    &ctx.module.source, expr.position(),
 
                    &format!(
 
                        "Could not fully infer the type of this expression (got '{}')",
 
                        expr_type.display_name(&ctx.heap)
 
                    )
 
                ))
 
                // Auto-infer integerlike types to a regular int
 
                if expr_type.parts.len() == 1 && expr_type.parts[0] == InferenceTypePart::IntegerLike {
 
                    expr_type.parts[0] = InferenceTypePart::Int;
 
                } else {
 
                    let expr = &ctx.heap[*expr_id];
 
                    return Err(ParseError2::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Could not fully infer the type of this expression (got '{}')",
 
                            expr_type.display_name(&ctx.heap)
 
                        )
 
                    ))
 
                }
 
            }
 

	
 
            let concrete_type = ctx.heap[*expr_id].get_type_mut();
 
            expr_type.write_concrete_type(concrete_type);
 
        }
 

	
 
        // Check all things we need to monomorphize
 
        // TODO: Struct/enum/union monomorphization
 
        for (call_expr_id, extra_data) in self.extra_data.iter() {
 
            if extra_data.poly_vars.is_empty() { continue; }
 

	
 
            // We have a polymorph
 
            // Retrieve polymorph variable specification
 
            let mut monomorph_types = Vec::with_capacity(extra_data.poly_vars.len());
 
            for (poly_idx, poly_type) in extra_data.poly_vars.iter().enumerate() {
 
                if !poly_type.is_done {
 
                    // TODO: Single clean function for function signatures and polyvars.
 
                    // TODO: Better error message
 
                    let expr = &ctx.heap[*call_expr_id];
 
                    return Err(ParseError2::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Could not fully infer the type of polymorphic variable {} of this expression (got '{}')",
 
                            poly_idx, poly_type.display_name(&ctx.heap)
 
                        )
 
@@ -1259,50 +1319,45 @@ impl TypeResolvingVisitor {
 
                let mut concrete_type = ConcreteType::default();
 
                poly_type.write_concrete_type(&mut concrete_type);
 
                monomorph_types.insert(poly_idx, concrete_type);
 
            }
 

	
 
            // Resolve to call expression's definition
 
            let call_expr = if let Expression::Call(call_expr) = &ctx.heap[*call_expr_id] {
 
                call_expr
 
            } else {
 
                todo!("implement different kinds of polymorph expressions");
 
            };
 

	
 
            // Add to type table if not yet typechecked
 
            if let Method::Symbolic(symbolic) = &call_expr.method {
 
                let definition_id = symbolic.definition.unwrap();
 
                let root_id = ctx.types
 
                    .get_base_definition(&definition_id)
 
                    .unwrap()
 
                    .ast_root;
 

	
 
                queue.push(ResolveQueueElement{
 
                    root_id,
 
                    definition_id,
 
                    monomorph_types,
 
                })
 
                if !ctx.types.has_monomorph(&definition_id, &monomorph_types) {
 
                    let root_id = ctx.types
 
                        .get_base_definition(&definition_id)
 
                        .unwrap()
 
                        .ast_root;
 

	
 
                    // Pre-emptively add the monomorph to the type table, but
 
                    // we still need to perform typechecking on it
 
                    ctx.types.add_monomorph(&definition_id, monomorph_types.clone());
 
                    queue.push(ResolveQueueElement {
 
                        root_id,
 
                        definition_id,
 
                        monomorph_types,
 
                    })
 
                }
 
            }
 
        }
 

	
 
        // Finally, if the currently resolved definition is a monomorph, then we
 
        // add it to the type table
 
        if !self.poly_vars.is_empty() {
 
            let definition_id = match &self.definition_type {
 
                DefinitionType::None => unreachable!(),
 
                DefinitionType::Function(id) => id.upcast(),
 
                DefinitionType::Component(id) => id.upcast(),
 
            };
 
            ctx.types.instantiate_monomorph(&definition_id, &self.poly_vars)
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError2> {
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let id = expr.this;
 
                self.progress_assignment_expr(ctx, id)
 
            },
 
            Expression::Conditional(expr) => {
 
                let id = expr.this;
 
                self.progress_conditional_expr(ctx, id)
 
@@ -1687,27 +1742,27 @@ impl TypeResolvingVisitor {
 
        debug_log!(" * After:");
 
        debug_log!("   - Expr type [{}]: {}", expr_progress, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 
        if expr_progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_constant_expr(&mut self, ctx: &mut Ctx, id: ConstantExpressionId) -> Result<(), ParseError2> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let template = match &expr.value {
 
            Constant::Null => &MESSAGE_TEMPLATE,
 
            Constant::Integer(_) => &INTEGERLIKE_TEMPLATE,
 
            Constant::True | Constant::False => &BOOL_TEMPLATE,
 
            Constant::Null => &MESSAGE_TEMPLATE[..],
 
            Constant::Integer(_) => &INTEGERLIKE_TEMPLATE[..],
 
            Constant::True | Constant::False => &BOOL_TEMPLATE[..],
 
            Constant::Character(_) => todo!("character literals")
 
        };
 

	
 
        let progress = self.apply_forced_constraint(ctx, upcast_id, template)?;
 
        if progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    // TODO: @cleanup, see how this can be cleaned up once I implement
 
    //  polymorphic struct/enum/union literals. These likely follow the same
 
    //  pattern as here.
 
@@ -1727,66 +1782,66 @@ impl TypeResolvingVisitor {
 
        debug_log!("   - Expr type: {}", self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 
        debug_log!(" * During (inferring types from arguments and return type):");
 

	
 
        // Check if we can make progress using the arguments and/or return types
 
        // while keeping track of the polyvars we've extended
 
        let mut poly_progress = HashSet::new();
 
        debug_assert_eq!(extra.embedded.len(), expr.arguments.len());
 
        let mut poly_infer_error = false;
 

	
 
        for (arg_idx, arg_id) in expr.arguments.clone().into_iter().enumerate() {
 
            let signature_type = &mut extra.embedded[arg_idx];
 
            let argument_type: *mut _ = self.expr_types.get_mut(&arg_id).unwrap();
 
            let (progress_sig, progress_arg) = Self::apply_equal2_constraint_types(
 
                ctx, upcast_id, signature_type, 0, argument_type, 0
 
            let (progress_sig, progress_arg) = Self::apply_equal2_signature_constraint(
 
                ctx, upcast_id, Some(arg_id), signature_type, 0, argument_type, 0
 
            )?;
 

	
 
            debug_log!("   - Arg {} type | sig: {}, arg: {}", arg_idx, signature_type.display_name(&ctx.heap), unsafe{&*argument_type}.display_name(&ctx.heap));
 

	
 
            if progress_sig {
 
                // Progressed signature, so also apply inference to the 
 
                // polymorph types using the markers 
 
                debug_assert!(signature_type.has_marker, "progress on signature argument type without markers");
 
                for (poly_idx, poly_section) in signature_type.marker_iter() {
 
                debug_assert!(signature_type.has_body_marker, "progress on signature argument type without markers");
 
                for (poly_idx, poly_section) in signature_type.body_marker_iter() {
 
                    let polymorph_type = &mut extra.poly_vars[poly_idx];
 
                    match Self::apply_forced_constraint_types(
 
                        polymorph_type, 0, poly_section, 0
 
                    ) {
 
                        Ok(true) => { poly_progress.insert(poly_idx); },
 
                        Ok(false) => {},
 
                        Err(()) => { poly_infer_error = true; }
 
                    }
 

	
 
                    debug_log!("   - Poly {} type | sig: {}, arg: {}", poly_idx, polymorph_type.display_name(&ctx.heap), InferenceType::partial_display_name(&ctx.heap, poly_section));
 
                }
 
            }
 
            if progress_arg {
 
                // Progressed argument expression
 
                self.expr_queued.insert(arg_id);
 
            }
 
        }
 

	
 
        // Do the same for the return type
 
        let signature_type = &mut extra.returned;
 
        let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
        let (progress_sig, progress_expr) = Self::apply_equal2_constraint_types(
 
            ctx, upcast_id, signature_type, 0, expr_type, 0
 
        let (progress_sig, progress_expr) = Self::apply_equal2_signature_constraint(
 
            ctx, upcast_id, None, signature_type, 0, expr_type, 0
 
        )?;
 

	
 
        debug_log!("   - Ret type | sig: {}, arg: {}", signature_type.display_name(&ctx.heap), unsafe{&*expr_type}.display_name(&ctx.heap));
 

	
 
        if progress_sig {
 
            // As above: apply inference to polyargs as well
 
            debug_assert!(signature_type.has_marker, "progress on signature return type without markers");
 
            for (poly_idx, poly_section) in signature_type.marker_iter() {
 
            debug_assert!(signature_type.has_body_marker, "progress on signature return type without markers");
 
            for (poly_idx, poly_section) in signature_type.body_marker_iter() {
 
                let polymorph_type = &mut extra.poly_vars[poly_idx];
 
                match Self::apply_forced_constraint_types(
 
                    polymorph_type, 0, poly_section, 0
 
                ) {
 
                    Ok(true) => { poly_progress.insert(poly_idx); },
 
                    Ok(false) => {},
 
                    Err(()) => { poly_infer_error = true; }
 
                }
 
                debug_log!("   - Poly {} type | sig: {}, arg: {}", poly_idx, polymorph_type.display_name(&ctx.heap), InferenceType::partial_display_name(&ctx.heap, poly_section));
 
            }
 
        }
 
        if progress_expr {
 
@@ -1806,64 +1861,68 @@ impl TypeResolvingVisitor {
 
        // type should always succeed.
 
        debug_log!(" * During (reinferring from progress polyvars):");
 
        // TODO: @performance If the algorithm is changed to be more "on demand
 
        //  argument re-evaluation", instead of "all-argument re-evaluation",
 
        //  then this is no longer true
 
        for poly_idx in poly_progress.into_iter() {
 
            // For each polymorphic argument: first extend the signature type,
 
            // then reapply the equal2 constraint to the expressions
 
            let poly_type = &extra.poly_vars[poly_idx];
 
            for (arg_idx, sig_type) in extra.embedded.iter_mut().enumerate() {
 
                let mut seek_idx = 0;
 
                let mut modified_sig = false;
 
                while let Some((start_idx, end_idx)) = sig_type.find_subtree_idx_for_marker(poly_idx, seek_idx) {
 
                while let Some((start_idx, end_idx)) = sig_type.find_subtree_idx_for_part(
 
                    InferenceTypePart::MarkerBody(poly_idx), seek_idx
 
                ) {
 
                    let modified_at_marker = Self::apply_forced_constraint_types(
 
                        sig_type, start_idx, &poly_type.parts, 0
 
                    ).unwrap();
 
                    modified_sig = modified_sig || modified_at_marker;
 
                    seek_idx = end_idx;
 
                }
 

	
 
                if !modified_sig {
 
                    debug_log!("   - Poly {} | Arg {} type | signature has not changed", poly_idx, arg_idx);
 
                    continue;
 
                }
 

	
 
                // Part of signature was modified, so update expression used as
 
                // argument as well
 
                let arg_expr_id = expr.arguments[arg_idx];
 
                let arg_type: *mut _ = self.expr_types.get_mut(&arg_expr_id).unwrap();
 
                let (progress_arg, _) = Self::apply_equal2_constraint_types(
 
                    ctx, arg_expr_id, arg_type, 0, sig_type, 0
 
                let (_, progress_arg) = Self::apply_equal2_signature_constraint(
 
                    ctx, arg_expr_id, Some(arg_expr_id), sig_type, 0, arg_type, 0
 
                ).expect("no inference error at argument type");
 
                if progress_arg { self.expr_queued.insert(arg_expr_id); }
 
                debug_log!("   - Poly {} | Arg {} type | sig: {}, arg: {}", poly_idx, arg_idx, sig_type.display_name(&ctx.heap), unsafe{&*arg_type}.display_name(&ctx.heap));
 
            }
 

	
 
            // Again: do the same for the return type
 
            let sig_type = &mut extra.returned;
 
            let mut seek_idx = 0;
 
            let mut modified_sig = false;
 
            while let Some((start_idx, end_idx)) = sig_type.find_subtree_idx_for_marker(poly_idx, seek_idx) {
 
            while let Some((start_idx, end_idx)) = sig_type.find_subtree_idx_for_part(
 
                InferenceTypePart::MarkerBody(poly_idx), seek_idx
 
            ) {
 
                let modified_at_marker = Self::apply_forced_constraint_types(
 
                    sig_type, start_idx, &poly_type.parts, 0
 
                ).unwrap();
 
                modified_sig = modified_sig || modified_at_marker;
 
                seek_idx = end_idx;
 
            }
 

	
 
            if modified_sig {
 
                let ret_type = self.expr_types.get_mut(&upcast_id).unwrap();
 
                let (progress_ret, _) = Self::apply_equal2_constraint_types(
 
                    ctx, upcast_id, ret_type, 0, sig_type, 0
 
                let (_, progress_ret) = Self::apply_equal2_signature_constraint(
 
                    ctx, upcast_id, None, sig_type, 0, ret_type, 0
 
                ).expect("no inference error at return type");
 
                if progress_ret {
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 
                debug_log!("   - Poly {} | Ret type | sig: {}, arg: {}", poly_idx, sig_type.display_name(&ctx.heap), ret_type.display_name(&ctx.heap));
 
            } else {
 
                debug_log!("   - Poly {} | Ret type | signature has not changed", poly_idx);
 
            }
 
        }
 

	
 
@@ -1902,29 +1961,75 @@ impl TypeResolvingVisitor {
 
                &ctx.module.source, var_expr.position,
 
                &format!(
 
                    "But inferred to have incompatible type '{}' here",
 
                    expr_type.display_name(&ctx.heap)
 
                )
 
            ))
 
        }
 

	
 
        let progress_var = infer_res.modified_lhs();
 
        let progress_expr = infer_res.modified_rhs();
 

	
 
        if progress_var {
 
            // Let other variable expressions using this type progress as well
 
            for other_expr in var_data.used_at.iter() {
 
                if *other_expr != upcast_id {
 
                    self.expr_queued.insert(*other_expr);
 
                }
 
            }
 

	
 
            // Let a linked port know that our type has updated
 
            if let Some(linked_id) = var_data.linked_var {
 
                // Only perform one-way inference to prevent updating our type, as this
 
                // would lead to an inconsistency
 
                let var_type: *mut _ = &mut var_data.var_type;
 
                let mut link_data = self.var_types.get_mut(&linked_id).unwrap();
 

	
 
                debug_assert!(
 
                    unsafe{&*var_type}.parts[0] == InferenceTypePart::Input ||
 
                    unsafe{&*var_type}.parts[0] == InferenceTypePart::Output
 
                );
 
                debug_assert!(
 
                    link_data.var_type.parts[0] == InferenceTypePart::Input ||
 
                    link_data.var_type.parts[0] == InferenceTypePart::Output
 
                );
 
                match InferenceType::infer_subtree_for_single_type(&mut link_data.var_type, 1, &unsafe{&*var_type}.parts, 1) {
 
                    SingleInferenceResult::Modified => {
 
                        for other_expr in &link_data.used_at {
 
                            self.expr_queued.insert(*other_expr);
 
                        }
 
                    },
 
                    SingleInferenceResult::Unmodified => {},
 
                    SingleInferenceResult::Incompatible => {
 
                        let var_data = self.var_types.get(&var_id).unwrap();
 
                        let link_data = self.var_types.get(&linked_id).unwrap();
 
                        let var_decl = &ctx.heap[var_id];
 
                        let link_decl = &ctx.heap[linked_id];
 

	
 
                        return Err(ParseError2::new_error(
 
                            &ctx.module.source, var_decl.position(),
 
                            &format!(
 
                                "Conflicting types for this variable, assigned the type '{}'",
 
                                var_data.var_type.display_name(&ctx.heap)
 
                            )
 
                        ).with_postfixed_info(
 
                            &ctx.module.source, link_decl.position(),
 
                            &format!(
 
                                "Because it is incompatible with this variable, assigned the type '{}'",
 
                                link_data.var_type.display_name(&ctx.heap)
 
                            )
 
                        ));
 
                    }
 
                }
 
            }
 
        }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        debug_log!(" * After:");
 
        debug_log!("   - Var  type [{}]: {}", progress_var, self.var_types.get(&var_id).unwrap().var_type.display_name(&ctx.heap));
 
        debug_log!("   - Expr type [{}]: {}", progress_expr, self.expr_types.get(&upcast_id).unwrap().display_name(&ctx.heap));
 

	
 

	
 
        Ok(())
 
    }
 

	
 
    fn queue_expr_parent(&mut self, ctx: &Ctx, expr_id: ExpressionId) {
 
@@ -1984,41 +2089,58 @@ impl TypeResolvingVisitor {
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            arg1_type, arg1_start_idx,
 
            arg2_type, arg2_start_idx
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        Ok((infer_res.modified_lhs(), infer_res.modified_rhs()))
 
    }
 

	
 
    fn apply_equal2_constraint_types(
 
        ctx: &Ctx, expr_id: ExpressionId,
 
        type1: *mut InferenceType, type1_start_idx: usize, 
 
        type2: *mut InferenceType, type2_start_idx: usize
 
    fn apply_equal2_signature_constraint(
 
        ctx: &Ctx, outer_expr_id: ExpressionId, expr_id: Option<ExpressionId>,
 
        signature_type: *mut InferenceType, signature_start_idx: usize,
 
        expression_type: *mut InferenceType, expression_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError2> {
 
        debug_assert_ptrs_distinct!(type1, type2);
 
        debug_assert_ptrs_distinct!(signature_type, expression_type);
 
        let infer_res = unsafe { 
 
            InferenceType::infer_subtrees_for_both_types(
 
                type1, type1_start_idx, 
 
                type2, type2_start_idx
 
                signature_type, signature_start_idx,
 
                expression_type, expression_start_idx
 
            ) 
 
        };
 

	
 
        if infer_res == DualInferenceResult::Incompatible {
 
            // TODO: Check if I still need to use this
 
            let outer_position = ctx.heap[outer_expr_id].position();
 
            let (position_name, position) = match expr_id {
 
                Some(expr_id) => ("argument's", ctx.heap[expr_id].position()),
 
                None => ("return type's", outer_position)
 
            };
 
            let (signature_display_type, expression_display_type) = unsafe { (
 
                (&*signature_type).display_name(&ctx.heap),
 
                (&*expression_type).display_name(&ctx.heap)
 
            ) };
 

	
 
            return Err(ParseError2::new_error(
 
                &ctx.module.source, ctx.heap[expr_id].position(),
 
                "TODO: Write me, apply_equal2_constraint_types"
 
                &ctx.module.source, outer_position,
 
                "Failed to fully resolve the types of this expression"
 
            ).with_postfixed_info(
 
                &ctx.module.source, position,
 
                &format!(
 
                    "Because the {} signature has been resolved to '{}', but the expression has been resolved to '{}'",
 
                    position_name, signature_display_type, expression_display_type
 
                )
 
            ));
 
        }
 

	
 
        Ok((infer_res.modified_lhs(), infer_res.modified_rhs()))
 
    }
 

	
 
    /// Applies a type constraint that expects all three provided types to be
 
    /// equal. In case we can make progress in inferring the types then we
 
    /// attempt to do so. If the call is successful then the composition of all
 
    /// types is made equal.
 
    fn apply_equal3_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
@@ -2040,25 +2162,25 @@ impl TypeResolvingVisitor {
 
        }
 

	
 
        let args_res = unsafe{
 
            InferenceType::infer_subtrees_for_both_types(arg1_type, start_idx, arg2_type, start_idx) };
 
        if args_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        // If all types are compatible, but the second call caused the arg1_type
 
        // to be expanded, then we must also assign this to expr_type.
 
        let mut progress_expr = expr_res.modified_lhs();
 
        let mut progress_arg1 = expr_res.modified_rhs();
 
        let mut progress_arg2 = args_res.modified_rhs();
 
        let progress_arg2 = args_res.modified_rhs();
 

	
 
        if args_res.modified_lhs() { 
 
            unsafe {
 
                let end_idx = InferenceType::find_subtree_end_idx(&(*arg2_type).parts, start_idx);
 
                let subtree = &((*arg2_type).parts[start_idx..end_idx]);
 
                (*expr_type).replace_subtree(start_idx, subtree);
 
            }
 
            progress_expr = true;
 
            progress_arg1 = true;
 
        }
 

	
 
        Ok((progress_expr, progress_arg1, progress_arg2))
 
@@ -2204,46 +2326,49 @@ impl TypeResolvingVisitor {
 
        // Handle the polymorphic variables themselves
 
        let mut poly_vars = Vec::with_capacity(call.poly_args.len());
 
        for poly_arg_type_id in call.poly_args.clone() { // TODO: @performance
 
            poly_vars.push(self.determine_inference_type_from_parser_type(ctx, poly_arg_type_id, true));
 
        }
 

	
 
        // Handle the arguments
 
        // TODO: @cleanup: Maybe factor this out for reuse in the validator/linker, should also
 
        //  make the code slightly more robust.
 
        let (embedded_types, return_type) = match &call.method {
 
            Method::Create => {
 
                // Not polymorphic
 
                unreachable!("insert initial polymorph data for builtin 'create()' call")
 
                (
 
                    vec![InferenceType::new(false, true, vec![ITP::Int])],
 
                    InferenceType::new(false, true, vec![ITP::Message, ITP::Byte])
 
                )
 
            },
 
            Method::Fires => {
 
                // bool fires<T>(PortLike<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::PortLike, ITP::Marker(0), ITP::Unknown])],
 
                    vec![InferenceType::new(true, false, vec![ITP::PortLike, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(false, true, vec![ITP::Bool])
 
                )
 
            },
 
            Method::Get => {
 
                // T get<T>(input<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::Input, ITP::Marker(0), ITP::Unknown])],
 
                    InferenceType::new(true, false, vec![ITP::Marker(0), ITP::Unknown])
 
                    vec![InferenceType::new(true, false, vec![ITP::Input, ITP::MarkerBody(0), ITP::Unknown])],
 
                    InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                )
 
            },
 
            Method::Put => {
 
                // void Put<T>(output<T> port, T msg)
 
                (
 
                    vec![
 
                        InferenceType::new(true, false, vec![ITP::Output, ITP::Marker(0), ITP::Unknown]),
 
                        InferenceType::new(true, false, vec![ITP::Marker(0), ITP::Unknown])
 
                        InferenceType::new(true, false, vec![ITP::Output, ITP::MarkerBody(0), ITP::Unknown]),
 
                        InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown])
 
                    ],
 
                    InferenceType::new(false, true, vec![ITP::Void])
 
                )
 
            }
 
            Method::Symbolic(symbolic) => {
 
                let definition = &ctx.heap[symbolic.definition.unwrap()];
 

	
 
                match definition {
 
                    Definition::Component(definition) => {
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
@@ -2299,25 +2424,29 @@ impl TypeResolvingVisitor {
 

	
 
        let mut to_consider = VecDeque::with_capacity(16);
 
        to_consider.push_back(parser_type_id);
 

	
 
        let mut infer_type = Vec::new();
 
        let mut has_inferred = false;
 
        let mut has_markers = false;
 

	
 
        while !to_consider.is_empty() {
 
            let parser_type_id = to_consider.pop_front().unwrap();
 
            let parser_type = &ctx.heap[parser_type_id];
 
            match &parser_type.variant {
 
                PTV::Message => { infer_type.push(ITP::Message); },
 
                PTV::Message => {
 
                    // TODO: @types Remove the Message -> Byte hack at some point...
 
                    infer_type.push(ITP::Message);
 
                    infer_type.push(ITP::Byte);
 
                },
 
                PTV::Bool => { infer_type.push(ITP::Bool); },
 
                PTV::Byte => { infer_type.push(ITP::Byte); },
 
                PTV::Short => { infer_type.push(ITP::Short); },
 
                PTV::Int => { infer_type.push(ITP::Int); },
 
                PTV::Long => { infer_type.push(ITP::Long); },
 
                PTV::String => { infer_type.push(ITP::String); },
 
                PTV::IntegerLiteral => { unreachable!("integer literal type on variable type"); },
 
                PTV::Inferred => {
 
                    infer_type.push(ITP::Unknown);
 
                    has_inferred = true;
 
                },
 
                PTV::Array(subtype_id) => {
 
@@ -2327,38 +2456,41 @@ impl TypeResolvingVisitor {
 
                PTV::Input(subtype_id) => {
 
                    infer_type.push(ITP::Input);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Output(subtype_id) => {
 
                    infer_type.push(ITP::Output);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Symbolic(symbolic) => {
 
                    debug_assert!(symbolic.variant.is_some(), "symbolic variant not yet determined");
 
                    match symbolic.variant.as_ref().unwrap() {
 
                        SymbolicParserTypeVariant::PolyArg(_, arg_idx) => {
 
                            // Retrieve concrete type of argument and add it to
 
                            // the inference type.
 
                            let arg_idx = *arg_idx;
 
                            debug_assert!(symbolic.poly_args.is_empty()); // TODO: @hkt
 

	
 
                            if parser_type_in_body {
 
                                // Polymorphic argument refers to definition's
 
                                // polymorphic variables
 
                                debug_assert!(arg_idx < self.poly_vars.len());
 
                                debug_assert!(!self.poly_vars[arg_idx].has_marker());
 
                                infer_type.push(ITP::MarkerDefinition(arg_idx));
 
                                for concrete_part in &self.poly_vars[arg_idx].parts {
 
                                    infer_type.push(ITP::from(*concrete_part));
 
                                }
 
                            } else {
 
                                // Polymorphic argument has to be inferred
 
                                has_markers = true;
 
                                has_inferred = true;
 
                                infer_type.push(ITP::Marker(arg_idx));
 
                                infer_type.push(ITP::MarkerBody(arg_idx));
 
                                infer_type.push(ITP::Unknown);
 
                            }
 
                        },
 
                        SymbolicParserTypeVariant::Definition(definition_id) => {
 
                            // TODO: @cleanup
 
                            if cfg!(debug_assertions) {
 
                                let definition = &ctx.heap[*definition_id];
 
                                debug_assert!(definition.is_struct() || definition.is_enum()); // TODO: @function_ptrs
 
                                let num_poly = match definition {
 
                                    Definition::Struct(v) => v.poly_vars.len(),
 
                                    Definition::Enum(v) => v.poly_vars.len(),
 
                                    _ => unreachable!(),
 
@@ -2460,30 +2592,30 @@ impl TypeResolvingVisitor {
 
    /// caused by a pair of inference types (which may come from arguments or
 
    /// the return type) having two different inferred values for that
 
    /// polymorphic variable.
 
    ///
 
    /// So we find this pair (which may be a argument type or return type
 
    /// conflicting with itself) and construct the error using it.
 
    fn construct_poly_arg_error(
 
        &self, ctx: &Ctx, call_id: CallExpressionId
 
    ) -> ParseError2 {
 
        // Helper function to check for polymorph mismatch between two inference
 
        // types.
 
        fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(usize, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            if !type_a.has_marker || !type_b.has_marker {
 
            if !type_a.has_body_marker || !type_b.has_body_marker {
 
                return None
 
            }
 

	
 
            for (marker_a, section_a) in type_a.marker_iter() {
 
                for (marker_b, section_b) in type_b.marker_iter() {
 
            for (marker_a, section_a) in type_a.body_marker_iter() {
 
                for (marker_b, section_b) in type_b.body_marker_iter() {
 
                    if marker_a != marker_b {
 
                        // Not the same polymorphic variable
 
                        continue;
 
                    }
 

	
 
                    if !InferenceType::check_subtrees(section_a, 0, section_b, 0) {
 
                        // Not compatible
 
                        return Some((marker_a, section_a, section_b))
 
                    }
 
                }
 
            }
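For a concrete (purely illustrative) picture of the mismatch being searched for here: suppose a hypothetical call to `byte sum<T>(T a, T b)` where the first argument was inferred as `byte` and the second as `string`. Both argument types then carry body marker 0, but their marked sections are incompatible.

use InferenceTypePart as ITP;
// Hypothetical marked sections of the two argument types:
let section_a = [ITP::Byte];   // `T` seen as byte via the first argument
let section_b = [ITP::String]; // `T` seen as string via the second argument
// InferenceType::check_subtrees(&section_a, 0, &section_b, 0) would fail,
// so has_poly_mismatch returns Some((0, &section_a, &section_b)) and the
// error message is constructed from that pair.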
 

	
 
@@ -2612,43 +2744,43 @@ impl TypeResolvingVisitor {
 
mod tests {
 
    use super::*;
 
    use crate::protocol::arena::Id;
 
    use InferenceTypePart as ITP;
 
    use InferenceType as IT;
 

	
 
    #[test]
 
    fn test_single_part_inference() {
 
        // lhs argument inferred from rhs
 
        let pairs = [
 
            (ITP::NumberLike, ITP::Byte),
 
            (ITP::IntegerLike, ITP::Int),
 
            (ITP::Unknown, ITP::Message),
 
            (ITP::Unknown, ITP::Long),
 
            (ITP::Unknown, ITP::String)
 
        ];
 
        for (lhs, rhs) in pairs.iter() {
 
            // Using infer-both
 
            let mut lhs_type = IT::new(false, false, vec![lhs.clone()]);
 
            let mut rhs_type = IT::new(false, true, vec![rhs.clone()]);
 
            let result = unsafe{ IT::infer_subtrees_for_both_types(
 
                &mut lhs_type, 0, &mut rhs_type, 0
 
            ) };
 
            assert_eq!(DualInferenceResult::First, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 

	
 
            // Using infer-single
 
            let mut lhs_type = IT::new(false, false, vec![lhs.clone()]);
 
            let mut rhs_type = IT::new(false, true, vec![rhs.clone()]);
 
            let result = unsafe{ IT::infer_subtree_for_single_type(
 
            let rhs_type = IT::new(false, true, vec![rhs.clone()]);
 
            let result = IT::infer_subtree_for_single_type(
 
                &mut lhs_type, 0, &rhs_type.parts, 0
 
            ) };
 
            );
 
            assert_eq!(SingleInferenceResult::Modified, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 
        }
 
    }
 

	
 
    #[test]
 
    fn test_multi_part_inference() {
 
        let pairs = [
 
            (vec![ITP::ArrayLike, ITP::NumberLike], vec![ITP::Slice, ITP::Byte]),
 
            (vec![ITP::Unknown], vec![ITP::Input, ITP::Array, ITP::String]),
 
            (vec![ITP::PortLike, ITP::Int], vec![ITP::Input, ITP::Int]),
 
            (vec![ITP::Unknown], vec![ITP::Output, ITP::Int]),
 
@@ -2659,21 +2791,21 @@ mod tests {
 
        ];
 

	
 
        for (lhs, rhs) in pairs.iter() {
 
            let mut lhs_type = IT::new(false, false, lhs.clone());
 
            let mut rhs_type = IT::new(false, false, rhs.clone());
 
            let result = unsafe{ IT::infer_subtrees_for_both_types(
 
                &mut lhs_type, 0, &mut rhs_type, 0
 
            ) };
 
            assert_eq!(DualInferenceResult::First, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 

	
 
            let mut lhs_type = IT::new(false, false, lhs.clone());
 
            let mut rhs_type = IT::new(false, false, rhs.clone());
 
            let result = unsafe{ IT::infer_subtree_for_single_type(
 
            let rhs_type = IT::new(false, false, rhs.clone());
 
            let result = IT::infer_subtree_for_single_type(
 
                &mut lhs_type, 0, &rhs_type.parts, 0
 
            ) };
 
            );
 
            assert_eq!(SingleInferenceResult::Modified, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts)
 
        }
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/type_table.rs
Show inline comments
 
@@ -107,24 +107,40 @@ impl std::fmt::Display for TypeClass {
 
/// marker and does not influence the bytesize of the type.
 
pub struct DefinedType {
 
    pub(crate) ast_root: RootId,
 
    pub(crate) ast_definition: DefinitionId,
 
    pub(crate) definition: DefinedTypeVariant,
 
    pub(crate) poly_args: Vec<PolyArg>,
 
    pub(crate) is_polymorph: bool,
 
    pub(crate) is_pointerlike: bool,
 
    // TODO: @optimize
 
    pub(crate) monomorphs: Vec<Vec<ConcreteType>>,
 
}
 

	
 
impl DefinedType {
 
    fn add_monomorph(&mut self, types: Vec<ConcreteType>) {
 
        debug_assert!(!self.has_monomorph(&types), "monomorph already exists");
 
        self.monomorphs.push(types);
 
    }
 

	
 
    fn has_monomorph(&self, types: &Vec<ConcreteType>) -> bool {
 
        debug_assert_eq!(self.poly_args.len(), types.len(), "mismatch in number of polymorphic types");
 
        for monomorph in &self.monomorphs {
 
            if monomorph == types { return true; }
 
        }
 

	
 
        return false;
 
    }
 
}
 

	
 
pub enum DefinedTypeVariant {
 
    Enum(EnumType),
 
    Union(UnionType),
 
    Struct(StructType),
 
    Function(FunctionType),
 
    Component(ComponentType)
 
}
 

	
 
pub struct PolyArg {
 
    identifier: Identifier,
 
    /// Whether the polymorphic argument is used directly in the definition of
 
    /// the type (not including bodies of function/component types)
 
@@ -179,26 +195,26 @@ pub struct StructType {
 

	
 
pub struct StructField {
 
    identifier: Identifier,
 
    parser_type: ParserTypeId,
 
}
 

	
 
pub struct FunctionType {
 
    pub return_type: ParserTypeId,
 
    pub arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct ComponentType {
 
    variant: ComponentVariant,
 
    arguments: Vec<FunctionArgument>
 
    pub variant: ComponentVariant,
 
    pub arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct FunctionArgument {
 
    identifier: Identifier,
 
    parser_type: ParserTypeId,
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Type table
 
//------------------------------------------------------------------------------
 

	
 
// TODO: @cleanup Do I really need this, doesn't make the code that much cleaner
 
@@ -314,37 +330,43 @@ impl TypeTable {
 
        Ok(table)
 
    }
 

	
 
    /// Retrieves base definition from type table. We must be able to retrieve
 
    /// it as we resolve all base types upon type table construction (for now).
 
    /// However, in the future we might do on-demand type resolving, so return
 
    /// an option anyway
 
    pub(crate) fn get_base_definition(&self, definition_id: &DefinitionId) -> Option<&DefinedType> {
 
        self.lookup.get(&definition_id)
 
    }
 

	
 
    /// Instantiates a monomorph for a given base definition.
 
    pub(crate) fn instantiate_monomorph(&mut self, definition_id: &DefinitionId, monomorph: &Vec<ConcreteType>) {
 
    pub(crate) fn add_monomorph(&mut self, definition_id: &DefinitionId, types: Vec<ConcreteType>) {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to instantiate monomorph of definition unknown to type table"
 
        );
 

	
 
        let definition = self.lookup.get_mut(definition_id).unwrap();
 
        debug_assert_eq!(
 
            monomorph.len(), definition.poly_args.len(),
 
            "attempting to instantiate monomorph with {} types, but definition requires {}",
 
            monomorph.len(), definition.poly_args.len()
 
        definition.add_monomorph(types);
 
    }
 

	
 
    /// Checks if a given definition already has a specific monomorph
 
    pub(crate) fn has_monomorph(&mut self, definition_id: &DefinitionId, types: &Vec<ConcreteType>) -> bool {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to check monomorph existence of definition unknown to type table"
 
        );
 

	
 
        definition.monomorphs.push(monomorph.clone())
 
        let definition = self.lookup.get(definition_id).unwrap();
 
        definition.has_monomorph(types)
 
    }
 

	
 
    /// This function will resolve just the basic definition of the type, it
 
    /// will not handle any of the monomorphized instances of the type.
 
    fn resolve_base_definition<'a>(&'a mut self, ctx: &mut TypeCtx, definition_id: DefinitionId) -> Result<(), ParseError2> {
 
        // Check if we have already resolved the base definition
 
        if self.lookup.contains_key(&definition_id) { return Ok(()); }
 

	
 
        let root_id = Self::find_root_id(ctx, definition_id);
 
        self.iter.reset(root_id, definition_id);
 

	
 
        while let Some((root_id, definition_id)) = self.iter.top() {
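As a hedged usage sketch (not part of the changeset): the split into `has_monomorph`/`add_monomorph` above leaves the duplicate check to the caller, since `add_monomorph` debug-asserts that the entry does not exist yet. The helper below is hypothetical and only illustrates the intended call order.

fn register_monomorph_once(
    types: &mut TypeTable,
    definition_id: DefinitionId,
    poly_types: Vec<ConcreteType>,
) {
    // `has_monomorph` does a linear scan over the stored monomorphs of the
    // definition; `add_monomorph` would debug-panic on a duplicate entry.
    if !types.has_monomorph(&definition_id, &poly_types) {
        types.add_monomorph(&definition_id, poly_types);
    }
}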
src/protocol/parser/visitor_linker.rs
Show inline comments
 
@@ -483,85 +483,43 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 
        // Link the call expression following the new statement
 
        if self.performing_breadth_pass {
 
            // TODO: Cleanup error messages, can be done cleaner
 
            // Make sure new statement occurs within a composite component
 
            let call_expr_id = ctx.heap[id].expression;
 
            if !self.def_type.is_composite() {
 
                let new_stmt = &ctx.heap[id];
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, new_stmt.position, "Instantiating components may only be done in composite components")
 
                );
 
            }
 

	
 
            // No fancy recursive parsing, must be followed by a call expression
 
            let definition_id = {
 
                let call_expr = &ctx.heap[call_expr_id];
 
                if let Method::Symbolic(symbolic) = &call_expr.method {
 
                    let found_symbol = self.find_symbol_of_type(
 
                        ctx.module.root_id, &ctx.symbols, &ctx.types,
 
                        &symbolic.identifier, TypeClass::Component
 
                    );
 

	
 
                    match found_symbol {
 
                        FindOfTypeResult::Found(definition_id) => definition_id,
 
                        FindOfTypeResult::TypeMismatch(got_type_class) => {
 
                            return Err(ParseError2::new_error(
 
                                &ctx.module.source, symbolic.identifier.position,
 
                                &format!("New must instantiate a component, this identifier points to a {}", got_type_class)
 
                            ))
 
                        },
 
                        FindOfTypeResult::NotFound => {
 
                            return Err(ParseError2::new_error(
 
                                &ctx.module.source, symbolic.identifier.position,
 
                                "Could not find a defined component with this name"
 
                            ))
 
                        }
 
                    }
 
                } else {
 
                    return Err(
 
                        ParseError2::new_error(&ctx.module.source, call_expr.position, "Must instantiate a component")
 
                    );
 
                }
 
            };
 

	
 
            // Modify new statement's symbolic call to point to the appropriate
 
            // definition.
 
            let call_expr = &mut ctx.heap[call_expr_id];
 
            match &mut call_expr.method {
 
                Method::Symbolic(method) => method.definition = Some(definition_id),
 
                _ => unreachable!()
 
            // We make sure that we point to a symbolic method. Checking that it
 
            // points to a component is done in the depth pass.
 
            let call_expr = &ctx.heap[call_expr_id];
 
            if let Method::Symbolic(_) = &call_expr.method {
 
                // We're fine
 
            } else {
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, call_expr.position, "Must instantiate a component")
 
                );
 
            }
 
        } else {
 
            // Performing depth pass. The function definition should have been
 
            // resolved in the breadth pass, now we recurse to evaluate the
 
            // arguments
 
            // TODO: @cleanup Maybe just call `visit_call_expr`?
 
            // Just call `visit_call_expr`. We do some extra work we don't have
 
            // to, but this prevents silly mistakes.
 
            let call_expr_id = ctx.heap[id].expression;
 
            let call_expr = &mut ctx.heap[call_expr_id];
 
            call_expr.parent = ExpressionParent::New(id);
 

	
 
            let old_num_exprs = self.expression_buffer.len();
 
            self.expression_buffer.extend(&call_expr.arguments);
 
            let new_num_exprs = self.expression_buffer.len();
 

	
 
            let old_expr_parent = self.expr_parent;
 

	
 
            for arg_expr_idx in old_num_exprs..new_num_exprs {
 
                let arg_expr_id = self.expression_buffer[arg_expr_idx];
 
                self.expr_parent = ExpressionParent::Expression(call_expr_id.upcast(), arg_expr_idx as u32);
 
                self.visit_expr(ctx, arg_expr_id)?;
 
            }
 

	
 
            self.expression_buffer.truncate(old_num_exprs);
 
            self.expr_parent = old_expr_parent;
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::New(id);
 
            self.visit_call_expr(ctx, call_expr_id)?;
 
            self.expr_parent = ExpressionParent::None;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        if !self.performing_breadth_pass {
 
            let expr_id = ctx.heap[id].expression;
 

	
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::ExpressionStmt(id);
 
            self.visit_expr(ctx, expr_id)?;
 
@@ -799,49 +757,63 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 
                    ));
 
                }
 
                if self.in_sync.is_none() {
 
                    return Err(ParseError2::new_error(
 
                        &ctx.module.source, call_expr.position,
 
                        "A call to 'put' may only occur inside synchronous blocks"
 
                    ));
 
                }
 
                num_definition_args = 2;
 
            }
 
            Method::Symbolic(symbolic) => {
 
                // Find symbolic method
 
                let (verb, expected_type) = if let ExpressionParent::New(_) = self.expr_parent {
 
                    // Expect to find a component
 
                    ("instantiated", TypeClass::Component)
 
                } else {
 
                    // Expect to find a function
 
                    ("called", TypeClass::Function)
 
                };
 

	
 
                let found_symbol = self.find_symbol_of_type(
 
                    ctx.module.root_id, &ctx.symbols, &ctx.types,
 
                    &symbolic.identifier, TypeClass::Function
 
                    &symbolic.identifier, expected_type
 
                );
 
                let definition_id = match found_symbol {
 
                    FindOfTypeResult::Found(definition_id) => definition_id,
 
                    FindOfTypeResult::TypeMismatch(got_type_class) => {
 
                        return Err(ParseError2::new_error(
 
                            &ctx.module.source, symbolic.identifier.position,
 
                            &format!("Only functions can be called, this identifier points to a {}", got_type_class)
 
                            &format!(
 
                                "Only {}s can be {}, this identifier points to a {}",
 
                                expected_type, verb, got_type_class
 
                            )
 
                        ))
 
                    },
 
                    FindOfTypeResult::NotFound => {
 
                        return Err(ParseError2::new_error(
 
                            &ctx.module.source, symbolic.identifier.position,
 
                            &format!("Could not find a function with this name")
 
                            &format!("Could not find a {} with this name", expected_type)
 
                        ))
 
                    }
 
                };
 

	
 
                symbolic.definition = Some(definition_id);
 
                match &ctx.types.get_base_definition(&definition_id).unwrap().definition {
 
                    DefinedTypeVariant::Function(definition) => {
 
                        num_definition_args = definition.arguments.len();
 
                    },
 
                    DefinedTypeVariant::Component(definition) => {
 
                        num_definition_args = definition.arguments.len();
 
                    }
 
                    _ => unreachable!(),
 
                }
 
            }
 
        }
 

	
 
        // Check the poly args and the number of variables in the call
 
        // expression
 
        self.visit_call_poly_args(ctx, id)?;
 
        let call_expr = &mut ctx.heap[id];
 
        if num_expr_args != num_definition_args {
 
            return Err(ParseError2::new_error(
 
                &ctx.module.source, call_expr.position,
 
@@ -1113,25 +1085,25 @@ impl ValidityAndLinkerVisitor {
 
                // TODO: @hack, not very user friendly to manually allocate
 
                //  `inferred` ParserTypes with the InputPosition of the
 
                //  symbolic type's identifier.
 
                // We reuse the `parser_type_buffer` to temporarily store these
 
                // and we'll take them out later
 
                self.parser_type_buffer.push(ctx.heap.alloc_parser_type(|this| ParserType{
 
                    this,
 
                    pos: symbolic_pos,
 
                    variant: ParserTypeVariant::Inferred,
 
                }));
 
            }
 

	
 
            if let PTV::Symbolic(symbolic) = &mut ctx.heap[id].variant {
 
            if let PTV::Symbolic(symbolic) = &mut ctx.heap[parser_type_id].variant {
 
                for _ in 0..num_inferred_to_allocate {
 
                    symbolic.poly_args.push(self.parser_type_buffer.pop().unwrap());
 
                }
 
                symbolic.variant = Some(symbolic_variant);
 
            } else {
 
                unreachable!();
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
@@ -1473,29 +1445,31 @@ impl ValidityAndLinkerVisitor {
 
            },
 
            Method::Fires => {
 
                1
 
            },
 
            Method::Get => {
 
                1
 
            },
 
            Method::Put => {
 
                1
 
            }
 
            Method::Symbolic(symbolic) => {
 
                let definition = &ctx.heap[symbolic.definition.unwrap()];
 
                if let Definition::Function(definition) = definition {
 
                    definition.poly_vars.len()
 
                } else {
 
                    debug_assert!(false, "expected function while visiting call poly args");
 
                    unreachable!();
 
                match definition {
 
                    Definition::Function(definition) => definition.poly_vars.len(),
 
                    Definition::Component(definition) => definition.poly_vars.len(),
 
                    _ => {
 
                        debug_assert!(false, "expected function or component definition while visiting call poly args");
 
                        unreachable!();
 
                    }
 
                }
 
            }
 
        };
 

	
 
        // We allow zero polyargs to imply all args are inferred. Otherwise the
 
        // number of arguments must be equal
 
        if call_expr.poly_args.is_empty() {
 
            if num_expected_poly_args != 0 {
 
                // Infer all polyargs
 
                // TODO: @cleanup Not nice to use method position as implicitly
 
                //  inferred parser type pos.
 
                let pos = call_expr.position();
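A small sketch (illustrative, not from the changeset) of the polymorphic-argument count rule noted in the hunk above: an empty list of explicit poly args means they are all inferred, otherwise the count must match the definition exactly.

// Hypothetical helper mirroring the rule above; the names are made up.
fn poly_arg_count_ok(num_given: usize, num_expected: usize) -> bool {
    // Zero explicit poly args implies "infer all of them"; otherwise the
    // programmer must supply exactly as many as the definition declares.
    num_given == 0 || num_given == num_expected
}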
src/runtime/tests.rs
Show inline comments
 
@@ -28,25 +28,25 @@ fn file_logged_connector(connector_id: ConnectorId, dir_path: &Path) -> Connecto
 
fn file_logged_configured_connector(
 
    connector_id: ConnectorId,
 
    dir_path: &Path,
 
    pd: Arc<ProtocolDescription>,
 
) -> Connector {
 
    let _ = std::fs::create_dir_all(dir_path).expect("Failed to create log output dir");
 
    let path = dir_path.join(format!("cid_{:?}.txt", connector_id));
 
    let file = File::create(path).expect("Failed to create log output file!");
 
    let file_logger = Box::new(FileLogger::new(connector_id, file));
 
    Connector::new(file_logger, pd, connector_id)
 
}
 
static MINIMAL_PDL: &'static [u8] = b"
 
primitive together(in ia, in ib, out oa, out ob){
 
primitive together(in<msg> ia, in<msg> ib, out<msg> oa, out<msg> ob){
 
  while(true) synchronous {
 
    if(fires(ia)) {
 
      put(oa, get(ia));
 
      put(ob, get(ib));
 
    }
 
  } 
 
}
 
";
 
lazy_static::lazy_static! {
 
    static ref MINIMAL_PROTO: Arc<ProtocolDescription> = {
 
        Arc::new(reowolf::ProtocolDescription::parse(MINIMAL_PDL).unwrap())
 
    };
 
@@ -848,44 +848,46 @@ fn example_pres_3() {
 
}
 

	
 
#[test]
 
fn ac_not_b() {
 
    let test_log_path = Path::new("./logs/ac_not_b");
 
    let sock_addrs = [next_test_addr(), next_test_addr()];
 
    scope(|s| {
 
        s.spawn(|_| {
 
            // "amy"
 
            let mut c = file_logged_connector(0, test_log_path);
 
            let p0 = c.new_net_port(Putter, sock_addrs[0], Active).unwrap();
 
            let p1 = c.new_net_port(Putter, sock_addrs[1], Active).unwrap();
 
            c.connect(SEC1).unwrap();
 
            c.connect(SEC5).unwrap();
 

	
 
            // put both A and B
 
            c.put(p0, TEST_MSG.clone()).unwrap();
 
            c.put(p1, TEST_MSG.clone()).unwrap();
 
            c.sync(SEC1).unwrap_err();
 
        });
 
        s.spawn(|_| {
 
            // "bob"
 
            let pdl = b"
 
            primitive ac_not_b(in a, in b, out c){
 
            primitive ac_not_b(in<msg> a, in<msg> b, out<msg> c){
 
                // forward A to C but keep B silent
 
                synchronous{ put(c, get(a)); }
 
            }";
 
            let pd = Arc::new(reowolf::ProtocolDescription::parse(pdl).unwrap());
 
            let mut c = file_logged_configured_connector(1, test_log_path, pd);
 
            let p0 = c.new_net_port(Getter, sock_addrs[0], Passive).unwrap();
 
            let p1 = c.new_net_port(Getter, sock_addrs[1], Passive).unwrap();
 
            let [a, b] = c.new_port_pair();
 

	
 
            c.add_component(b"ac_not_b", &[p0, p1, a]).unwrap();
 

	
 
            c.connect(SEC1).unwrap();
 

	
 
            c.get(b).unwrap();
 
            c.sync(SEC1).unwrap_err();
 
        });
 
    })
 
    .unwrap();
 
}
 

	
 
#[test]
 
fn many_rounds_net() {
 
    let test_log_path = Path::new("./logs/many_rounds_net");
 
@@ -921,62 +923,62 @@ fn many_rounds_mem() {
 
    let [p0, p1] = c.new_port_pair();
 
    c.connect(SEC1).unwrap();
 
    for _ in 0..NUM_ROUNDS {
 
        c.put(p0, TEST_MSG.clone()).unwrap();
 
        c.get(p1).unwrap();
 
        c.sync(SEC1).unwrap();
 
    }
 
}
 

	
 
#[test]
 
fn pdl_reo_lossy() {
 
    let pdl = b"
 
    primitive lossy(in a, out b) {
 
    primitive lossy(in<msg> a, out<msg> b) {
 
        while(true) synchronous {
 
            msg m = null;
 
            if(fires(a)) {
 
                m = get(a);
 
                if(fires(b)) {
 
                    put(b, m);
 
                }
 
            }
 
        }
 
    }
 
    ";
 
    reowolf::ProtocolDescription::parse(pdl).unwrap();
 
}
 

	
 
#[test]
 
fn pdl_reo_fifo1() {
 
    let pdl = b"
 
    primitive fifo1(in a, out b) {
 
    primitive fifo1(in<msg> a, out<msg> b) {
 
        msg m = null;
 
        while(true) synchronous {
 
            if(m == null) {
 
                if(fires(a)) m=get(a);
 
            } else {
 
                if(fires(b)) put(b, m);
 
                m = null;
 
            }
 
        }
 
    }
 
    ";
 
    reowolf::ProtocolDescription::parse(pdl).unwrap();
 
}
 

	
 
#[test]
 
fn pdl_reo_fifo1full() {
 
    let test_log_path = Path::new("./logs/pdl_reo_fifo1full");
 
    let pdl = b"
 
    primitive fifo1full(in a, out b) {
 
    primitive fifo1full(in<msg> a, out<msg> b) {
 
        msg m = create(0);
 
        while(true) synchronous {
 
            if(m == null) {
 
                if(fires(a)) m=get(a);
 
            } else {
 
                if(fires(b)) put(b, m);
 
                m = null;
 
            }
 
        }
 
    }
 
    ";
 
    let pd = reowolf::ProtocolDescription::parse(pdl).unwrap();
 
@@ -985,25 +987,25 @@ fn pdl_reo_fifo1full() {
 
    let [p1, g1] = c.new_port_pair();
 
    c.add_component(b"fifo1full", &[g0, p1]).unwrap();
 
    c.connect(None).unwrap();
 
    c.get(g1).unwrap();
 
    c.sync(None).unwrap();
 
    assert_eq!(0, c.gotten(g1).unwrap().len());
 
}
 

	
 
#[test]
 
fn pdl_msg_consensus() {
 
    let test_log_path = Path::new("./logs/pdl_msg_consensus");
 
    let pdl = b"
 
    primitive msgconsensus(in a, in b) {
 
    primitive msgconsensus(in<msg> a, in<msg> b) {
 
        while(true) synchronous {
 
            msg x = get(a);
 
            msg y = get(b);
 
            assert(x == y);
 
        }
 
    }
 
    ";
 
    let pd = reowolf::ProtocolDescription::parse(pdl).unwrap();
 
    let mut c = file_logged_configured_connector(0, test_log_path, Arc::new(pd));
 
    let [p0, g0] = c.new_port_pair();
 
    let [p1, g1] = c.new_port_pair();
 
    c.add_component(b"msgconsensus", &[g0, g1]).unwrap();
 
@@ -1012,25 +1014,25 @@ fn pdl_msg_consensus() {
 
    c.put(p1, Payload::from(b"HELLO" as &[_])).unwrap();
 
    c.sync(SEC1).unwrap();
 

	
 
    c.put(p0, Payload::from(b"HEY" as &[_])).unwrap();
 
    c.put(p1, Payload::from(b"HELLO" as &[_])).unwrap();
 
    c.sync(SEC1).unwrap_err();
 
}
 

	
 
#[test]
 
fn sequencer3_prim() {
 
    let test_log_path = Path::new("./logs/sequencer3_prim");
 
    let pdl = b"
 
    primitive sequencer3(out a, out b, out c) {
 
    primitive sequencer3(out<msg> a, out<msg> b, out<msg> c) {
 
        int i = 0;
 
        while(true) synchronous {
 
            out to = a;
 
            if     (i==1) to = b;
 
            else if(i==2) to = c;
 
            if(fires(to)) {
 
                put(to, create(0));
 
                i = (i + 1)%3;
 
            }
 
        }
 
    }
 
    ";
 
@@ -1059,41 +1061,41 @@ fn sequencer3_prim() {
 
    for expected_batch_idx in (0..=2).cycle().take(TEST_ROUNDS) {
 
        // silent round
 
        assert_eq!(0, c.sync(None).unwrap());
 
        // non silent round
 
        assert_eq!(expected_batch_idx, which_of_three(&mut c));
 
    }
 
}
 

	
 
#[test]
 
fn sequencer3_comp() {
 
    let test_log_path = Path::new("./logs/sequencer3_comp");
 
    let pdl = b"
 
    primitive fifo1_init(msg m, in a, out b) {
 
    primitive fifo1_init<T>(T m, in<T> a, out<T> b) {
 
        while(true) synchronous {
 
            if(m != null && fires(b)) {
 
                put(b, m);
 
                m = null;
 
            } else if (m == null && fires(a)) {
 
                m = get(a);
 
            }
 
        }
 
    }
 
    composite fifo1_full(in a, out b) {
 
    composite fifo1_full<T>(in<T> a, out<T> b) {
 
        new fifo1_init(create(0), a, b);
 
    }
 
    composite fifo1(in a, out b) {
 
    composite fifo1<T>(in<T> a, out<T> b) {
 
        new fifo1_init(null, a, b);
 
    }
 
    composite sequencer3(out a, out b, out c) {
 
    composite sequencer3(out<msg> a, out<msg> b, out<msg> c) {
 
        channel d -> e;
 
        channel f -> g;
 
        channel h -> i;
 
        channel j -> k;
 
        channel l -> m;
 
        channel n -> o;
 

	
 
        new fifo1_full(o, d);
 
        new replicator(e, f, a);
 
        new fifo1(g, h);
 
        new replicator(i, j, b);
 
        new fifo1(k, l);
 
@@ -1140,25 +1142,25 @@ const XROUTER_ITEMS: &[XRouterItem] = {
 
    use XRouterItem::{GetA as A, GetB as B, Silent as S};
 
    &[
 
        B, A, S, B, A, A, B, S, B, S, A, A, S, B, B, S, B, S, B, B, S, B, B, A, B, B, A, B, A, B,
 
        S, B, S, B, S, A, S, B, A, S, B, A, B, S, B, S, B, S, S, B, B, A, A, A, S, S, S, B, A, A,
 
        A, S, S, B, B, B, A, B, S, S, A, A, B, A, B, B, A, A, A, B, A, B, S, A, B, S, A, A, B, S,
 
    ]
 
};
 

	
 
#[test]
 
fn xrouter_prim() {
 
    let test_log_path = Path::new("./logs/xrouter_prim");
 
    let pdl = b"
 
    primitive xrouter(in a, out b, out c) {
 
    primitive xrouter(in<msg> a, out<msg> b, out<msg> c) {
 
        while(true) synchronous {
 
            if(fires(a)) {
 
                if(fires(b)) put(b, get(a));
 
                else         put(c, get(a));
 
            }
 
        }
 
    }
 
    ";
 
    let pd = reowolf::ProtocolDescription::parse(pdl).unwrap();
 
    let mut c = file_logged_configured_connector(0, test_log_path, Arc::new(pd));
 

	
 
    // setup a session between (a) native, and (b) xrouter2, connected by 3 ports.
 
@@ -1180,41 +1182,41 @@ fn xrouter_prim() {
 
                c.put(p0, TEST_MSG.clone()).unwrap();
 
                c.get(g2).unwrap();
 
            }
 
        }
 
        assert_eq!(0, c.sync(SEC1).unwrap());
 
    }
 
    println!("PRIM {:?}", now.elapsed());
 
}
 
#[test]
 
fn xrouter_comp() {
 
    let test_log_path = Path::new("./logs/xrouter_comp");
 
    let pdl = b"
 
    primitive lossy(in a, out b) {
 
    primitive lossy<T>(in<T> a, out<T> b) {
 
        while(true) synchronous {
 
            if(fires(a)) {
 
                msg m = get(a);
 
                auto m = get(a);
 
                if(fires(b)) put(b, m);
 
            }
 
        }
 
    }
 
    primitive sync_drain(in a, in b) {
 
    primitive sync_drain<T>(in<T> a, in<T> b) {
 
        while(true) synchronous {
 
            if(fires(a)) {
 
                get(a);
 
                get(b);
 
            }
 
        }
 
    }
 
    composite xrouter(in a, out b, out c) {
 
    composite xrouter(in<msg> a, out<msg> b, out<msg> c) {
 
        channel d -> e;
 
        channel f -> g;
 
        channel h -> i;
 
        channel j -> k;
 
        channel l -> m;
 
        channel n -> o;
 
        channel p -> q;
 
        channel r -> s;
 
        channel t -> u;
 

	
 
        new replicator(a, d, f);
 
        new replicator(g, t, h);
 
@@ -1249,25 +1251,25 @@ fn xrouter_comp() {
 
                c.get(g2).unwrap();
 
            }
 
        }
 
        assert_eq!(0, c.sync(SEC1).unwrap());
 
    }
 
    println!("COMP {:?}", now.elapsed());
 
}
 

	
 
#[test]
 
fn count_stream() {
 
    let test_log_path = Path::new("./logs/count_stream");
 
    let pdl = b"
 
    primitive count_stream(out o) {
 
    primitive count_stream(out<msg> o) {
 
        msg m = create(1);
 
        m[0] = 0;
 
        while(true) synchronous {
 
            put(o, m);
 
            m[0] += 1;
 
        }
 
    }
 
    ";
 
    let pd = reowolf::ProtocolDescription::parse(pdl).unwrap();
 
    let mut c = file_logged_configured_connector(0, test_log_path, Arc::new(pd));
 

	
 
    // setup a session between (a) native, and (b) count_stream.
 
@@ -1277,29 +1279,30 @@ fn count_stream() {
 

	
 
    for expecting in 0u8..16 {
 
        c.get(g0).unwrap();
 
        c.sync(None).unwrap();
 
        assert_eq!(&[expecting], c.gotten(g0).unwrap().as_slice());
 
    }
 
}
 

	
 
#[test]
 
fn for_msg_byte() {
 
    let test_log_path = Path::new("./logs/for_msg_byte");
 
    let pdl = b"
 
    primitive for_msg_byte(out o) {
 
    primitive for_msg_byte(out<msg> o) {
 
        byte i = 0;
 
        int idx = 0;
 
        while(i<8) {
 
            msg m = create(1);
 
            m[0] = i;
 
            m[idx] = i;
 
            synchronous put(o, m);
 
            i++;
 
        }
 
    }
 
    ";
 
    let pd = reowolf::ProtocolDescription::parse(pdl).unwrap();
 
    let mut c = file_logged_configured_connector(0, test_log_path, Arc::new(pd));
 

	
 
    // setup a session between (a) native, and (b) for_msg_byte, connected by a single port.
 
    let [p0, g0] = c.new_port_pair();
 
    c.add_component(b"for_msg_byte", &[p0]).unwrap();
 
    c.connect(None).unwrap();
 
@@ -1307,25 +1310,25 @@ fn for_msg_byte() {
 
    for expecting in 0u8..8 {
 
        c.get(g0).unwrap();
 
        c.sync(None).unwrap();
 
        assert_eq!(&[expecting], c.gotten(g0).unwrap().as_slice());
 
    }
 
    c.sync(None).unwrap();
 
}
 

	
 
#[test]
 
fn eq_causality() {
 
    let test_log_path = Path::new("./logs/eq_causality");
 
    let pdl = b"
 
    primitive eq(in a, in b, out c) {
 
    primitive eq(in<msg> a, in<msg> b, out<msg> c) {
 
        msg ma = null;
 
        msg mb = null;
 
        while(true) synchronous {
 
            if(fires(a)) {
 
                // b and c also fire!
 
                // left first!
 
                ma = get(a);
 
                put(c, ma);
 
                mb = get(b);
 
                assert(ma == mb);
 
            }
 
        }
0 comments (0 inline, 0 general)