Changeset - 6ec2e0261a03
MH - 2021-03-22 18:24:54
contact@maxhenger.nl
debugging type inference
14 files changed with 629 insertions and 142 deletions:
src/protocol/arena.rs
 
use crate::common::*;
 
use core::hash::Hash;
 
use core::marker::PhantomData;
 

	
 
#[derive(serde::Serialize, serde::Deserialize)]
 
pub struct Id<T> {
 
    pub(crate) index: u32,
 
    _phantom: PhantomData<T>,
 
}
 

	
 
impl<T> Id<T> {
 
    pub(crate) fn new(index: u32) -> Self {
 
        Self{ index, _phantom: Default::default() }
 
    }
 
}
 

	
 
#[derive(Debug, serde::Serialize, serde::Deserialize)]
 
pub(crate) struct Arena<T> {
 
    store: Vec<T>,
 
}
 
//////////////////////////////////
 

	
 
impl<T> Debug for Id<T> {
 
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
 
        f.debug_struct("Id").field("index", &self.index).finish()
 
    }
 
}
 
impl<T> Clone for Id<T> {
 
    fn clone(&self) -> Self {
 
        *self
 
    }
 
}
 
impl<T> Copy for Id<T> {}
 
impl<T> PartialEq for Id<T> {
 
    fn eq(&self, other: &Self) -> bool {
 
        self.index.eq(&other.index)
 
    }
 
}
 
impl<T> Eq for Id<T> {}
 
impl<T> Hash for Id<T> {
 
    fn hash<H: std::hash::Hasher>(&self, h: &mut H) {
 
        self.index.hash(h);
 
    }
 
}
 

	
 
impl<T> Arena<T> {
 
    pub fn new() -> Self {
 
        Self { store: vec![] }
 
    }
 
    pub fn alloc_with_id(&mut self, f: impl FnOnce(Id<T>) -> T) -> Id<T> {
 
        use std::convert::TryFrom;
 
        let id = Id::new(u32::try_from(self.store.len()).expect("Out of capacity!"));
 
        self.store.push(f(id));
 
        id
 
    }
 
    pub fn iter(&self) -> impl Iterator<Item = &T> {
 
        self.store.iter()
 
    }
 
    pub fn iter_mut(&mut self) -> impl Iterator<Item = &mut T> {
 
        self.store.iter_mut()
 
    }
 
    pub fn len(&self) -> usize {
 
        self.store.len()
 
    }
 
}
 
impl<T> core::ops::Index<Id<T>> for Arena<T> {
 
    type Output = T;
 
    fn index(&self, id: Id<T>) -> &Self::Output {
 
        self.store.index(id.index as usize)
 
    }
 
}
 
impl<T> core::ops::IndexMut<Id<T>> for Arena<T> {
 
    fn index_mut(&mut self, id: Id<T>) -> &mut Self::Output {
 
        self.store.index_mut(id.index as usize)
 
    }
 
}
 
\ No newline at end of file
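
For reference, a minimal usage sketch of the arena API above; the `Node` type and its fields are hypothetical and only serve to illustrate `alloc_with_id` and the `Index`/`IndexMut` impls:

struct Node {
    this: Id<Node>,
    value: i32,
}

fn arena_example() {
    let mut nodes: Arena<Node> = Arena::new();
    // alloc_with_id hands the element its own Id before the element is stored.
    let id = nodes.alloc_with_id(|this| Node{ this, value: 5 });
    assert_eq!(nodes[id].value, 5);
    nodes[id].value += 1; // mutate in place through the Id
}
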
src/protocol/ast.rs
 
@@ -515,457 +515,488 @@ impl SyntaxElement for Root {
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Pragma {
 
    Version(PragmaVersion),
 
    Module(PragmaModule)
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct PragmaVersion {
 
    pub this: PragmaId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub version: u64,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct PragmaModule {
 
    pub this: PragmaId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub value: Vec<u8>,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct PragmaOld {
 
    pub this: PragmaId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub value: Vec<u8>,
 
}
 

	
 
impl SyntaxElement for PragmaOld {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Import {
 
    Module(ImportModule),
 
    Symbols(ImportSymbols)
 
}
 

	
 
impl Import {
 
    pub(crate) fn as_module(&self) -> &ImportModule {
 
        match self {
 
            Import::Module(m) => m,
 
            _ => panic!("Unable to cast 'Import' to 'ImportModule'")
 
        }
 
    }
 
    pub(crate) fn as_symbols(&self) -> &ImportSymbols {
 
        match self {
 
            Import::Symbols(m) => m,
 
            _ => panic!("Unable to cast 'Import' to 'ImportSymbols'")
 
        }
 
    }
 
}
 

	
 
impl SyntaxElement for Import {
 
    fn position(&self) -> InputPosition {
 
        match self {
 
            Import::Module(m) => m.position,
 
            Import::Symbols(m) => m.position
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ImportModule {
 
    pub this: ImportId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub module_name: Vec<u8>,
 
    pub alias: Vec<u8>,
 
    // Phase 2: module resolving
 
    pub module_id: Option<RootId>,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct AliasedSymbol {
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub name: Vec<u8>,
 
    pub alias: Vec<u8>,
 
    // Phase 2: symbol resolving
 
    pub definition_id: Option<DefinitionId>,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ImportSymbols {
 
    pub this: ImportId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub module_name: Vec<u8>,
 
    // Phase 2: module resolving
 
    pub module_id: Option<RootId>,
 
    // Phase 1&2
 
    // if symbols is empty, then we implicitly import all symbols without any
 
    // aliases for them. If it is not empty, then symbols are explicitly
 
    // specified, and optionally given an alias.
 
    pub symbols: Vec<AliasedSymbol>,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct Identifier {
 
    pub position: InputPosition,
 
    pub value: Vec<u8>
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct NamespacedIdentifier {
 
    pub position: InputPosition,
 
    pub num_namespaces: u8,
 
    pub value: Vec<u8>,
 
}
 

	
 
impl NamespacedIdentifier {
 
    pub(crate) fn iter(&self) -> NamespacedIdentifierIter {
 
        NamespacedIdentifierIter{
 
            value: &self.value,
 
            cur_offset: 0,
 
            num_returned: 0,
 
            num_total: self.num_namespaces
 
        }
 
    }
 
}
 

	
 
impl PartialEq for NamespacedIdentifier {
 
    fn eq(&self, other: &Self) -> bool {
 
        return self.value == other.value
 
    }
 
}
 
impl Eq for NamespacedIdentifier{}
 

	
 
// TODO: Just keep ref to NamespacedIdentifier
 
pub(crate) struct NamespacedIdentifierIter<'a> {
 
    value: &'a Vec<u8>,
 
    cur_offset: usize,
 
    num_returned: u8,
 
    num_total: u8,
 
}
 

	
 
impl<'a> NamespacedIdentifierIter<'a> {
 
    pub(crate) fn num_returned(&self) -> u8 {
 
        return self.num_returned;
 
    }
 
    pub(crate) fn num_remaining(&self) -> u8 {
 
        return self.num_total - self.num_returned
 
    }
 
    pub(crate) fn returned_section(&self) -> &[u8] {
 
        // Offset always includes the two trailing ':' characters
 
        let end = if self.cur_offset >= 2 { self.cur_offset - 2 } else { self.cur_offset };
 
        return &self.value[..end]
 
    }
 
}
 

	
 
impl<'a> Iterator for NamespacedIdentifierIter<'a> {
 
    type Item = &'a [u8];
 
    fn next(&mut self) -> Option<Self::Item> {
 
        if self.cur_offset >= self.value.len() {
 
            debug_assert_eq!(self.num_returned, self.num_total);
 
            None
 
        } else {
 
            debug_assert!(self.num_returned < self.num_total);
 
            let start = self.cur_offset;
 
            let mut end = start;
 
            while end < self.value.len() - 1 {
 
                if self.value[end] == b':' && self.value[end + 1] == b':' {
 
                    self.cur_offset = end + 2;
 
                    self.num_returned += 1;
 
                    return Some(&self.value[start..end]);
 
                }
 
                end += 1;
 
            }
 

	
 
            // If NamespacedIdentifier is constructed properly, then we cannot
 
            // end with "::" in the value, so the final segment is never empty.
 
            debug_assert!(end == 0 || (self.value[end - 1] != b':' && self.value[end] != b':'));
 
            debug_assert_eq!(self.num_returned + 1, self.num_total);
 
            self.cur_offset = self.value.len();
 
            self.num_returned += 1;
 
            return Some(&self.value[start..]);
 
        }
 
    }
 
}
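
As a usage sketch of the iterator above (assuming, consistent with its debug asserts, that `num_namespaces` counts every segment including the final identifier; the `InputPosition::default()` below is only a hypothetical placeholder):

let ident = NamespacedIdentifier{
    position: InputPosition::default(), // placeholder, not part of this diff
    num_namespaces: 2,
    value: b"std::queue".to_vec(),
};
let mut parts = ident.iter();
assert_eq!(parts.next(), Some(&b"std"[..]));
assert_eq!(parts.returned_section(), b"std"); // consumed text, minus the trailing "::"
assert_eq!(parts.next(), Some(&b"queue"[..]));
assert_eq!(parts.next(), None);
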
 

	
 
impl Display for Identifier {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        // A source identifier is in ASCII range.
 
        write!(f, "{}", String::from_utf8_lossy(&self.value))
 
    }
 
}
 

	
 
/// TODO: @cleanup Maybe handle this differently, preallocate in heap? The
 
///     reason I'm handling it like this now is so we don't allocate types in
 
///     the `Arena` structure if they're the common types defined here.
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum ParserTypeVariant {
 
    // Basic builtin
 
    Message,
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // Literals (need to get concrete builtin type during typechecking)
 
    IntegerLiteral,
 
    Inferred,
 
    // Complex builtins
 
    Array(ParserTypeId), // array of a type
 
    Input(ParserTypeId), // typed input endpoint of a channel
 
    Output(ParserTypeId), // typed output endpoint of a channel
 
    Symbolic(SymbolicParserType), // symbolic type (definition or polyarg)
 
}
 

	
 
impl ParserTypeVariant {
 
    pub(crate) fn supports_polymorphic_args(&self) -> bool {
 
        use ParserTypeVariant::*;
 
        match self {
 
            Message | Bool | Byte | Short | Int | Long | String | IntegerLiteral | Inferred => false,
 
            _ => true
 
        }
 
    }
 
}
 

	
 
/// ParserType is a specification of a type during the parsing phase and initial
 
/// linker/validator phase of the compilation process. These types may be
 
/// (partially) inferred or represent literals (e.g. an integer whose byte size is
 
/// not yet determined).
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ParserType {
 
    pub this: ParserTypeId,
 
    pub pos: InputPosition,
 
    pub variant: ParserTypeVariant,
 
}
 

	
 
/// SymbolicParserType is the specification of a symbolic type. During the
 
/// parsing phase we will only store the identifier of the type. During the
 
/// validation phase we will determine whether it refers to a user-defined type,
 
/// or a polymorphic argument. After the validation phase it may still be the
 
/// case that the resulting `variant` will not pass the typechecker.
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct SymbolicParserType {
 
    // Phase 1: parser
 
    pub identifier: NamespacedIdentifier,
 
    /// The user-specified polymorphic arguments. Zero-length implies that the
 
    /// user did not specify any of them, and they're either not needed or all
 
    /// need to be inferred. Otherwise the number of polymorphic arguments must
 
    /// match those of the corresponding definition
 
    pub poly_args: Vec<ParserTypeId>,
 
    // Phase 2: validation/linking (for types in function/component bodies) and
 
    //  type table construction (for embedded types of structs/unions)
 
    pub variant: Option<SymbolicParserTypeVariant>
 
}
 

	
 
/// Specifies whether the symbolic type points to an actual user-defined type,
 
/// or whether it points to a polymorphic argument within the definition (e.g.
 
/// a defined variable `T var` within a function `int func<T>()`).
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum SymbolicParserTypeVariant {
 
    Definition(DefinitionId),
 
    // TODO: figure out if I need the DefinitionId here
 
    PolyArg(DefinitionId, usize), // index of polyarg in the definition
 
}
 

	
 
/// ConcreteType is the representation of a type after resolving symbolic types
 
/// and performing type inference
 
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
 
pub enum ConcreteTypePart {
 
    // Special types (cannot be explicitly constructed by the programmer)
 
    Void,
 
    // Builtin types without nested types
 
    Message,
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // Builtin types with one nested type
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // User defined type with any number of nested types
 
    Instance(DefinitionId, usize),
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ConcreteType {
 
    pub(crate) parts: Vec<ConcreteTypePart>
 
}
 

	
 
impl Default for ConcreteType {
 
    fn default() -> Self {
 
        Self{ parts: Vec::new() }
 
    }
 
}
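
The `parts` vector reads most naturally as a pre-order flattening of the type tree; a sketch of that reading (the exact ordering is fixed by the type-inference code, which is not part of this file):

// `in<int[]>`, an input port carrying an array of int, would then be stored as:
let port_of_int_array = ConcreteType{
    parts: vec![
        ConcreteTypePart::Input, // outermost constructor
        ConcreteTypePart::Array, // its single nested type
        ConcreteTypePart::Int,   // element type of the array
    ],
};
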
 

	
 
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
 
pub enum PrimitiveType {
 
    Input,
 
    Output,
 
    Message,
 
    Boolean,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    Symbolic(PrimitiveSymbolic)
 
}
 

	
 
// TODO: @cleanup, remove PartialEq implementations
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct PrimitiveSymbolic {
 
    // Phase 1: parser
 
    pub(crate) identifier: NamespacedIdentifier,
 
    // Phase 2: typing
 
    pub(crate) definition: Option<DefinitionId>
 
}
 

	
 
impl PartialEq for PrimitiveSymbolic {
 
    fn eq(&self, other: &Self) -> bool {
 
        self.identifier == other.identifier
 
    }
 
}
 
impl Eq for PrimitiveSymbolic{}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
 
pub struct Type {
 
    pub primitive: PrimitiveType,
 
    pub array: bool,
 
}
 

	
 
#[allow(dead_code)]
 
impl Type {
 
    pub const INPUT: Type = Type { primitive: PrimitiveType::Input, array: false };
 
    pub const OUTPUT: Type = Type { primitive: PrimitiveType::Output, array: false };
 
    pub const MESSAGE: Type = Type { primitive: PrimitiveType::Message, array: false };
 
    pub const BOOLEAN: Type = Type { primitive: PrimitiveType::Boolean, array: false };
 
    pub const BYTE: Type = Type { primitive: PrimitiveType::Byte, array: false };
 
    pub const SHORT: Type = Type { primitive: PrimitiveType::Short, array: false };
 
    pub const INT: Type = Type { primitive: PrimitiveType::Int, array: false };
 
    pub const LONG: Type = Type { primitive: PrimitiveType::Long, array: false };
 

	
 
    pub const INPUT_ARRAY: Type = Type { primitive: PrimitiveType::Input, array: true };
 
    pub const OUTPUT_ARRAY: Type = Type { primitive: PrimitiveType::Output, array: true };
 
    pub const MESSAGE_ARRAY: Type = Type { primitive: PrimitiveType::Message, array: true };
 
    pub const BOOLEAN_ARRAY: Type = Type { primitive: PrimitiveType::Boolean, array: true };
 
    pub const BYTE_ARRAY: Type = Type { primitive: PrimitiveType::Byte, array: true };
 
    pub const SHORT_ARRAY: Type = Type { primitive: PrimitiveType::Short, array: true };
 
    pub const INT_ARRAY: Type = Type { primitive: PrimitiveType::Int, array: true };
 
    pub const LONG_ARRAY: Type = Type { primitive: PrimitiveType::Long, array: true };
 
}
 

	
 
impl Display for Type {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
 
        match &self.primitive {
 
            PrimitiveType::Input => {
 
                write!(f, "in")?;
 
            }
 
            PrimitiveType::Output => {
 
                write!(f, "out")?;
 
            }
 
            PrimitiveType::Message => {
 
                write!(f, "msg")?;
 
            }
 
            PrimitiveType::Boolean => {
 
                write!(f, "boolean")?;
 
            }
 
            PrimitiveType::Byte => {
 
                write!(f, "byte")?;
 
            }
 
            PrimitiveType::Short => {
 
                write!(f, "short")?;
 
            }
 
            PrimitiveType::Int => {
 
                write!(f, "int")?;
 
            }
 
            PrimitiveType::Long => {
 
                write!(f, "long")?;
 
            }
 
            PrimitiveType::Symbolic(data) => {
 
                // Type data is in ASCII range.
 
                if let Some(id) = &data.definition {
 
                    write!(
 
                        f, "Symbolic({}, id: {})", 
 
                        String::from_utf8_lossy(&data.identifier.value),
 
                        id.index
 
                    )?;
 
                } else {
 
                    write!(
 
                        f, "Symbolic({}, id: Unresolved)",
 
                        String::from_utf8_lossy(&data.identifier.value)
 
                    )?;
 
                }
 
            }
 
        }
 
        if self.array {
 
            write!(f, "[]")
 
        } else {
 
            Ok(())
 
        }
 
    }
 
}
 

	
 
type CharacterData = Vec<u8>;
 
type IntegerData = i64;
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Constant {
 
    Null, // message
 
    True,
 
    False,
 
    Character(CharacterData),
 
    Integer(IntegerData),
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Method {
 
    Get,
 
    Put,
 
    Fires,
 
    Create,
 
    Symbolic(MethodSymbolic)
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct MethodSymbolic {
 
    pub(crate) identifier: NamespacedIdentifier,
 
    pub(crate) definition: Option<DefinitionId>
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Field {
 
    Length,
 
    Symbolic(Identifier),
 
}
 
impl Field {
 
    pub fn is_length(&self) -> bool {
 
        match self {
 
            Field::Length => true,
 
            _ => false,
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
 
pub enum Scope {
 
    Definition(DefinitionId),
 
    Regular(BlockStatementId),
 
    Synchronous((SynchronousStatementId, BlockStatementId)),
 
}
 

	
 
impl Scope {
 
    pub fn is_block(&self) -> bool {
 
        match &self {
 
            Scope::Definition(_) => false,
 
            Scope::Regular(_) => true,
 
            Scope::Synchronous(_) => true,
 
        }
 
    }
 
    pub fn to_block(&self) -> BlockStatementId {
 
        match &self {
 
            Scope::Regular(id) => *id,
 
            Scope::Synchronous((_, id)) => *id,
 
            _ => panic!("unable to get BlockStatement from Scope")
 
        }
 
    }
 
}
 

	
 
pub trait VariableScope {
 
    fn parent_scope(&self, h: &Heap) -> Option<Scope>;
 
    fn get_variable(&self, h: &Heap, id: &Identifier) -> Option<VariableId>;
 
}
 

	
 
impl VariableScope for Scope {
 
    fn parent_scope(&self, h: &Heap) -> Option<Scope> {
 
        match self {
 
            Scope::Definition(def) => h[*def].parent_scope(h),
 
            Scope::Regular(stmt) => h[*stmt].parent_scope(h),
 
            Scope::Synchronous((stmt, _)) => h[*stmt].parent_scope(h),
 
        }
 
    }
 
    fn get_variable(&self, h: &Heap, id: &Identifier) -> Option<VariableId> {
 
        match self {
 
            Scope::Definition(def) => h[*def].get_variable(h, id),
 
            Scope::Regular(stmt) => h[*stmt].get_variable(h, id),
 
            Scope::Synchronous((stmt, _)) => h[*stmt].get_variable(h, id),
 
        }
 
    }
 
@@ -1816,473 +1847,513 @@ impl SyntaxElement for ReturnStatement {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct AssertStatement {
 
    pub this: AssertStatementId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub expression: ExpressionId,
 
    // Phase 2: linker
 
    pub next: Option<StatementId>,
 
}
 

	
 
impl SyntaxElement for AssertStatement {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct GotoStatement {
 
    pub this: GotoStatementId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub label: Identifier,
 
    // Phase 2: linker
 
    pub target: Option<LabeledStatementId>,
 
}
 

	
 
impl SyntaxElement for GotoStatement {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct NewStatement {
 
    pub this: NewStatementId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub expression: CallExpressionId,
 
    // Phase 2: linker
 
    pub next: Option<StatementId>,
 
}
 

	
 
impl SyntaxElement for NewStatement {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ExpressionStatement {
 
    pub this: ExpressionStatementId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub expression: ExpressionId,
 
    // Phase 2: linker
 
    pub next: Option<StatementId>,
 
}
 

	
 
impl SyntaxElement for ExpressionStatement {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, PartialEq, Eq, Clone, Copy, serde::Serialize, serde::Deserialize)]
 
pub enum ExpressionParent {
 
    None, // only set during initial parsing
 
    Memory(MemoryStatementId),
 
    If(IfStatementId),
 
    While(WhileStatementId),
 
    Return(ReturnStatementId),
 
    Assert(AssertStatementId),
 
    New(NewStatementId),
 
    ExpressionStmt(ExpressionStatementId),
 
    Expression(ExpressionId, u32) // index within expression (e.g. LHS or RHS of expression)
 
}
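
For instance, assuming operands are numbered left to right, the right-hand side `b` of a binary expression `a + b` would carry `ExpressionParent::Expression(id_of_a_plus_b, 1)`, where `id_of_a_plus_b` stands for the `ExpressionId` of the enclosing binary expression.
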
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Expression {
 
    Assignment(AssignmentExpression),
 
    Conditional(ConditionalExpression),
 
    Binary(BinaryExpression),
 
    Unary(UnaryExpression),
 
    Indexing(IndexingExpression),
 
    Slicing(SlicingExpression),
 
    Select(SelectExpression),
 
    Array(ArrayExpression),
 
    Constant(ConstantExpression),
 
    Call(CallExpression),
 
    Variable(VariableExpression),
 
}
 

	
 
impl Expression {
 
    pub fn as_assignment(&self) -> &AssignmentExpression {
 
        match self {
 
            Expression::Assignment(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `AssignmentExpression`"),
 
        }
 
    }
 
    pub fn as_conditional(&self) -> &ConditionalExpression {
 
        match self {
 
            Expression::Conditional(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `ConditionalExpression`"),
 
        }
 
    }
 
    pub fn as_binary(&self) -> &BinaryExpression {
 
        match self {
 
            Expression::Binary(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `BinaryExpression`"),
 
        }
 
    }
 
    pub fn as_unary(&self) -> &UnaryExpression {
 
        match self {
 
            Expression::Unary(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `UnaryExpression`"),
 
        }
 
    }
 
    pub fn as_indexing(&self) -> &IndexingExpression {
 
        match self {
 
            Expression::Indexing(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `IndexingExpression`"),
 
        }
 
    }
 
    pub fn as_slicing(&self) -> &SlicingExpression {
 
        match self {
 
            Expression::Slicing(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `SlicingExpression`"),
 
        }
 
    }
 
    pub fn as_select(&self) -> &SelectExpression {
 
        match self {
 
            Expression::Select(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `SelectExpression`"),
 
        }
 
    }
 
    pub fn as_array(&self) -> &ArrayExpression {
 
        match self {
 
            Expression::Array(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `ArrayExpression`"),
 
        }
 
    }
 
    pub fn as_constant(&self) -> &ConstantExpression {
 
        match self {
 
            Expression::Constant(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `ConstantExpression`"),
 
        }
 
    }
 
    pub fn as_call(&self) -> &CallExpression {
 
        match self {
 
            Expression::Call(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `CallExpression`"),
 
        }
 
    }
 
    pub fn as_call_mut(&mut self) -> &mut CallExpression {
 
        match self {
 
            Expression::Call(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `CallExpression`"),
 
        }
 
    }
 
    pub fn as_variable(&self) -> &VariableExpression {
 
        match self {
 
            Expression::Variable(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `VariableExpression`"),
 
        }
 
    }
 
    pub fn as_variable_mut(&mut self) -> &mut VariableExpression {
 
        match self {
 
            Expression::Variable(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `VariableExpression`"),
 
        }
 
    }
 
    // TODO: @cleanup
 
    pub fn parent(&self) -> &ExpressionParent {
 
        match self {
 
            Expression::Assignment(expr) => &expr.parent,
 
            Expression::Conditional(expr) => &expr.parent,
 
            Expression::Binary(expr) => &expr.parent,
 
            Expression::Unary(expr) => &expr.parent,
 
            Expression::Indexing(expr) => &expr.parent,
 
            Expression::Slicing(expr) => &expr.parent,
 
            Expression::Select(expr) => &expr.parent,
 
            Expression::Array(expr) => &expr.parent,
 
            Expression::Constant(expr) => &expr.parent,
 
            Expression::Call(expr) => &expr.parent,
 
            Expression::Variable(expr) => &expr.parent,
 
        }
 
    }
 
    // TODO: @cleanup
 
    pub fn parent_expr_id(&self) -> Option<ExpressionId> {
 
        if let ExpressionParent::Expression(id, _) = self.parent() {
 
            Some(*id)
 
        } else {
 
            None
 
        }
 
    }
 
    // TODO: @cleanup
 
    pub fn set_parent(&mut self, parent: ExpressionParent) {
 
        match self {
 
            Expression::Assignment(expr) => expr.parent = parent,
 
            Expression::Conditional(expr) => expr.parent = parent,
 
            Expression::Binary(expr) => expr.parent = parent,
 
            Expression::Unary(expr) => expr.parent = parent,
 
            Expression::Indexing(expr) => expr.parent = parent,
 
            Expression::Slicing(expr) => expr.parent = parent,
 
            Expression::Select(expr) => expr.parent = parent,
 
            Expression::Array(expr) => expr.parent = parent,
 
            Expression::Constant(expr) => expr.parent = parent,
 
            Expression::Call(expr) => expr.parent = parent,
 
            Expression::Variable(expr) => expr.parent = parent,
 
        }
 
    }
 
    // TODO: @cleanup
 
    pub fn get_type_mut(&mut self) -> &mut ConcreteType {
 
        match self {
 
            Expression::Assignment(expr) => &mut expr.concrete_type,
 
            Expression::Conditional(expr) => &mut expr.concrete_type,
 
            Expression::Binary(expr) => &mut expr.concrete_type,
 
            Expression::Unary(expr) => &mut expr.concrete_type,
 
            Expression::Indexing(expr) => &mut expr.concrete_type,
 
            Expression::Slicing(expr) => &mut expr.concrete_type,
 
            Expression::Select(expr) => &mut expr.concrete_type,
 
            Expression::Array(expr) => &mut expr.concrete_type,
 
            Expression::Constant(expr) => &mut expr.concrete_type,
 
            Expression::Call(expr) => &mut expr.concrete_type,
 
            Expression::Variable(expr) => &mut expr.concrete_type,
 
        }
 
    }
 
}
 

	
 
impl SyntaxElement for Expression {
 
    fn position(&self) -> InputPosition {
 
        match self {
 
            Expression::Assignment(expr) => expr.position(),
 
            Expression::Conditional(expr) => expr.position(),
 
            Expression::Binary(expr) => expr.position(),
 
            Expression::Unary(expr) => expr.position(),
 
            Expression::Indexing(expr) => expr.position(),
 
            Expression::Slicing(expr) => expr.position(),
 
            Expression::Select(expr) => expr.position(),
 
            Expression::Array(expr) => expr.position(),
 
            Expression::Constant(expr) => expr.position(),
 
            Expression::Call(expr) => expr.position(),
 
            Expression::Variable(expr) => expr.position(),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum AssignmentOperator {
 
    Set,
 
    Multiplied,
 
    Divided,
 
    Remained,
 
    Added,
 
    Subtracted,
 
    ShiftedLeft,
 
    ShiftedRight,
 
    BitwiseAnded,
 
    BitwiseXored,
 
    BitwiseOred,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct AssignmentExpression {
 
    pub this: AssignmentExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub left: ExpressionId,
 
    pub operation: AssignmentOperator,
 
    pub right: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for AssignmentExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ConditionalExpression {
 
    pub this: ConditionalExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub test: ExpressionId,
 
    pub true_expression: ExpressionId,
 
    pub false_expression: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for ConditionalExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
 
pub enum BinaryOperator {
 
    Concatenate,
 
    LogicalOr,
 
    LogicalAnd,
 
    BitwiseOr,
 
    BitwiseXor,
 
    BitwiseAnd,
 
    Equality,
 
    Inequality,
 
    LessThan,
 
    GreaterThan,
 
    LessThanEqual,
 
    GreaterThanEqual,
 
    ShiftLeft,
 
    ShiftRight,
 
    Add,
 
    Subtract,
 
    Multiply,
 
    Divide,
 
    Remainder,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct BinaryExpression {
 
    pub this: BinaryExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub left: ExpressionId,
 
    pub operation: BinaryOperator,
 
    pub right: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for BinaryExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)]
 
pub enum UnaryOperation {
 
    Positive,
 
    Negative,
 
    BitwiseNot,
 
    LogicalNot,
 
    PreIncrement,
 
    PreDecrement,
 
    PostIncrement,
 
    PostDecrement,
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct UnaryExpression {
 
    pub this: UnaryExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub operation: UnaryOperation,
 
    pub expression: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for UnaryExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct IndexingExpression {
 
    pub this: IndexingExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub subject: ExpressionId,
 
    pub index: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for IndexingExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct SlicingExpression {
 
    pub this: SlicingExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub subject: ExpressionId,
 
    pub from_index: ExpressionId,
 
    pub to_index: ExpressionId,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for SlicingExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct SelectExpression {
 
    pub this: SelectExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub subject: ExpressionId,
 
    pub field: Field,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for SelectExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ArrayExpression {
 
    pub this: ArrayExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub elements: Vec<ExpressionId>,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for ArrayExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct CallExpression {
 
    pub this: CallExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub method: Method,
 
    pub arguments: Vec<ExpressionId>,
 
    pub poly_args: Vec<ParserTypeId>,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for CallExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ConstantExpression {
 
    pub this: ConstantExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub value: Constant,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for ConstantExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct VariableExpression {
 
    pub this: VariableExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub identifier: NamespacedIdentifier,
 
    // Phase 2: linker
 
    pub declaration: Option<VariableId>,
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for VariableExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
src/protocol/ast_printer.rs
 
use std::fmt::{Debug, Display, Write};
 
use std::io::Write as IOWrite;
 

	
 
use super::ast::*;
 

	
 
const INDENT: usize = 2;
 

	
 
const PREFIX_EMPTY: &'static str = "    ";
 
const PREFIX_ROOT_ID: &'static str = "Root";
 
const PREFIX_PRAGMA_ID: &'static str = "Prag";
 
const PREFIX_IMPORT_ID: &'static str = "Imp ";
 
const PREFIX_TYPE_ANNOT_ID: &'static str = "TyAn";
 
const PREFIX_VARIABLE_ID: &'static str = "Var ";
 
const PREFIX_PARAMETER_ID: &'static str = "Par ";
 
const PREFIX_LOCAL_ID: &'static str = "Loc ";
 
const PREFIX_DEFINITION_ID: &'static str = "Def ";
 
const PREFIX_STRUCT_ID: &'static str = "DefS";
 
const PREFIX_ENUM_ID: &'static str = "DefE";
 
const PREFIX_COMPONENT_ID: &'static str = "DefC";
 
const PREFIX_FUNCTION_ID: &'static str = "DefF";
 
const PREFIX_STMT_ID: &'static str = "Stmt";
 
const PREFIX_BLOCK_STMT_ID: &'static str = "SBl ";
 
const PREFIX_LOCAL_STMT_ID: &'static str = "SLoc";
 
const PREFIX_MEM_STMT_ID: &'static str = "SMem";
 
const PREFIX_CHANNEL_STMT_ID: &'static str = "SCha";
 
const PREFIX_SKIP_STMT_ID: &'static str = "SSki";
 
const PREFIX_LABELED_STMT_ID: &'static str = "SLab";
 
const PREFIX_IF_STMT_ID: &'static str = "SIf ";
 
const PREFIX_ENDIF_STMT_ID: &'static str = "SEIf";
 
const PREFIX_WHILE_STMT_ID: &'static str = "SWhi";
 
const PREFIX_ENDWHILE_STMT_ID: &'static str = "SEWh";
 
const PREFIX_BREAK_STMT_ID: &'static str = "SBre";
 
const PREFIX_CONTINUE_STMT_ID: &'static str = "SCon";
 
const PREFIX_SYNC_STMT_ID: &'static str = "SSyn";
 
const PREFIX_ENDSYNC_STMT_ID: &'static str = "SESy";
 
const PREFIX_RETURN_STMT_ID: &'static str = "SRet";
 
const PREFIX_ASSERT_STMT_ID: &'static str = "SAsr";
 
const PREFIX_GOTO_STMT_ID: &'static str = "SGot";
 
const PREFIX_NEW_STMT_ID: &'static str = "SNew";
 
const PREFIX_PUT_STMT_ID: &'static str = "SPut";
 
const PREFIX_EXPR_STMT_ID: &'static str = "SExp";
 
const PREFIX_ASSIGNMENT_EXPR_ID: &'static str = "EAsi";
 
const PREFIX_CONDITIONAL_EXPR_ID: &'static str = "ECnd";
 
const PREFIX_BINARY_EXPR_ID: &'static str = "EBin";
 
const PREFIX_UNARY_EXPR_ID: &'static str = "EUna";
 
const PREFIX_INDEXING_EXPR_ID: &'static str = "EIdx";
 
const PREFIX_SLICING_EXPR_ID: &'static str = "ESli";
 
const PREFIX_SELECT_EXPR_ID: &'static str = "ESel";
 
const PREFIX_ARRAY_EXPR_ID: &'static str = "EArr";
 
const PREFIX_CONST_EXPR_ID: &'static str = "ECns";
 
const PREFIX_CALL_EXPR_ID: &'static str = "ECll";
 
const PREFIX_VARIABLE_EXPR_ID: &'static str = "EVar";
 

	
 
struct KV<'a> {
 
    buffer: &'a mut String,
 
    prefix: Option<(&'static str, u32)>,
 
    indent: usize,
 
    temp_key: &'a mut String,
 
    temp_val: &'a mut String,
 
}
 

	
 
impl<'a> KV<'a> {
 
    fn new(buffer: &'a mut String, temp_key: &'a mut String, temp_val: &'a mut String, indent: usize) -> Self {
 
        temp_key.clear();
 
        temp_val.clear();
 
        KV{
 
            buffer,
 
            prefix: None,
 
            indent,
 
            temp_key,
 
            temp_val
 
        }
 
    }
 

	
 
    fn with_id(mut self, prefix: &'static str, id: u32) -> Self {
 
        self.prefix = Some((prefix, id));
 
        self
 
    }
 

	
 
    fn with_s_key(self, key: &str) -> Self {
 
        self.temp_key.push_str(key);
 
        self
 
    }
 

	
 
    fn with_d_key<D: Display>(mut self, key: &D) -> Self {
 
        write!(&mut self.temp_key, "{}", key);
 
        self
 
    }
 

	
 
    fn with_s_val(self, val: &str) -> Self {
 
        self.temp_val.push_str(val);
 
        self
 
    }
 

	
 
    fn with_disp_val<D: Display>(mut self, val: &D) -> Self {
 
        write!(&mut self.temp_val, "{}", val);
 
        self
 
    }
 

	
 
    fn with_debug_val<D: Debug>(mut self, val: &D) -> Self {
 
        write!(&mut self.temp_val, "{:?}", val);
 
        self
 
    }
 

	
 
    fn with_ascii_val(self, val: &[u8]) -> Self {
 
        self.temp_val.push_str(&*String::from_utf8_lossy(val));
 
        self
 
    }
 

	
 
    fn with_opt_disp_val<D: Display>(mut self, val: Option<&D>) -> Self {
 
        match val {
 
            Some(v) => { write!(&mut self.temp_val, "Some({})", v); },
            None => { self.temp_val.push_str("None"); }
 
        }
 
        self
 
    }
 

	
 
    fn with_opt_ascii_val(self, val: Option<&[u8]>) -> Self {
 
        match val {
 
            Some(v) => {
 
                self.temp_val.push_str("Some(");
                self.temp_val.push_str(&*String::from_utf8_lossy(v));
                self.temp_val.push(')');
 
            },
 
            None => {
 
                self.temp_val.push_str("None");
 
            }
 
        }
 
        self
 
    }
 

	
 
    fn with_custom_val<F: Fn(&mut String)>(mut self, val_fn: F) -> Self {
 
        val_fn(&mut self.temp_val);
 
        self
 
    }
 
}
 

	
 
impl<'a> Drop for KV<'a> {
 
    fn drop(&mut self) {
 
        // Prefix and indent
 
        if let Some((prefix, id)) = &self.prefix {
 
            write!(&mut self.buffer, "{}[{:04}] ", prefix, id);
 
        } else {
 
            self.buffer.push_str("           ");
 
        }
 

	
 
        for _ in 0..self.indent * INDENT {
 
            self.buffer.push(' ');
 
        }
 

	
 
        // Leading dash
 
        self.buffer.push_str("- ");
 

	
 
        // Key and value
 
        self.buffer.push_str(self.temp_key);
 
        if self.temp_val.is_empty() {
 
            self.buffer.push(':');
 
        } else {
 
            self.buffer.push_str(": ");
 
            self.buffer.push_str(&self.temp_val);
 
        }
 
        self.buffer.push('\n');
 
    }
 
}
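
Because the actual write happens in `Drop`, a chained builder call emits exactly one line the moment the temporary `KV` goes out of scope. A sketch of the resulting format, assuming a call like the ones in `write_module` below:

// self.kv(1).with_id(PREFIX_ROOT_ID, 2).with_s_key("Module").with_s_val("example");
// appends a single line to the buffer:
// Root[0002]   - Module: example
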
 

	
 
pub(crate) struct ASTWriter {
 
    buffer: String,
 
    temp1: String,
 
    temp2: String,
 
}
 

	
 
impl ASTWriter {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            buffer: String::with_capacity(4096),
 
            temp1: String::with_capacity(256),
 
            temp2: String::with_capacity(256),
 
        }
 
    }
 
    pub(crate) fn write_ast<W: IOWrite>(&mut self, w: &mut W, heap: &Heap) {
 
        for root_id in heap.protocol_descriptions.iter().map(|v| v.this) {
 
            self.write_module(heap, root_id);
 
            w.write_all(self.buffer.as_bytes()).expect("flush buffer");
 
            self.buffer.clear();
 
        }
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level module writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_module(&mut self, heap: &Heap, root_id: RootId) {
 
        self.kv(0).with_id(PREFIX_ROOT_ID, root_id.index)
 
            .with_s_key("Module");
 

	
 
        let root = &heap[root_id];
 
        self.kv(1).with_s_key("Pragmas");
 
        for pragma_id in &root.pragmas {
 
            self.write_pragma(heap, *pragma_id, 2);
 
        }
 

	
 
        self.kv(1).with_s_key("Imports");
 
        for import_id in &root.imports {
 
            self.write_import(heap, *import_id, 2);
 
        }
 

	
 
        self.kv(1).with_s_key("Definitions");
 
        for def_id in &root.definitions {
 
            self.write_definition(heap, *def_id, 2);
 
        }
 
    }
 

	
 
    fn write_pragma(&mut self, heap: &Heap, pragma_id: PragmaId, indent: usize) {
 
        match &heap[pragma_id] {
 
            Pragma::Version(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaVersion")
 
                    .with_disp_val(&pragma.version);
 
            },
 
            Pragma::Module(pragma) => {
 
                self.kv(indent).with_id(PREFIX_PRAGMA_ID, pragma.this.index)
 
                    .with_s_key("PragmaModule")
 
                    .with_ascii_val(&pragma.value);
 
            }
 
        }
 
    }
 

	
 
    fn write_import(&mut self, heap: &Heap, import_id: ImportId, indent: usize) {
 
        let import = &heap[import_id];
 
        let indent2 = indent + 1;
 

	
 
        match import {
 
            Import::Module(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportModule");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&import.module_name);
 
                self.kv(indent2).with_s_key("Alias").with_ascii_val(&import.alias);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(import.module_id.as_ref().map(|v| &v.index));
 
            },
 
            Import::Symbols(import) => {
 
                self.kv(indent).with_id(PREFIX_IMPORT_ID, import.this.index)
 
                    .with_s_key("ImportSymbol");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&import.module_name);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(import.module_id.as_ref().map(|v| &v.index));
 

	
 
                self.kv(indent2).with_s_key("Symbols");
 

	
 
                let indent3 = indent2 + 1;
 
                let indent4 = indent3 + 1;
 
                for symbol in &import.symbols {
 
                    self.kv(indent3).with_s_key("AliasedSymbol");
 
                    self.kv(indent4).with_s_key("Name").with_ascii_val(&symbol.name);
 
                    self.kv(indent4).with_s_key("Alias").with_ascii_val(&symbol.alias);
 
                    self.kv(indent4).with_s_key("Definition")
 
                        .with_opt_disp_val(symbol.definition_id.as_ref().map(|v| &v.index));
 
                }
 
            }
 
        }
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Top-level definition writing
 
    //--------------------------------------------------------------------------
 

	
 
    fn write_definition(&mut self, heap: &Heap, def_id: DefinitionId, indent: usize) {
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 
        let indent4 = indent3 + 1;
 

	
 
        match &heap[def_id] {
 
            Definition::Struct(_) => todo!("implement Definition::Struct"),
 
            Definition::Enum(_) => todo!("implement Definition::Enum"),
 
            Definition::Function(def) => {
 
                self.kv(indent).with_id(PREFIX_FUNCTION_ID, def.this.0.index)
 
                    .with_s_key("DefinitionFunction");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar");
 
                    self.kv(indent4).with_s_key("Name").with_ascii_val(&poly_var_id.value);
 
                }
 

	
 
                self.kv(indent2).with_s_key("ReturnType").with_custom_val(|s| write_type(s, heap, &heap[def.return_type]));
 
                self.kv(indent2).with_s_key("ReturnParserType").with_custom_val(|s| write_parser_type(s, heap, &heap[def.return_type]));
 

	
 
                self.kv(indent2).with_s_key("Parameters");
 
                for param_id in &def.parameters {
 
                    self.write_parameter(heap, *param_id, indent3);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, def.body, indent3);
 
            },
 
            Definition::Component(def) => {
 
                self.kv(indent).with_id(PREFIX_COMPONENT_ID,def.this.0.index)
 
                    .with_s_key("DefinitionComponent");
 

	
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&def.identifier.value);
 
                self.kv(indent2).with_s_key("Variant").with_debug_val(&def.variant);
 

	
 
                self.kv(indent2).with_s_key("PolymorphicVariables");
 
                for poly_var_id in &def.poly_vars {
 
                    self.kv(indent3).with_s_key("PolyVar");
 
                    self.kv(indent4).with_s_key("Name").with_ascii_val(&poly_var_id.value);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parameters");
 
                for param_id in &def.parameters {
 
                    self.write_parameter(heap, *param_id, indent3)
 
                }
 

	
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, def.body, indent3);
 
            }
 
        }
 
    }
 

	
 
    fn write_parameter(&mut self, heap: &Heap, param_id: ParameterId, indent: usize) {
 
        let indent2 = indent + 1;
 
        let param = &heap[param_id];
 

	
 
        self.kv(indent).with_id(PREFIX_PARAMETER_ID, param_id.0.index)
 
            .with_s_key("Parameter");
 
        self.kv(indent2).with_s_key("Name").with_ascii_val(&param.identifier.value);
 
        self.kv(indent2).with_s_key("Type").with_custom_val(|w| write_type(w, heap, &heap[param.parser_type]));
 
        self.kv(indent2).with_s_key("ParserType").with_custom_val(|w| write_parser_type(w, heap, &heap[param.parser_type]));
 
    }
 

	
 
    fn write_stmt(&mut self, heap: &Heap, stmt_id: StatementId, indent: usize) {
 
        let stmt = &heap[stmt_id];
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 

	
 
        match stmt {
 
            Statement::Block(stmt) => {
 
                self.kv(indent).with_id(PREFIX_BLOCK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Block");
 

	
 
                for stmt_id in &stmt.statements {
 
                    self.write_stmt(heap, *stmt_id, indent2);
 
                }
 
            },
 
            Statement::Local(stmt) => {
 
                match stmt {
 
                    LocalStatement::Channel(stmt) => {
 
                        self.kv(indent).with_id(PREFIX_CHANNEL_STMT_ID, stmt.this.0.0.index)
 
                            .with_s_key("LocalChannel");
 

	
 
                        self.kv(indent2).with_s_key("From");
 
                        self.write_local(heap, stmt.from, indent3);
 
                        self.kv(indent2).with_s_key("To");
 
                        self.write_local(heap, stmt.to, indent3);
 
                        self.kv(indent2).with_s_key("Next")
 
                            .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
                    },
 
                    LocalStatement::Memory(stmt) => {
 
                        self.kv(indent).with_id(PREFIX_MEM_STMT_ID, stmt.this.0.0.index)
 
                            .with_s_key("LocalMemory");
 

	
 
                        self.kv(indent2).with_s_key("Variable");
 
                        self.write_local(heap, stmt.variable, indent3);
 
                        self.kv(indent2).with_s_key("initial");
 
                        self.write_expr(heap, stmt.initial, indent3);
 
                        self.kv(indent2).with_s_key("Next")
 
                            .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
                    }
 
                }
 
            },
 
            Statement::Skip(stmt) => {
 
                self.kv(indent).with_id(PREFIX_SKIP_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Skip");
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Labeled(stmt) => {
 
                self.kv(indent).with_id(PREFIX_LABELED_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Labeled");
 

	
 
                self.kv(indent2).with_s_key("Label").with_ascii_val(&stmt.label.value);
 
                self.kv(indent2).with_s_key("Statement");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::If(stmt) => {
 
                self.kv(indent).with_id(PREFIX_IF_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("If");
 

	
 
                self.kv(indent2).with_s_key("EndIf")
 
                    .with_opt_disp_val(stmt.end_if.as_ref().map(|v| &v.0.index));
 

	
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, stmt.test, indent3);
 

	
 
                self.kv(indent2).with_s_key("TrueBody");
 
                self.write_stmt(heap, stmt.true_body, indent3);
 

	
 
                self.kv(indent2).with_s_key("FalseBody");
 
                self.write_stmt(heap, stmt.false_body, indent3);
 
            },
 
            Statement::EndIf(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDIF_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndIf");
 
                self.kv(indent2).with_s_key("StartIf").with_disp_val(&stmt.start_if.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::While(stmt) => {
 
                self.kv(indent).with_id(PREFIX_WHILE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("While");
 

	
 
                self.kv(indent2).with_s_key("EndWhile")
 
                    .with_opt_disp_val(stmt.end_while.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("InSync")
 
                    .with_opt_disp_val(stmt.in_sync.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, stmt.test, indent3);
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::EndWhile(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDWHILE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndWhile");
 
                self.kv(indent2).with_s_key("StartWhile").with_disp_val(&stmt.start_while.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Break(stmt) => {
 
                self.kv(indent).with_id(PREFIX_BREAK_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Break");
 
                self.kv(indent2).with_s_key("Label")
 
                    .with_opt_ascii_val(stmt.label.as_ref().map(|v| v.value.as_slice()));
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::Continue(stmt) => {
 
                self.kv(indent).with_id(PREFIX_CONTINUE_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Continue");
 
                self.kv(indent2).with_s_key("Label")
 
                    .with_opt_ascii_val(stmt.label.as_ref().map(|v| v.value.as_slice()));
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::Synchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_SYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Synchronous");
 
                self.kv(indent2).with_s_key("EndSync")
 
                    .with_opt_disp_val(stmt.end_sync.as_ref().map(|v| &v.0.index));
 
                self.kv(indent2).with_s_key("Body");
 
                self.write_stmt(heap, stmt.body, indent3);
 
            },
 
            Statement::EndSynchronous(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ENDSYNC_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("EndSynchronous");
 
                self.kv(indent2).with_s_key("StartSync").with_disp_val(&stmt.start_sync.0.index);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Return(stmt) => {
 
                self.kv(indent).with_id(PREFIX_RETURN_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Return");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression, indent3);
 
            },
 
            Statement::Assert(stmt) => {
 
                self.kv(indent).with_id(PREFIX_ASSERT_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Assert");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression, indent3);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Goto(stmt) => {
 
                self.kv(indent).with_id(PREFIX_GOTO_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("Goto");
 
                self.kv(indent2).with_s_key("Label").with_ascii_val(&stmt.label.value);
 
                self.kv(indent2).with_s_key("Target")
 
                    .with_opt_disp_val(stmt.target.as_ref().map(|v| &v.0.index));
 
            },
 
            Statement::New(stmt) => {
 
                self.kv(indent).with_id(PREFIX_NEW_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("New");
 
                self.kv(indent2).with_s_key("Expression");
 
                self.write_expr(heap, stmt.expression.upcast(), indent3);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            },
 
            Statement::Expression(stmt) => {
 
                self.kv(indent).with_id(PREFIX_EXPR_STMT_ID, stmt.this.0.index)
 
                    .with_s_key("ExpressionStatement");
 
                self.write_expr(heap, stmt.expression, indent2);
 
                self.kv(indent2).with_s_key("Next")
 
                    .with_opt_disp_val(stmt.next.as_ref().map(|v| &v.index));
 
            }
 
        }
 
    }
 

	
 
    fn write_expr(&mut self, heap: &Heap, expr_id: ExpressionId, indent: usize) {
 
        let expr = &heap[expr_id];
 
        let indent2 = indent + 1;
 
        let indent3 = indent2 + 1;
 

	
 
        match expr {
 
            Expression::Assignment(expr) => {
 
                self.kv(indent).with_id(PREFIX_ASSIGNMENT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("AssignmentExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
            },
 
            Expression::Conditional(expr) => {
 
                self.kv(indent).with_id(PREFIX_CONDITIONAL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ConditionalExpr");
 
                self.kv(indent2).with_s_key("Condition");
 
                self.write_expr(heap, expr.test, indent3);
 
                self.kv(indent2).with_s_key("TrueExpression");
 
                self.write_expr(heap, expr.true_expression, indent3);
 
                self.kv(indent2).with_s_key("FalseExpression");
 
                self.write_expr(heap, expr.false_expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
            },
 
            Expression::Binary(expr) => {
 
                self.kv(indent).with_id(PREFIX_BINARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("BinaryExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Left");
 
                self.write_expr(heap, expr.left, indent3);
 
                self.kv(indent2).with_s_key("Right");
 
                self.write_expr(heap, expr.right, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
            },
 
            Expression::Unary(expr) => {
 
                self.kv(indent).with_id(PREFIX_UNARY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("UnaryExpr");
 
                self.kv(indent2).with_s_key("Operation").with_debug_val(&expr.operation);
 
                self.kv(indent2).with_s_key("Argument");
 
                self.write_expr(heap, expr.expression, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
            },
 
            Expression::Indexing(expr) => {
 
                self.kv(indent).with_id(PREFIX_INDEXING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("IndexingExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("Index");
 
                self.write_expr(heap, expr.index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
            },
 
            Expression::Slicing(expr) => {
 
                self.kv(indent).with_id(PREFIX_SLICING_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SlicingExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 
                self.kv(indent2).with_s_key("FromIndex");
 
                self.write_expr(heap, expr.from_index, indent3);
 
                self.kv(indent2).with_s_key("ToIndex");
 
                self.write_expr(heap, expr.to_index, indent3);
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
            },
 
            Expression::Select(expr) => {
 
                self.kv(indent).with_id(PREFIX_SELECT_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("SelectExpr");
 
                self.kv(indent2).with_s_key("Subject");
 
                self.write_expr(heap, expr.subject, indent3);
 

	
 
                match &expr.field {
 
                    Field::Length => {
 
                        self.kv(indent2).with_s_key("Field").with_s_val("length");
 
                    },
 
                    Field::Symbolic(field) => {
 
                        self.kv(indent2).with_s_key("Field").with_ascii_val(&field.value);
 
                    }
 
                }
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
            },
 
            Expression::Array(expr) => {
 
                self.kv(indent).with_id(PREFIX_ARRAY_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ArrayExpr");
 
                self.kv(indent2).with_s_key("Elements");
 
                for expr_id in &expr.elements {
 
                    self.write_expr(heap, *expr_id, indent3);
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
            },
 
            Expression::Constant(expr) => {
 
                self.kv(indent).with_id(PREFIX_CONST_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ConstantExpr");
 

	
 
                let val = self.kv(indent2).with_s_key("Value");
 
                match &expr.value {
 
                    Constant::Null => { val.with_s_val("null"); },
 
                    Constant::True => { val.with_s_val("true"); },
 
                    Constant::False => { val.with_s_val("false"); },
 
                    Constant::Character(char) => { val.with_ascii_val(char); },
 
                    Constant::Integer(int) => { val.with_disp_val(int); },
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
            },
 
            Expression::Call(expr) => {
 
                self.kv(indent).with_id(PREFIX_CALL_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("CallExpr");
 

	
 
                // Method
 
                let method = self.kv(indent2).with_s_key("Method");
 
                match &expr.method {
 
                    Method::Get => { method.with_s_val("get"); },
 
                    Method::Put => { method.with_s_val("put"); },
 
                    Method::Fires => { method.with_s_val("fires"); },
 
                    Method::Create => { method.with_s_val("create"); },
 
                    Method::Symbolic(symbolic) => {
 
                        method.with_s_val("symbolic");
 
                        self.kv(indent3).with_s_key("Name").with_ascii_val(&symbolic.identifier.value);
 
                        self.kv(indent3).with_s_key("Definition")
 
                            .with_opt_disp_val(symbolic.definition.as_ref().map(|v| &v.index));
 
                    }
 
                }
 

	
 
                // Arguments
 
                self.kv(indent2).with_s_key("Arguments");
 
                for arg_id in &expr.arguments {
 
                    self.write_expr(heap, *arg_id, indent3);
 
                }
 

	
 
                // Parent
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
            },
 
            Expression::Variable(expr) => {
 
                self.kv(indent).with_id(PREFIX_VARIABLE_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("VariableExpr");
 
                self.kv(indent2).with_s_key("Name").with_ascii_val(&expr.identifier.value);
 
                self.kv(indent2).with_s_key("Definition")
 
                    .with_opt_disp_val(expr.declaration.as_ref().map(|v| &v.index));
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type));
 
            }
 
        }
 
    }
 

	
 
    fn write_local(&mut self, heap: &Heap, local_id: LocalId, indent: usize) {
 
        let local = &heap[local_id];
 
        let indent2 = indent + 1;
 

	
 
        self.kv(indent).with_id(PREFIX_LOCAL_ID, local_id.0.index)
 
            .with_s_key("Local");
 

	
 
        self.kv(indent2).with_s_key("Name").with_ascii_val(&local.identifier.value);
 
        self.kv(indent2).with_s_key("Type")
 
            .with_custom_val(|w| write_type(w, heap, &heap[local.parser_type]));
 
        self.kv(indent2).with_s_key("ParserType")
 
            .with_custom_val(|w| write_parser_type(w, heap, &heap[local.parser_type]));
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Printing Utilities
 
    //--------------------------------------------------------------------------
 

	
 
    fn kv(&mut self, indent: usize) -> KV {
 
        KV::new(&mut self.buffer, &mut self.temp1, &mut self.temp2, indent)
 
    }
 

	
 
    fn flush<W: IOWrite>(&mut self, w: &mut W) {
 
        w.write(self.buffer.as_bytes()).unwrap();
 
        self.buffer.clear()
 
    }
 
}
 

	
 
fn write_option<V: Display>(target: &mut String, value: Option<V>) {
 
    target.clear();
 
    match &value {
 
        Some(v) => write!(target, "Some({})", v),
 
        None => target.write_str("None")
 
        Some(v) => target.push_str(&format!("Some({})", v)),
 
        None => target.push_str("None")
 
    };
 
}
 

	
 
fn write_type(target: &mut String, heap: &Heap, t: &ParserType) {
 
fn write_parser_type(target: &mut String, heap: &Heap, t: &ParserType) {
 
    use ParserTypeVariant as PTV;
 

	
 
    let mut embedded = Vec::new();
 
    match &t.variant {
 
        PTV::Input(id) => { target.write_str("in"); embedded.push(*id); }
 
        PTV::Output(id) => { target.write_str("out"); embedded.push(*id) }
 
        PTV::Array(id) => { target.write_str("array"); embedded.push(*id) }
 
        PTV::Message => { target.write_str("msg"); }
 
        PTV::Bool => { target.write_str("bool"); }
 
        PTV::Byte => { target.write_str("byte"); }
 
        PTV::Short => { target.write_str("short"); }
 
        PTV::Int => { target.write_str("int"); }
 
        PTV::Long => { target.write_str("long"); }
 
        PTV::String => { target.write_str("str"); }
 
        PTV::IntegerLiteral => { target.write_str("int_lit"); }
 
        PTV::Inferred => { target.write_str("auto"); }
 
        PTV::Input(id) => { target.push_str("in"); embedded.push(*id); }
 
        PTV::Output(id) => { target.push_str("out"); embedded.push(*id) }
 
        PTV::Array(id) => { target.push_str("array"); embedded.push(*id) }
 
        PTV::Message => { target.push_str("msg"); }
 
        PTV::Bool => { target.push_str("bool"); }
 
        PTV::Byte => { target.push_str("byte"); }
 
        PTV::Short => { target.push_str("short"); }
 
        PTV::Int => { target.push_str("int"); }
 
        PTV::Long => { target.push_str("long"); }
 
        PTV::String => { target.push_str("str"); }
 
        PTV::IntegerLiteral => { target.push_str("int_lit"); }
 
        PTV::Inferred => { target.push_str("auto"); }
 
        PTV::Symbolic(symbolic) => {
 
            target.write_str(&String::from_utf8_lossy(&symbolic.identifier.value));
 
            target.push_str(&String::from_utf8_lossy(&symbolic.identifier.value));
 
            match symbolic.variant {
 
                Some(SymbolicParserTypeVariant::PolyArg(def_id, idx)) => {
 
                    target.write_str(&format!("{{def: {}, idx: {}}}", def_id.index, idx));
 
                    target.push_str(&format!("{{def: {}, idx: {}}}", def_id.index, idx));
 
                },
 
                Some(SymbolicParserTypeVariant::Definition(def_id)) => {
 
                    target.write_str(&format!("{{def: {}}}", def_id.index));
 
                    target.push_str(&format!("{{def: {}}}", def_id.index));
 
                },
 
                None => {
 
                    target.write_str("{None}");
 
                    target.push_str("{None}");
 
                }
 
            }
 
            embedded.extend(&symbolic.poly_args);
 
        }
 
    };
 

	
 
    if !embedded.is_empty() {
 
        target.write_str("<");
 
        target.push_str("<");
 
        for (idx, embedded_id) in embedded.into_iter().enumerate() {
 
            if idx != 0 { target.write_str(", "); }
 
            write_type(target, heap, &heap[embedded_id]);
 
            if idx != 0 { target.push_str(", "); }
 
            write_parser_type(target, heap, &heap[embedded_id]);
 
        }
 
        target.write_str(">");
 
        target.push_str(">");
 
    }
 
}
 

	
 
fn write_concrete_type(target: &mut String, heap: &Heap, t: &ConcreteType) {
 
    use ConcreteTypePart as CTP;
 

	
 
    fn write_concrete_part(target: &mut String, heap: &Heap, t: &ConcreteType, mut idx: usize) -> usize {
 
        if idx >= t.parts.len() {
 
            target.push_str("Programmer error: invalid concrete type tree");
 
            return idx;
 
        }
 

	
 
        match &t.parts[idx] {
 
            CTP::Void => target.push_str("void"),
 
            CTP::Message => target.push_str("msg"),
 
            CTP::Bool => target.push_str("bool"),
 
            CTP::Byte => target.push_str("byte"),
 
            CTP::Short => target.push_str("short"),
 
            CTP::Int => target.push_str("int"),
 
            CTP::Long => target.push_str("long"),
 
            CTP::String => target.push_str("string"),
 
            CTP::Array => {
 
                idx = write_concrete_part(target, heap, t, idx + 1);
 
                target.push_str("[]");
 
            },
 
            CTP::Slice => {
 
                idx = write_concrete_part(target, heap, t, idx + 1);
 
                target.push_str("[..]");
 
            }
 
            CTP::Input => {
 
                target.push_str("in<");
 
                idx = write_concrete_part(target, heap, t, idx + 1);
 
                target.push('>');
 
            },
 
            CTP::Output => {
 
                target.push_str("out<");
 
                idx = write_concrete_part(target, heap, t, idx + 1);
 
                target.push('>')
 
            },
 
            CTP::Instance(definition_id, num_embedded) => {
 
                let identifier = heap[*definition_id].identifier();
 
                target.push_str(&String::from_utf8_lossy(&identifier.value));
 
                target.push('<');
 
                for idx_embedded in 0..*num_embedded {
 
                    if idx_embedded != 0 {
 
                        target.push_str(", ");
 
                    }
 
                    idx = write_concrete_part(target, heap, t, idx + 1);
 
                }
 
                target.push('>');
 
            }
 
        }
 

	
 
        idx + 1
 
    }
 

	
 
    write_concrete_part(target, heap, t, 0);
 
}
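
Note: write_concrete_type relies on ConcreteType storing its parts as a pre-order
flattening, with every constructor immediately followed by its embedded types. A
self-contained sketch of that encoding, using a hypothetical mini-enum rather than
the real ConcreteTypePart, for reference:

    // Illustration only: pre-order encoding of a type tree in a flat slice.
    enum Part { Int, Array, Pair }

    // Writes the part at `idx` and returns the index one past its subtree.
    fn print_part(parts: &[Part], idx: usize, out: &mut String) -> usize {
        match parts[idx] {
            Part::Int => { out.push_str("int"); idx + 1 }
            Part::Array => {
                let next = print_part(parts, idx + 1, out); // element type follows
                out.push_str("[]");
                next
            }
            Part::Pair => {
                out.push_str("pair<");
                let mid = print_part(parts, idx + 1, out);  // first embedded type
                out.push_str(", ");
                let next = print_part(parts, mid, out);     // second embedded type
                out.push('>');
                next
            }
        }
    }

    // [Array, Pair, Int, Int] prints as "pair<int, int>[]".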
 

	
 
fn write_expression_parent(target: &mut String, parent: &ExpressionParent) {
 
    use ExpressionParent as EP;
 

	
 
    *target = match parent {
 
        EP::None => String::from("None"),
 
        EP::Memory(id) => format!("MemoryStmt({})", id.0.0.index),
 
        EP::If(id) => format!("IfStmt({})", id.0.index),
 
        EP::While(id) => format!("WhileStmt({})", id.0.index),
 
        EP::Return(id) => format!("ReturnStmt({})", id.0.index),
 
        EP::Assert(id) => format!("AssertStmt({})", id.0.index),
 
        EP::New(id) => format!("NewStmt({})", id.0.index),
 
        EP::ExpressionStmt(id) => format!("ExprStmt({})", id.0.index),
 
        EP::Expression(id, idx) => format!("Expr({}, {})", id.index, idx)
 
    };
 
}
 
\ No newline at end of file
src/protocol/eval.rs
Show inline comments
 
@@ -1322,388 +1322,391 @@ impl Store {
 
        h: &Heap,
 
        ctx: &mut EvalContext,
 
        lexpr: ExpressionId,
 
        value: Value,
 
    ) -> EvalResult {
 
        match &h[lexpr] {
 
            Expression::Variable(var) => {
 
                let var = var.declaration.unwrap();
 
                // Ensure value is compatible with type of variable
 
                let parser_type_id = match &h[var] {
 
                    Variable::Local(v) => v.parser_type,
 
                    Variable::Parameter(v) => v.parser_type
 
                };
 
                let parser_type = &h[parser_type_id];
 
                assert!(value.is_type_compatible(h, parser_type));
 
                // Overwrite mapping
 
                self.map.insert(var, value.clone());
 
                Ok(value)
 
            }
 
            Expression::Indexing(indexing) => {
 
                // Evaluate index expression, which must be some integral type
 
                let index = self.eval(h, ctx, indexing.index)?;
 
                // Mutable reference to the subject
 
                let subject;
 
                match &h[indexing.subject] {
 
                    Expression::Variable(var) => {
 
                        let var = var.declaration.unwrap();
 
                        subject = self.map.get_mut(&var).unwrap();
 
                    }
 
                    _ => unreachable!(),
 
                }
 
                match subject.set(&index, &value) {
 
                    Some(value) => Ok(value),
 
                    None => Err(EvalContinuation::Inconsistent),
 
                }
 
            }
 
            _ => unimplemented!("{:?}", h[lexpr]),
 
        }
 
    }
 
    fn get(&mut self, h: &Heap, ctx: &mut EvalContext, rexpr: ExpressionId) -> EvalResult {
 
        match &h[rexpr] {
 
            Expression::Variable(var) => {
 
                let var_id = var.declaration.unwrap();
 
                let value = self
 
                    .map
 
                    .get(&var_id)
 
                    .expect(&format!("Uninitialized variable {:?}", String::from_utf8_lossy(&var.identifier.value)));
 
                Ok(value.clone())
 
            }
 
            Expression::Indexing(indexing) => {
 
                // Evaluate index expression, which must be some integral type
 
                let index = self.eval(h, ctx, indexing.index)?;
 
                // Reference to subject
 
                let subject;
 
                match &h[indexing.subject] {
 
                    Expression::Variable(var) => {
 
                        let var = var.declaration.unwrap();
 
                        subject = self.map.get(&var).unwrap();
 
                    }
 
                    q => unreachable!("Reached {:?}", q),
 
                }
 
                match subject.get(&index) {
 
                    Some(value) => Ok(value),
 
                    None => Err(EvalContinuation::Inconsistent),
 
                }
 
            }
 
            Expression::Select(selecting) => {
 
                // Reference to subject
 
                let subject;
 
                match &h[selecting.subject] {
 
                    Expression::Variable(var) => {
 
                        let var = var.declaration.unwrap();
 
                        subject = self.map.get(&var).unwrap();
 
                    }
 
                    q => unreachable!("Reached {:?}", q),
 
                }
 
                match subject.length() {
 
                    Some(value) => Ok(value),
 
                    None => Err(EvalContinuation::Inconsistent),
 
                }
 
            }
 
            _ => unimplemented!("{:?}", h[rexpr]),
 
        }
 
    }
 
    fn eval(&mut self, h: &Heap, ctx: &mut EvalContext, expr: ExpressionId) -> EvalResult {
 
        match &h[expr] {
 
            Expression::Assignment(expr) => {
 
                let value = self.eval(h, ctx, expr.right)?;
 
                match expr.operation {
 
                    AssignmentOperator::Set => {
 
                        self.update(h, ctx, expr.left, value.clone())?;
 
                    }
 
                    AssignmentOperator::Added => {
 
                        let old = self.get(h, ctx, expr.left)?;
 
                        self.update(h, ctx, expr.left, old.plus(&value))?;
 
                    }
 
                    AssignmentOperator::Subtracted => {
 
                        let old = self.get(h, ctx, expr.left)?;
 
                        self.update(h, ctx, expr.left, old.minus(&value))?;
 
                    }
 
                    _ => unimplemented!("{:?}", expr),
 
                }
 
                Ok(value)
 
            }
 
            Expression::Conditional(expr) => {
 
                let test = self.eval(h, ctx, expr.test)?;
 
                if test.as_boolean().0 {
 
                    self.eval(h, ctx, expr.true_expression)
 
                } else {
 
                    self.eval(h, ctx, expr.false_expression)
 
                }
 
            }
 
            Expression::Binary(expr) => {
 
                let left = self.eval(h, ctx, expr.left)?;
 
                let right;
 
                match expr.operation {
 
                    BinaryOperator::LogicalAnd => {
 
                        if left.as_boolean().0 == false {
 
                            return Ok(left);
 
                        }
 
                        right = self.eval(h, ctx, expr.right)?;
 
                        right.as_boolean(); // panics if not a boolean
 
                        return Ok(right);
 
                    }
 
                    BinaryOperator::LogicalOr => {
 
                        if left.as_boolean().0 == true {
 
                            return Ok(left);
 
                        }
 
                        right = self.eval(h, ctx, expr.right)?;
 
                        right.as_boolean(); // panics if not a boolean
 
                        return Ok(right);
 
                    }
 
                    _ => {}
 
                }
 
                right = self.eval(h, ctx, expr.right)?;
 
                match expr.operation {
 
                    BinaryOperator::Equality => Ok(left.eq(&right)),
 
                    BinaryOperator::Inequality => Ok(left.neq(&right)),
 
                    BinaryOperator::LessThan => Ok(left.lt(&right)),
 
                    BinaryOperator::LessThanEqual => Ok(left.lte(&right)),
 
                    BinaryOperator::GreaterThan => Ok(left.gt(&right)),
 
                    BinaryOperator::GreaterThanEqual => Ok(left.gte(&right)),
 
                    BinaryOperator::Remainder => Ok(left.modulus(&right)),
 
                    BinaryOperator::Add => Ok(left.plus(&right)),
 
                    _ => unimplemented!("{:?}", expr.operation),
 
                }
 
            }
 
            Expression::Unary(expr) => {
 
                let mut value = self.eval(h, ctx, expr.expression)?;
 
                match expr.operation {
 
                    UnaryOperation::PostIncrement => {
 
                        self.update(h, ctx, expr.expression, value.plus(&ONE))?;
 
                    }
 
                    UnaryOperation::PreIncrement => {
 
                        value = value.plus(&ONE);
 
                        self.update(h, ctx, expr.expression, value.clone())?;
 
                    }
 
                    UnaryOperation::PostDecrement => {
 
                        self.update(h, ctx, expr.expression, value.minus(&ONE))?;
 
                    }
 
                    UnaryOperation::PreDecrement => {
 
                        value = value.minus(&ONE);
 
                        self.update(h, ctx, expr.expression, value.clone())?;
 
                    }
 
                    _ => unimplemented!(),
 
                }
 
                Ok(value)
 
            }
 
            Expression::Indexing(expr) => self.get(h, ctx, expr.this.upcast()),
 
            Expression::Slicing(_expr) => unimplemented!(),
 
            Expression::Select(expr) => self.get(h, ctx, expr.this.upcast()),
 
            Expression::Array(expr) => {
 
                let mut elements = Vec::new();
 
                for &elem in expr.elements.iter() {
 
                    elements.push(self.eval(h, ctx, elem)?);
 
                }
 
                todo!()
 
            }
 
            Expression::Constant(expr) => Ok(Value::from_constant(&expr.value)),
 
            Expression::Call(expr) => match &expr.method {
 
                Method::Get => {
 
                    assert_eq!(1, expr.arguments.len());
 
                    let value = self.eval(h, ctx, expr.arguments[0])?;
 
                    match ctx.get(value.clone()) {
 
                        None => Err(EvalContinuation::BlockGet(value)),
 
                        Some(result) => Ok(result),
 
                    }
 
                }
 
                Method::Put => {
 
                    assert_eq!(2, expr.arguments.len());
 
                    let port_value = self.eval(h, ctx, expr.arguments[0])?;
 
                    let msg_value = self.eval(h, ctx, expr.arguments[1])?;
 
                    println!("DEBUG: Handiling put({:?}, {:?})", port_value, msg_value);
 
                    if ctx.did_put(port_value.clone()) {
 
                        println!("DEBUG: Already put...");
 
                        // Return bogus, replacing this at some point anyway
 
                        Ok(Value::Message(MessageValue(None)))
 
                    } else {
 
                        println!("DEBUG: Did not yet put...");
 
                        Err(EvalContinuation::Put(port_value, msg_value))
 
                    }
 
                }
 
                Method::Fires => {
 
                    assert_eq!(1, expr.arguments.len());
 
                    let value = self.eval(h, ctx, expr.arguments[0])?;
 
                    match ctx.fires(value.clone()) {
 
                        None => Err(EvalContinuation::BlockFires(value)),
 
                        Some(result) => Ok(result),
 
                    }
 
                }
 
                Method::Create => {
 
                    assert_eq!(1, expr.arguments.len());
 
                    let length = self.eval(h, ctx, expr.arguments[0])?;
 
                    Ok(Value::create_message(length))
 
                }
 
                Method::Symbolic(_symbol) => unimplemented!(),
 
            },
 
            Expression::Variable(expr) => self.get(h, ctx, expr.this.upcast()),
 
        }
 
    }
 
}
 

	
 
type EvalResult = Result<Value, EvalContinuation>;
 
pub enum EvalContinuation {
 
    Stepping,
 
    Inconsistent,
 
    Terminal,
 
    SyncBlockStart,
 
    SyncBlockEnd,
 
    NewComponent(DefinitionId, Vec<Value>),
 
    BlockFires(Value),
 
    BlockGet(Value),
 
    Put(Value, Value),
 
}
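
Note: every non-terminal outcome of Prompt::step (defined below) is reported through
Err(EvalContinuation::...), so a caller is expected to drive it in a loop and dispatch
on the continuation. A minimal sketch of such a driver, assuming the caller already
owns the Heap and an EvalContext; how the actual connector schedules components is not
part of this changeset:

    // Sketch only: step a prompt until it finishes, blocks, or fails.
    fn drive(prompt: &mut Prompt, h: &Heap, ctx: &mut EvalContext) {
        loop {
            match prompt.step(h, ctx) {
                Ok(_value) => break,                         // component returned a value
                Err(EvalContinuation::Stepping) => continue, // keep stepping
                Err(EvalContinuation::SyncBlockStart)
                | Err(EvalContinuation::SyncBlockEnd) => continue, // sync bookkeeping goes here
                Err(EvalContinuation::BlockGet(_port))
                | Err(EvalContinuation::BlockFires(_port)) => break, // wait on the scheduler
                Err(EvalContinuation::Put(_port, _msg)) => break,    // hand message to runtime
                Err(EvalContinuation::NewComponent(_def, _args)) => break, // spawn, then resume
                Err(EvalContinuation::Inconsistent)
                | Err(EvalContinuation::Terminal) => break,
            }
        }
    }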
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub(crate) struct Prompt {
 
    definition: DefinitionId,
 
    store: Store,
 
    position: Option<StatementId>,
 
}
 

	
 
impl Prompt {
 
    pub fn new(h: &Heap, def: DefinitionId, args: &Vec<Value>) -> Self {
 
        let mut prompt =
 
            Prompt { definition: def, store: Store::new(), position: Some((&h[def]).body()) };
 
        prompt.set_arguments(h, args);
 
        prompt
 
    }
 
    fn set_arguments(&mut self, h: &Heap, args: &Vec<Value>) {
 
        let def = &h[self.definition];
 
        let params = def.parameters();
 
        assert_eq!(params.len(), args.len());
 
        for (param, value) in params.iter().zip(args.iter()) {
 
            let hparam = &h[*param];
 
            let parser_type = &h[hparam.parser_type];
 
            assert!(value.is_type_compatible(h, parser_type));
 
            self.store.initialize(h, param.upcast(), value.clone());
 
        }
 
    }
 
    pub fn step(&mut self, h: &Heap, ctx: &mut EvalContext) -> EvalResult {
 
        if self.position.is_none() {
 
            return Err(EvalContinuation::Terminal);
 
        }
 

	
 
        let stmt = &h[self.position.unwrap()];
 
        match stmt {
 
            Statement::Block(stmt) => {
 
                // Continue to first statement
 
                self.position = Some(stmt.first());
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Local(stmt) => {
 
                match stmt {
 
                    LocalStatement::Memory(stmt) => {
 
                        // Evaluate initial expression
 
                        let value = self.store.eval(h, ctx, stmt.initial)?;
 
                        // Update store
 
                        self.store.initialize(h, stmt.variable.upcast(), value);
 
                    }
 
                    LocalStatement::Channel(stmt) => {
 
                        let [from, to] = ctx.new_channel();
 
                        // Store the values in the declared variables
 
                        self.store.initialize(h, stmt.from.upcast(), from);
 
                        self.store.initialize(h, stmt.to.upcast(), to);
 
                    }
 
                }
 
                // Continue to next statement
 
                self.position = stmt.next();
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Skip(stmt) => {
 
                // Continue to next statement
 
                self.position = stmt.next;
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Labeled(stmt) => {
 
                // Continue to next statement
 
                self.position = Some(stmt.body);
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::If(stmt) => {
 
                // Evaluate test
 
                let value = self.store.eval(h, ctx, stmt.test)?;
 
                // Continue with either branch
 
                if value.as_boolean().0 {
 
                    self.position = Some(stmt.true_body);
 
                } else {
 
                    self.position = Some(stmt.false_body);
 
                }
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::EndIf(stmt) => {
 
                // Continue to next statement
 
                self.position = stmt.next;
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::While(stmt) => {
 
                // Evaluate test
 
                let value = self.store.eval(h, ctx, stmt.test)?;
 
                // Either continue with body, or go to next
 
                if value.as_boolean().0 {
 
                    self.position = Some(stmt.body);
 
                } else {
 
                    self.position = stmt.end_while.map(|x| x.upcast());
 
                }
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::EndWhile(stmt) => {
 
                // Continue to next statement
 
                self.position = stmt.next;
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Synchronous(stmt) => {
 
                // Continue to next statement, and signal upward
 
                self.position = Some(stmt.body);
 
                Err(EvalContinuation::SyncBlockStart)
 
            }
 
            Statement::EndSynchronous(stmt) => {
 
                // Continue to next statement, and signal upward
 
                self.position = stmt.next;
 
                Err(EvalContinuation::SyncBlockEnd)
 
            }
 
            Statement::Break(stmt) => {
 
                // Continue to end of while
 
                self.position = stmt.target.map(EndWhileStatementId::upcast);
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Continue(stmt) => {
 
                // Continue to beginning of while
 
                self.position = stmt.target.map(WhileStatementId::upcast);
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::Assert(stmt) => {
 
                // Evaluate expression
 
                let value = self.store.eval(h, ctx, stmt.expression)?;
 
                if value.as_boolean().0 {
 
                    // Continue to next statement
 
                    self.position = stmt.next;
 
                    Err(EvalContinuation::Stepping)
 
                } else {
 
                    // Assertion failed: inconsistent
 
                    Err(EvalContinuation::Inconsistent)
 
                }
 
            }
 
            Statement::Return(stmt) => {
 
                // Evaluate expression
 
                let value = self.store.eval(h, ctx, stmt.expression)?;
 
                // Done with evaluation
 
                Ok(value)
 
            }
 
            Statement::Goto(stmt) => {
 
                // Continue to target
 
                self.position = stmt.target.map(|x| x.upcast());
 
                Err(EvalContinuation::Stepping)
 
            }
 
            Statement::New(stmt) => {
 
                let expr = &h[stmt.expression];
 
                let mut args = Vec::new();
 
                for &arg in expr.arguments.iter() {
 
                    let value = self.store.eval(h, ctx, arg)?;
 
                    args.push(value);
 
                }
 
                self.position = stmt.next;
 
                match &expr.method {
 
                    Method::Symbolic(symbolic) => {
 
                         Err(EvalContinuation::NewComponent(symbolic.definition.unwrap(), args))
 
                    },
 
                    _ => unreachable!("not a symbolic call expression")
 
                }
 
            }
src/protocol/inputsource.rs
Show inline comments
 
@@ -23,414 +23,413 @@ primitive sync(in<msg> i, out<msg> o) {
 
}
 
primitive alternator(in<msg> i, out<msg> l, out<msg> r) {
 
    while(true) {
 
        synchronous if(fires(i)) put(l, get(i));
 
        synchronous if(fires(i)) put(r, get(i));
 
    }
 
}
 
primitive replicator(in<msg> i, out<msg> l, out<msg> r) {
 
    while(true) synchronous {
 
        if(fires(i)) {
 
            msg m = get(i);
 
            put(l, m);
 
            put(r, m);
 
        }
 
    }
 
}
 
primitive merger(in<msg> l, in<msg> r, out<msg> o) {
 
    while(true) synchronous {
 
        if(fires(l))      put(o, get(l));
 
        else if(fires(r)) put(o, get(r));
 
    }
 
}
 
";
 

	
 
impl InputSource {
 
    // Constructors
 
    pub fn new<R: io::Read, S: ToString>(filename: S, reader: &mut R) -> io::Result<InputSource> {
 
        let mut vec = Vec::new();
 
        reader.read_to_end(&mut vec)?;
 
        vec.extend(STD_LIB_PDL.to_vec());
 
        Ok(InputSource {
 
            filename: filename.to_string(),
 
            input: vec,
 
            line: 1,
 
            column: 1,
 
            offset: 0,
 
        })
 
    }
 
    // Constructor helpers
 
    pub fn from_file(path: &Path) -> io::Result<InputSource> {
 
        let filename = path.file_name();
 
        match filename {
 
            Some(filename) => {
 
                let mut f = File::open(path)?;
 
                InputSource::new(filename.to_string_lossy(), &mut f)
 
            }
 
            None => Err(io::Error::new(io::ErrorKind::NotFound, "Invalid path")),
 
        }
 
    }
 
    pub fn from_string(string: &str) -> io::Result<InputSource> {
 
        let buffer = Box::new(string);
 
        let mut bytes = buffer.as_bytes();
 
        InputSource::new(String::new(), &mut bytes)
 
    }
 
    pub fn from_buffer(buffer: &[u8]) -> io::Result<InputSource> {
 
        InputSource::new(String::new(), &mut Box::new(buffer))
 
    }
 
    // Internal methods
 
    pub fn pos(&self) -> InputPosition {
 
        InputPosition { line: self.line, column: self.column, offset: self.offset }
 
    }
 
    pub fn seek(&mut self, pos: InputPosition) {
 
        debug_assert!(pos.offset < self.input.len());
 
        self.line = pos.line;
 
        self.column = pos.column;
 
        self.offset = pos.offset;
 
    }
 
    // pub fn error<S: ToString>(&self, message: S) -> ParseError {
 
    //     self.pos().parse_error(message)
 
    // }
 
    pub fn is_eof(&self) -> bool {
 
        self.next() == None
 
    }
 

	
 
    pub fn next(&self) -> Option<u8> {
 
        if self.offset < self.input.len() {
 
            Some(self.input[self.offset])
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    pub fn lookahead(&self, pos: usize) -> Option<u8> {
 
        let offset_pos = self.offset + pos;
 
        if offset_pos < self.input.len() {
 
            Some(self.input[offset_pos])
 
        } else {
 
            None
 
        }
 
    }
 

	
 
    pub fn has(&self, to_compare: &[u8]) -> bool {
 
        if self.offset + to_compare.len() <= self.input.len() {
 
            for idx in 0..to_compare.len() {
 
                if to_compare[idx] != self.input[self.offset + idx] {
 
                    return false;
 
                }
 
            }
 

	
 
            true
 
        } else {
 
            false
 
        }
 
    }
 

	
 
    pub fn consume(&mut self) {
 
        match self.next() {
 
            Some(x) if x == b'\r' && self.lookahead(1) != Some(b'\n') || x == b'\n' => {
 
                self.line += 1;
 
                self.offset += 1;
 
                self.column = 1;
 
            }
 
            Some(_) => {
 
                self.offset += 1;
 
                self.column += 1;
 
            }
 
            None => {}
 
        }
 
    }
 
}
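
Note: consume only bumps the line counter on a bare '\r' or on '\n', so a Windows
"\r\n" sequence advances the line exactly once (on the '\n'). An illustrative check,
as it might appear in a test inside this module (from_string appends STD_LIB_PDL to
the buffer, which does not affect the first few characters):

    let mut src = InputSource::from_string("a\r\nb").unwrap();
    assert_eq!(src.next(), Some(b'a'));
    src.consume();                      // 'a'  -> column 2
    src.consume();                      // '\r' -> column 3 ('\n' follows, so no line bump)
    src.consume();                      // '\n' -> line 2, column 1
    let pos = src.pos();
    assert_eq!((pos.line, pos.column), (2, 1));
    assert_eq!(src.next(), Some(b'b'));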
 

	
 
impl fmt::Display for InputSource {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        self.pos().fmt(f)
 
    }
 
}
 

	
 
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
 
pub struct InputPosition {
 
    line: usize,
 
    column: usize,
 
    offset: usize,
 
}
 

	
 
impl InputPosition {
 
    fn context<'a>(&self, source: &'a InputSource) -> &'a [u8] {
 
        let start = self.offset - (self.column - 1);
 
        let mut end = self.offset;
 
        while end < source.input.len() {
 
            let cur = (*source.input)[end];
 
            if cur == b'\n' || cur == b'\r' {
 
                break;
 
            }
 
            end += 1;
 
        }
 
        &source.input[start..end]
 
    }
 
    // fn parse_error<S: ToString>(&self, message: S) -> ParseError {
 
    //     ParseError { position: *self, message: message.to_string(), backtrace: Backtrace::new() }
 
    // }
 
    fn eval_error<S: ToString>(&self, message: S) -> EvalError {
 
        EvalError { position: *self, message: message.to_string(), backtrace: Backtrace::new() }
 
    }
 
}
 

	
 
impl Default for InputPosition {
 
    fn default() -> Self {
 
        Self{ line: 1, column: 1, offset: 0 }
 
    }
 
}
 

	
 
impl fmt::Display for InputPosition {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        write!(f, "{}:{}", self.line, self.column)
 
    }
 
}
 

	
 
pub trait SyntaxElement {
 
    fn position(&self) -> InputPosition;
 
    fn error<S: ToString>(&self, message: S) -> EvalError {
 
        self.position().eval_error(message)
 
    }
 
}
 

	
 
#[derive(Debug)]
 
pub enum ParseErrorType {
 
    Info,
 
    Error
 
}
 

	
 
#[derive(Debug)]
 
pub struct ParseErrorStatement {
 
    error_type: ParseErrorType,
 
    position: InputPosition,
 
    filename: String,
 
    context: String,
 
    message: String,
 
}
 

	
 
impl ParseErrorStatement {
 
    fn from_source(error_type: ParseErrorType, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        // Seek line start and end
 
        debug_assert!(position.column < position.offset);
 
        let line_start = position.offset - (position.column - 1);
 
        let mut line_end = position.offset;
 
        while line_end < source.input.len() && source.input[line_end] != b'\n' {
 
            line_end += 1;
 
        }
 

	
 
        // Compensate for '\r\n'
 
        if line_end > line_start && source.input[line_end - 1] == b'\r' {
 
            line_end -= 1;
 
        }
 

	
 
        Self{
 
            error_type,
 
            position,
 
            filename: source.filename.clone(),
 
            context: String::from_utf8_lossy(&source.input[line_start..line_end]).to_string(),
 
            message: msg.to_string()
 
        }
 
    }
 
}
 

	
 
impl fmt::Display for ParseErrorStatement {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        // Write message
 
        match self.error_type {
 
            ParseErrorType::Info => write!(f, " INFO: ")?,
 
            ParseErrorType::Error => write!(f, "ERROR: ")?,
 
        }
 
        writeln!(f, "{}", &self.message);
 
        writeln!(f, "{}", &self.message)?;
 

	
 
        // Write originating file/line/column
 
        if self.filename.is_empty() {
 
            writeln!(f, " +- at {}:{}", self.position.line, self.position.column)?;
 
        } else {
 
            writeln!(f, " +- at {}:{}:{}", self.filename, self.position.line, self.position.column)?;
 
        }
 

	
 
        // Write source context
 
        writeln!(f, " | ")?;
 
        writeln!(f, " | {}", self.context)?;
 

	
 
        // Write underline indicating where the error occurred
 
        debug_assert!(self.position.column <= self.context.chars().count());
 
        let mut arrow = String::with_capacity(self.context.len() + 3);
 
        arrow.push_str(" | ");
 
        let mut char_col = 1;
 
        for char in self.context.chars() {
 
            if char_col == self.position.column { break; }
 
            if char == '\t' {
 
                arrow.push('\t');
 
            } else {
 
                arrow.push(' ');
 
            }
 

	
 
            char_col += 1;
 
        }
 
        arrow.push('^');
 
        writeln!(f, "{}", arrow)?;
 

	
 
        Ok(())
 
    }
 
}
 

	
 
#[derive(Debug)]
 
pub struct ParseError2 {
 
    statements: Vec<ParseErrorStatement>
 
}
 

	
 
impl fmt::Display for ParseError2 {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        if self.statements.is_empty() {
 
            return Ok(())
 
        }
 

	
 
        self.statements[0].fmt(f)?;
 
        for statement in self.statements.iter().skip(1) {
 
            writeln!(f)?;
 
            statement.fmt(f)?;
 
        }
 

	
 
        Ok(())
 
    }
 
}
 

	
 
impl ParseError2 {
 
    pub fn empty() -> Self {
 
        Self{ statements: Vec::new() }
 
    }
 

	
 
    pub fn new_error(source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        Self{ statements: vec!(ParseErrorStatement::from_source(ParseErrorType::Error, source, position, msg))}
 
    }
 

	
 
    pub fn with_prefixed(mut self, error_type: ParseErrorType, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        self.statements.insert(0, ParseErrorStatement::from_source(error_type, source, position, msg));
 
        self
 
    }
 

	
 
    pub fn with_postfixed(mut self, error_type: ParseErrorType, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        self.statements.push(ParseErrorStatement::from_source(error_type, source, position, msg));
 
        self
 
    }
 

	
 
    pub fn with_postfixed_info(self, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        self.with_postfixed(ParseErrorType::Info, source, position, msg)
 
    }
 
}
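
Note: the builder-style API above composes a multi-statement error. A usage sketch with
an illustrative position, as it might appear inside this module (the first line is
avoided on purpose: ParseErrorStatement::from_source debug_asserts
position.column < position.offset, which never holds for positions on line 1):

    let mut source = InputSource::from_string("composite a() {\n  skip;\n}").unwrap();
    while source.pos().line < 2 { source.consume(); }   // move to the start of line 2
    let pos = source.pos();
    let err = ParseError2::new_error(&source, pos, "example error message")
        .with_postfixed_info(&source, pos, "additional context goes here");
    println!("{}", err);

    // Renders roughly as (per the Display impls above):
    //   ERROR: example error message
    //    +- at 2:1
    //    | 
    //    |   skip;
    //    | ^
    //
    //    INFO: additional context goes here
    //    ...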
 

	
 
#[derive(Debug, Clone)]
 
pub struct EvalError {
 
    position: InputPosition,
 
    message: String,
 
    backtrace: Backtrace,
 
}
 

	
 
impl EvalError {
 
    pub fn new<S: ToString>(position: InputPosition, message: S) -> EvalError {
 
        EvalError { position, message: message.to_string(), backtrace: Backtrace::new() }
 
    }
 
    // Diagnostic methods
 
    pub fn write<A: io::Write>(&self, source: &InputSource, writer: &mut A) -> io::Result<()> {
 
        if !source.filename.is_empty() {
 
            writeln!(
 
                writer,
 
                "Evaluation error at {}:{}: {}",
 
                source.filename, self.position, self.message
 
            )?;
 
        } else {
 
            writeln!(writer, "Evaluation error at {}: {}", self.position, self.message)?;
 
        }
 
        let line = self.position.context(source);
 
        writeln!(writer, "{}", String::from_utf8_lossy(line))?;
 
        let mut arrow: Vec<u8> = Vec::new();
 
        for pos in 1..self.position.column {
 
            let c = line[pos - 1];
 
            if c == b'\t' {
 
                arrow.push(b'\t')
 
            } else {
 
                arrow.push(b' ')
 
            }
 
        }
 
        arrow.push(b'^');
 
        writeln!(writer, "{}", String::from_utf8_lossy(&arrow))
 
    }
 
    pub fn print(&self, source: &InputSource) {
 
        self.write(source, &mut std::io::stdout()).unwrap()
 
    }
 
    pub fn display<'a>(&'a self, source: &'a InputSource) -> DisplayEvalError<'a> {
 
        DisplayEvalError::new(self, source)
 
    }
 
}
 

	
 
impl From<EvalError> for io::Error {
 
    fn from(_: EvalError) -> io::Error {
 
        io::Error::new(io::ErrorKind::InvalidInput, "eval error")
 
    }
 
}
 

	
 
#[derive(Clone, Copy)]
 
pub struct DisplayEvalError<'a> {
 
    error: &'a EvalError,
 
    source: &'a InputSource,
 
}
 

	
 
impl DisplayEvalError<'_> {
 
    fn new<'a>(error: &'a EvalError, source: &'a InputSource) -> DisplayEvalError<'a> {
 
        DisplayEvalError { error, source }
 
    }
 
}
 

	
 
impl fmt::Display for DisplayEvalError<'_> {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        let mut vec: Vec<u8> = Vec::new();
 
        match self.error.write(self.source, &mut vec) {
 
            Err(_) => {
 
                return fmt::Result::Err(fmt::Error);
 
            }
 
            Ok(_) => {}
 
        }
 
        write!(f, "{}", String::from_utf8_lossy(&vec))
 
    }
 
}
 

	
 
// #[cfg(test)]
 
// mod tests {
 
//     use super::*;
 

	
 
//     #[test]
 
//     fn test_from_string() {
 
//         let mut is = InputSource::from_string("#version 100\n").unwrap();
 
//         assert!(is.input.len() == 13);
 
//         assert!(is.line == 1);
 
//         assert!(is.column == 1);
 
//         assert!(is.offset == 0);
 
//         let ps = is.pos();
 
//         assert!(ps.line == 1);
 
//         assert!(ps.column == 1);
 
//         assert!(ps.offset == 0);
 
//         assert!(is.next() == Some(b'#'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'v'));
 
//         assert!(is.lookahead(1) == Some(b'e'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'e'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'r'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b's'));
 
//         is.consume();
 
//         assert!(is.next() == Some(b'i'));
 
//         is.consume();
 
//         {
 
//             let ps = is.pos();
 
//             assert_eq!(b"#version 100", ps.context(&is));
 
//             let er = is.error("hello world!");
 
//             let mut vec: Vec<u8> = Vec::new();
 
//             er.write(&is, &mut vec).unwrap();
 
//             assert_eq!(
 
//                 "Parse error at 1:7: hello world!\n#version 100\n      ^\n",
 
//                 String::from_utf8_lossy(&vec)
 
//             );
src/protocol/lexer.rs
Show inline comments
 
@@ -91,1596 +91,1618 @@ impl Lexer<'_> {
 
            if !(is_vchar(next) || is_wsp(next)) {
 
                return Err(self.error_at_pos("Expected visible character or whitespace"));
 
            }
 
            result.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 
        if next.is_some() {
 
            self.source.consume();
 
        }
 
        if next == Some(b'\r') && self.source.next() == Some(b'\n') {
 
            self.source.consume();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_whitespace(&mut self, expected: bool) -> Result<(), ParseError2> {
 
        let mut found = false;
 
        let mut next = self.source.next();
 
        while next.is_some() {
 
            if next == Some(b' ')
 
                || next == Some(b'\t')
 
                || next == Some(b'\r')
 
                || next == Some(b'\n')
 
            {
 
                self.source.consume();
 
                next = self.source.next();
 
                found = true;
 
                continue;
 
            }
 
            if next == Some(b'/') {
 
                next = self.source.lookahead(1);
 
                if next == Some(b'/') {
 
                    self.source.consume(); // slash
 
                    self.source.consume(); // slash
 
                    self.consume_line()?;
 
                    next = self.source.next();
 
                    found = true;
 
                    continue;
 
                }
 
                if next == Some(b'*') {
 
                    self.source.consume(); // slash
 
                    self.source.consume(); // star
 
                    next = self.source.next();
 
                    while next.is_some() {
 
                        if next == Some(b'*') {
 
                            next = self.source.lookahead(1);
 
                            if next == Some(b'/') {
 
                                self.source.consume(); // star
 
                                self.source.consume(); // slash
 
                                break;
 
                            }
 
                        }
 
                        self.source.consume();
 
                        next = self.source.next();
 
                    }
 
                    next = self.source.next();
 
                    found = true;
 
                    continue;
 
                }
 
            }
 
            break;
 
        }
 
        if expected && !found {
 
            Err(self.error_at_pos("Expected whitespace"))
 
        } else {
 
            Ok(())
 
        }
 
    }
 
    fn consume_any_chars(&mut self) {
 
        if !is_ident_start(self.source.next()) { return }
 
        self.source.consume();
 
        while is_ident_rest(self.source.next()) {
 
            self.source.consume()
 
        }
 
    }
 
    fn has_keyword(&self, keyword: &[u8]) -> bool {
 
        if !self.source.has(keyword) {
 
            return false;
 
        }
 

	
 
        // Word boundary
 
        if let Some(next) = self.source.lookahead(keyword.len()) {
 
            !(next >= b'A' && next <= b'Z' || next >= b'a' && next <= b'z')
 
        } else {
 
            true
 
        }
 
    }
 
    fn consume_keyword(&mut self, keyword: &[u8]) -> Result<(), ParseError2> {
 
        let len = keyword.len();
 
        for i in 0..len {
 
            let expected = Some(lowercase(keyword[i]));
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected keyword '{}'", String::from_utf8_lossy(keyword))));
 
            }
 
            self.source.consume();
 
        }
 
        if let Some(next) = self.source.next() {
 
            if next >= b'A' && next <= b'Z' || next >= b'a' && next <= b'z' || next >= b'0' && next <= b'9' {
 
                return Err(self.error_at_pos(&format!("Expected word boundary after '{}'", String::from_utf8_lossy(keyword))));
 
            }
 
        }
 
        Ok(())
 
    }
 
    fn has_string(&self, string: &[u8]) -> bool {
 
        self.source.has(string)
 
    }
 
    fn consume_string(&mut self, string: &[u8]) -> Result<(), ParseError2> {
 
        let len = string.len();
 
        for i in 0..len {
 
            let expected = Some(string[i]);
 
            let next = self.source.next();
 
            if next != expected {
 
                return Err(self.error_at_pos(&format!("Expected {}", String::from_utf8_lossy(string))));
 
            }
 
            self.source.consume();
 
        }
 
        Ok(())
 
    }
 
    fn consume_ident(&mut self) -> Result<Vec<u8>, ParseError2> {
 
        if !self.has_identifier() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let mut result = Vec::new();
 
        let mut next = self.source.next();
 
        result.push(next.unwrap());
 
        self.source.consume();
 
        next = self.source.next();
 
        while is_ident_rest(next) {
 
            result.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 
        Ok(result)
 
    }
 
    fn has_integer(&mut self) -> bool {
 
        is_integer_start(self.source.next())
 
    }
 
    fn consume_integer(&mut self) -> Result<i64, ParseError2> {
 
        let position = self.source.pos();
 
        let mut data = Vec::new();
 
        let mut next = self.source.next();
 
        while is_integer_rest(next) {
 
            data.push(next.unwrap());
 
            self.source.consume();
 
            next = self.source.next();
 
        }
 

	
 
        let data_len = data.len();
 
        debug_assert_ne!(data_len, 0);
 
        if data_len == 1 {
 
            debug_assert!(data[0] >= b'0' && data[0] <= b'9');
 
            return Ok((data[0] - b'0') as i64);
 
        } else {
 
            // TODO: Fix, u64 should be supported as well
 
            let parsed = if data[1] == b'b' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 2)
 
            } else if data[1] == b'o' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 8)
 
            } else if data[1] == b'x' {
 
                let data = String::from_utf8_lossy(&data[2..]);
 
                i64::from_str_radix(&data, 16)
 
            } else {
 
                // Assume decimal
 
                let data = String::from_utf8_lossy(&data);
 
                i64::from_str_radix(&data, 10)
 
            };
 

	
 
            if let Err(_err) = parsed {
 
                return Err(ParseError2::new_error(&self.source, position, "Invalid integer constant"));
 
            }
 

	
 
            Ok(parsed.unwrap())
 
        }
 
    }
 

	
 
    // Statement keywords
 
    // TODO: Clean up these functions
 
    fn has_statement_keyword(&self) -> bool {
 
        self.has_keyword(b"channel")
 
            || self.has_keyword(b"skip")
 
            || self.has_keyword(b"if")
 
            || self.has_keyword(b"while")
 
            || self.has_keyword(b"break")
 
            || self.has_keyword(b"continue")
 
            || self.has_keyword(b"synchronous")
 
            || self.has_keyword(b"return")
 
            || self.has_keyword(b"assert")
 
            || self.has_keyword(b"goto")
 
            || self.has_keyword(b"new")
 
            || self.has_keyword(b"put") // TODO: @fix, should be a function, even though it has sideeffects
 
    }
 
    fn has_type_keyword(&self) -> bool {
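        // Note: "str" is also accepted by consume_type2, but is (presumably intentionally) not listed here.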
 
        self.has_keyword(b"in")
 
            || self.has_keyword(b"out")
 
            || self.has_keyword(b"msg")
 
            || self.has_keyword(b"boolean")
 
            || self.has_keyword(b"byte")
 
            || self.has_keyword(b"short")
 
            || self.has_keyword(b"int")
 
            || self.has_keyword(b"long")
 
            || self.has_keyword(b"auto")
 
    }
 
    fn has_builtin_keyword(&self) -> bool {
 
        self.has_keyword(b"get")
 
            || self.has_keyword(b"fires")
 
            || self.has_keyword(b"create")
 
            || self.has_keyword(b"length")
 
    }
 
    fn has_reserved(&self) -> bool {
 
        self.has_statement_keyword()
 
            || self.has_type_keyword()
 
            || self.has_builtin_keyword()
 
            || self.has_keyword(b"let")
 
            || self.has_keyword(b"struct")
 
            || self.has_keyword(b"enum")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
            || self.has_keyword(b"null")
 
    }
 

	
 
    // Identifiers
 

	
 
    fn has_identifier(&self) -> bool {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return false;
 
        }
 
        let next = self.source.next();
 
        is_ident_start(next)
 
    }
 
    fn consume_identifier(&mut self) -> Result<Identifier, ParseError2> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let position = self.source.pos();
 
        let value = self.consume_ident()?;
 
        Ok(Identifier{ position, value })
 
    }
 
    fn consume_identifier_spilled(&mut self) -> Result<(), ParseError2> {
 
        if self.has_statement_keyword() || self.has_type_keyword() || self.has_builtin_keyword() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        self.consume_ident()?;
 
        Ok(())
 
    }
 
    fn has_namespaced_identifier(&self) -> bool { 
 
        self.has_identifier() 
 
    }
 
    fn consume_namespaced_identifier(&mut self) -> Result<NamespacedIdentifier, ParseError2> {
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        let position = self.source.pos();
 
        let mut ns_ident = self.consume_ident()?;
 
        let mut num_namespaces = 1;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::");
 
            if num_namespaces >= MAX_NAMESPACES {
 
                return Err(self.error_at_pos("Too many namespaces in identifier"));
 
            }
 
            let new_ident = self.consume_ident()?;
 
            ns_ident.extend(b"::");
 
            ns_ident.extend(new_ident);
 
            num_namespaces += 1;
 
        }
 

	
 
        Ok(NamespacedIdentifier{
 
            position,
 
            value: ns_ident,
 
            num_namespaces,
 
        })
 
    }
 
    fn consume_namespaced_identifier_spilled(&mut self) -> Result<(), ParseError2> {
 
        // TODO: @performance
 
        if self.has_reserved() {
 
            return Err(self.error_at_pos("Encountered reserved keyword"));
 
        }
 

	
 
        self.consume_ident()?;
 
        while self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_ident()?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    // Types and type annotations
 

	
 
    /// Consumes a type definition. When called the input position should be at
 
    /// the type specification. When done the input position will be at the end
 
    /// of the type specifications (hence may be at whitespace).
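    /// Illustrative accepted forms (based on the cases handled below): "msg", "auto", "in<byte>", "ns::Foo<int>[]".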
 
    fn consume_type2(&mut self, h: &mut Heap, allow_inference: bool) -> Result<ParserTypeId, ParseError2> {
 
        // Small helper function to convert in/out polymorphic arguments. Not
 
        // pretty, but the boolean error value is true if the error is due to
 
        // inference not being allowed.
 
        let reduce_port_poly_args = |
 
            heap: &mut Heap,
 
            port_pos: &InputPosition,
 
            args: Vec<ParserTypeId>,
 
        | -> Result<ParserTypeId, bool> {
 
            match args.len() {
 
                0 => if allow_inference {  
 
                    Ok(heap.alloc_parser_type(|this| ParserType{
 
                        this,
 
                        pos: port_pos.clone(),
 
                        variant: ParserTypeVariant::Inferred
 
                    }))
 
                } else {
 
                    Err(true)
 
                },
 
                1 => Ok(args[0]),
 
                _ => Err(false)
 
            }
 
        };
 

	
 
        // Consume the type
 
        let pos = self.source.pos();
 
        let parser_type_variant = if self.has_keyword(b"msg") {
 
            self.consume_keyword(b"msg")?;
 
            ParserTypeVariant::Message
 
        } else if self.has_keyword(b"boolean") {
 
            self.consume_keyword(b"boolean")?;
 
            ParserTypeVariant::Bool
 
        } else if self.has_keyword(b"byte") {
 
            self.consume_keyword(b"byte")?;
 
            ParserTypeVariant::Byte
 
        } else if self.has_keyword(b"short") {
 
            self.consume_keyword(b"short")?;
 
            ParserTypeVariant::Short
 
        } else if self.has_keyword(b"int") {
 
            self.consume_keyword(b"int")?;
 
            ParserTypeVariant::Int
 
        } else if self.has_keyword(b"long") {
 
            self.consume_keyword(b"long")?;
 
            ParserTypeVariant::Long
 
        } else if self.has_keyword(b"str") {
 
            self.consume_keyword(b"str")?;
 
            ParserTypeVariant::String
 
        } else if self.has_keyword(b"auto") {
 
            if !allow_inference {
 
                return Err(ParseError2::new_error(
 
                        &self.source, pos,
 
                        "Type inference is not allowed here"
 
                ));
 
            }
 

	
 
            self.consume_keyword(b"auto")?;
 
            ParserTypeVariant::Inferred
 
        } else if self.has_keyword(b"in") {
 
            // TODO: @cleanup: not particularly neat to have this special case
 
            //  where we enforce polyargs in the parser-phase
 
            // TODO: @hack, temporarily allow inferred port values
 
            self.consume_keyword(b"in")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?;
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error|  {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'in' only allows for 1 polymorphic argument"
 
                    };
 
                    ParseError2::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Input(poly_arg)
 
        } else if self.has_keyword(b"out") {
 
            // TODO: @hack, temporarily allow inferred port values
 
            self.consume_keyword(b"out")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?;
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error| {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
                    } else {
 
                        "Type 'out' only allows for 1 polymorphic argument, but {} were specified"
 
                    };
 
                    ParseError2::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Output(poly_arg)
 
        } else {
 
            // Must be a symbolic type
 
            let identifier = self.consume_namespaced_identifier()?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?;
 
            ParserTypeVariant::Symbolic(SymbolicParserType{identifier, poly_args, variant: None})
 
        };
 

	
 
        // If the type was a basic type (not supporting polymorphic type
 
        // arguments), then we make sure the user did not specify any of them.
 
        let mut backup_pos = self.source.pos();
 
        if !parser_type_variant.supports_polymorphic_args() {
 
            self.consume_whitespace(false)?;
 
            if let Some(b'<') = self.source.next() {
 
                return Err(ParseError2::new_error(
 
                    &self.source, self.source.pos(),
 
                    "This type does not allow polymorphic arguments"
 
                ));
 
            }
 

	
 
            self.source.seek(backup_pos);
 
        }
 

	
 
        let mut parser_type_id = h.alloc_parser_type(|this| ParserType{
 
            this, pos, variant: parser_type_variant
 
        });
 

	
 
        // If we're dealing with arrays, then we need to wrap the currently
 
        // parsed type in array types
 
        self.consume_whitespace(false)?;
 
        while let Some(b'[') = self.source.next() {
 
            let pos = self.source.pos();
 
            self.source.consume();
 
            self.consume_whitespace(false)?;
 
            if let Some(b']') = self.source.next() {
 
                // Type is wrapped in an array
 
                self.source.consume();
 
                parser_type_id = h.alloc_parser_type(|this| ParserType{
 
                    this, pos, variant: ParserTypeVariant::Array(parser_type_id)
 
                });
 
                backup_pos = self.source.pos();
 

	
 
                // In case we're dealing with another array
 
                self.consume_whitespace(false)?;
 
            } else {
 
                return Err(ParseError2::new_error(
 
                    &self.source, pos,
 
                    "Expected a closing ']'"
 
                ));
 
            }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        Ok(parser_type_id)
 
    }
 

	
 
    /// Attempts to consume a type without returning it. If it doesn't encounter
 
    /// a well-formed type, then the input position is left at a "random"
 
    /// position.
 
    fn maybe_consume_type_spilled_without_pos_recovery(&mut self) -> bool {
 
        // Consume type identifier
 
        if self.has_type_keyword() {
 
            self.consume_any_chars();
 
        } else {
 
            let ident = self.consume_namespaced_identifier();
 
            if ident.is_err() { return false; }
 
        }
 

	
 
        // Consume any polymorphic arguments that follow the type identifier
 
        if self.consume_whitespace(false).is_err() { return false; }
 
        if !self.maybe_consume_poly_args_spilled_without_pos_recovery() { return false; }
 

	
 
        // Consume any array specifiers. Make sure we always leave the input
 
        // position at the end of the last array specifier if we do find a
 
        // valid type
 
        let mut backup_pos = self.source.pos();
 

	
 
        if self.consume_whitespace(false).is_err() { return false; }
 
        while let Some(b'[') = self.source.next() {
 
            self.source.consume();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
            if self.source.next() != Some(b']') { return false; }
 
            self.source.consume();
 
            backup_pos = self.source.pos();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return true;
 
    }
 

	
 
    fn maybe_consume_type_spilled(&mut self) -> bool {
 
        let backup_pos = self.source.pos();
 
        if !self.maybe_consume_type_spilled_without_pos_recovery() {
 
            self.source.seek(backup_pos);
 
            return false;
 
        }
 

	
 
        return true;
 
    }
 

	
 
    /// Attempts to consume polymorphic arguments without returning them. If it
 
    /// doesn't encounter well-formed polymorphic arguments, then the input
 
    /// position is left at a "random" position.
 
    fn maybe_consume_poly_args_spilled_without_pos_recovery(&mut self) -> bool {
 
        if let Some(b'<') = self.source.next() {
 
            self.source.consume();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
            loop {
 
                if !self.maybe_consume_type_spilled_without_pos_recovery() { return false; }
 
                if self.consume_whitespace(false).is_err() { return false; }
 
                let has_comma = self.source.next() == Some(b',');
 
                if has_comma {
 
                    self.source.consume();
 
                    if self.consume_whitespace(false).is_err() { return false; }
 
                }
 
                if let Some(b'>') = self.source.next() {
 
                    self.source.consume();
 
                    break;
 
                } else if !has_comma {
 
                    return false;
 
                }
 
            }
 
        }
 

	
 
        return true;
 
    }
 

	
 
    /// Consumes polymorphic arguments and its delimiters if specified. The
 
    /// input position may be at whitespace. If polyargs are present then the
 
    /// whitespace and the args are consumed and the input position will be
 
    /// placed after the polyarg list. If polyargs are not present then the
 
    /// input position will remain unmodified and an empty vector will be
 
    /// returned.
 
    ///
 
    /// Polymorphic arguments represent the specification of the parametric
 
    /// types of a polymorphic type: they specify the value of the polymorphic
 
    /// type's polymorphic variables.
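    /// For example, when inference is allowed, "<byte, ns::Foo<auto>>" would yield two entries (illustrative).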
 
    fn consume_polymorphic_args(&mut self, h: &mut Heap, allow_inference: bool) -> Result<Vec<ParserTypeId>, ParseError2> {
 
        let backup_pos = self.source.pos();
 
        self.consume_whitespace(false)?;
 
        if let Some(b'<') = self.source.next() {
 
            // Has polymorphic args, at least one type must be specified
 
            self.source.consume();
 
            self.consume_whitespace(false)?;
 
            let mut poly_args = Vec::new();
 

	
 
            loop {
 
                // TODO: @cleanup, remove the no_more_types var
 
                poly_args.push(self.consume_type2(h, allow_inference)?);
 
                self.consume_whitespace(false)?;
 

	
 
                let has_comma = self.source.next() == Some(b',');
 
                if has_comma {
 
                    // A trailing comma is permitted: after consuming the comma we
 
                    // may immediately encounter the closing angle bracket instead
 
                    // of another type.
 
                    self.source.consume();
 
                    self.consume_whitespace(false)?;
 
                }
 

	
 
                if let Some(b'>') = self.source.next() {
 
                    self.source.consume();
 
                    break;
 
                } else if !has_comma {
 
                    return Err(ParseError2::new_error(
 
                        &self.source, self.source.pos(),
 
                        "Expected the end of the polymorphic argument list"
 
                    ))
 
                }
 
            }
 

	
 
            Ok(poly_args)
 
        } else {
 
            // No polymorphic args
 
            self.source.seek(backup_pos);
 
            Ok(vec!())
 
        }
 
    }
 

	
 
    /// Consumes polymorphic variables. These are identifiers that are used
 
    /// within polymorphic types. The input position may be at whitespace. If
 
    /// polymorphic variables are present then the whitespace, wrapping
 
    /// delimiters and the polymorphic variables are consumed. Otherwise the
 
    /// input position will stay where it is. If no polymorphic variables are
 
    /// present then an empty vector will be returned.
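    /// For example, the "<T1, T2>" in a polymorphic definition yields the identifiers T1 and T2 (illustrative).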
 
    fn consume_polymorphic_vars(&mut self) -> Result<Vec<Identifier>, ParseError2> {
 
        let backup_pos = self.source.pos();
 
        self.consume_whitespace(false)?;
 
        if let Some(b'<') = self.source.next() {
 
            // Found the opening delimiter, we want at least one polyvar
 
            self.source.consume();
 
            self.consume_whitespace(false)?;
 
            let mut poly_vars = Vec::new();
 

	
 
            loop {
 
                poly_vars.push(self.consume_identifier()?);
 
                self.consume_whitespace(false)?;
 

	
 
                let has_comma = self.source.next() == Some(b',');
 
                if has_comma {
 
                    // We may get another variable
 
                    self.source.consume();
 
                    self.consume_whitespace(false)?;
 
                }
 

	
 
                if let Some(b'>') = self.source.next() {
 
                    self.source.consume();
 
                    break;
 
                } else if !has_comma {
 
                    return Err(ParseError2::new_error(
 
                        &self.source, self.source.pos(),
 
                        "Expected the end of the polymorphic variable list"
 
                    ))
 
                }
 
            }
 

	
 
            Ok(poly_vars)
 
        } else {
 
            // No polymorphic args
 
            self.source.seek(backup_pos);
 
            Ok(vec!())
 
        }
 
    }
 

	
 
    // Parameters
 

	
 
    fn consume_parameter(&mut self, h: &mut Heap) -> Result<ParameterId, ParseError2> {
 
        let parser_type = self.consume_type2(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let position = self.source.pos();
 
        let identifier = self.consume_identifier()?;
 
        let id =
 
            h.alloc_parameter(|this| Parameter { this, position, parser_type, identifier });
 
        Ok(id)
 
    }
 
    fn consume_parameters(
 
        &mut self,
 
        h: &mut Heap,
 
        params: &mut Vec<ParameterId>,
 
    ) -> Result<(), ParseError2> {
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b")") {
 
            while self.source.next().is_some() {
 
                params.push(self.consume_parameter(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b")") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 
        self.consume_string(b")")?;
 

	
 
        Ok(())
 
    }
 

	
 
    // ====================
 
    // Expressions
 
    // ====================
 

	
 
    fn consume_paren_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        let result = self.consume_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b")")?;
 
        Ok(result)
 
    }
 
    fn consume_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested expression"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_assignment_expression(h);
 
        self.level -= 1;
 
        result
 
    }
 
    fn consume_assignment_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let result = self.consume_conditional_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        if self.has_assignment_operator() {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation = self.consume_assignment_operator()?;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_expression(h)?;
 
            Ok(h.alloc_assignment_expression(|this| AssignmentExpression {
 
                this,
 
                position,
 
                left,
 
                operation,
 
                right,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            })
 
            .upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 
    fn has_assignment_operator(&self) -> bool {
 
        self.has_string(b"=")
 
            || self.has_string(b"*=")
 
            || self.has_string(b"/=")
 
            || self.has_string(b"%=")
 
            || self.has_string(b"+=")
 
            || self.has_string(b"-=")
 
            || self.has_string(b"<<=")
 
            || self.has_string(b">>=")
 
            || self.has_string(b"&=")
 
            || self.has_string(b"^=")
 
            || self.has_string(b"|=")
 
    }
 
    fn consume_assignment_operator(&mut self) -> Result<AssignmentOperator, ParseError2> {
 
        if self.has_string(b"=") {
 
            self.consume_string(b"=")?;
 
            Ok(AssignmentOperator::Set)
 
        } else if self.has_string(b"*=") {
 
            self.consume_string(b"*=")?;
 
            Ok(AssignmentOperator::Multiplied)
 
        } else if self.has_string(b"/=") {
 
            self.consume_string(b"/=")?;
 
            Ok(AssignmentOperator::Divided)
 
        } else if self.has_string(b"%=") {
 
            self.consume_string(b"%=")?;
 
            Ok(AssignmentOperator::Remained)
 
        } else if self.has_string(b"+=") {
 
            self.consume_string(b"+=")?;
 
            Ok(AssignmentOperator::Added)
 
        } else if self.has_string(b"-=") {
 
            self.consume_string(b"-=")?;
 
            Ok(AssignmentOperator::Subtracted)
 
        } else if self.has_string(b"<<=") {
 
            self.consume_string(b"<<=")?;
 
            Ok(AssignmentOperator::ShiftedLeft)
 
        } else if self.has_string(b">>=") {
 
            self.consume_string(b">>=")?;
 
            Ok(AssignmentOperator::ShiftedRight)
 
        } else if self.has_string(b"&=") {
 
            self.consume_string(b"&=")?;
 
            Ok(AssignmentOperator::BitwiseAnded)
 
        } else if self.has_string(b"^=") {
 
            self.consume_string(b"^=")?;
 
            Ok(AssignmentOperator::BitwiseXored)
 
        } else if self.has_string(b"|=") {
 
            self.consume_string(b"|=")?;
 
            Ok(AssignmentOperator::BitwiseOred)
 
        } else {
 
            Err(self.error_at_pos("Expected assignment operator"))
 
        }
 
    }
 
    fn consume_conditional_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let result = self.consume_concat_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        if self.has_string(b"?") {
 
            let position = self.source.pos();
 
            let test = result;
 
            self.consume_string(b"?")?;
 
            self.consume_whitespace(false)?;
 
            let true_expression = self.consume_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            self.consume_string(b":")?;
 
            self.consume_whitespace(false)?;
 
            let false_expression = self.consume_expression(h)?;
 
            Ok(h.alloc_conditional_expression(|this| ConditionalExpression {
 
                this,
 
                position,
 
                test,
 
                true_expression,
 
                false_expression,
 
                parent: ExpressionParent::None,
 
                concrete_type: ConcreteType::default(),
 
            })
 
            .upcast())
 
        } else {
 
            Ok(result)
 
        }
 
    }
 
    fn consume_concat_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_lor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"@") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"@")?;
 
            let operation = BinaryOperator::Concatenate;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_lor_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_lor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_land_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"||") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"||")?;
 
            let operation = BinaryOperator::LogicalOr;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_land_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_land_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_bor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"&&") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"&&")?;
 
            let operation = BinaryOperator::LogicalAnd;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_bor_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_bor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_xor_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"|") && !self.has_string(b"||") && !self.has_string(b"|=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"|")?;
 
            let operation = BinaryOperator::BitwiseOr;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_xor_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_xor_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_band_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"^") && !self.has_string(b"^=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"^")?;
 
            let operation = BinaryOperator::BitwiseXor;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_band_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_band_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_eq_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"&") && !self.has_string(b"&&") && !self.has_string(b"&=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            self.consume_string(b"&")?;
 
            let operation = BinaryOperator::BitwiseAnd;
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_eq_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_eq_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_rel_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"==") || self.has_string(b"!=") {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"==") {
 
                self.consume_string(b"==")?;
 
                operation = BinaryOperator::Equality;
 
            } else {
 
                self.consume_string(b"!=")?;
 
                operation = BinaryOperator::Inequality;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_rel_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_rel_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_shift_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"<=")
 
            || self.has_string(b">=")
 
            || self.has_string(b"<") && !self.has_string(b"<<=")
 
            || self.has_string(b">") && !self.has_string(b">>=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"<=") {
 
                self.consume_string(b"<=")?;
 
                operation = BinaryOperator::LessThanEqual;
 
            } else if self.has_string(b">=") {
 
                self.consume_string(b">=")?;
 
                operation = BinaryOperator::GreaterThanEqual;
 
            } else if self.has_string(b"<") {
 
                self.consume_string(b"<")?;
 
                operation = BinaryOperator::LessThan;
 
            } else {
 
                self.consume_string(b">")?;
 
                operation = BinaryOperator::GreaterThan;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_shift_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_shift_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_add_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"<<") && !self.has_string(b"<<=")
 
            || self.has_string(b">>") && !self.has_string(b">>=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"<<") {
 
                self.consume_string(b"<<")?;
 
                operation = BinaryOperator::ShiftLeft;
 
            } else {
 
                self.consume_string(b">>")?;
 
                operation = BinaryOperator::ShiftRight;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_add_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_add_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_mul_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"+") && !self.has_string(b"+=")
 
            || self.has_string(b"-") && !self.has_string(b"-=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"+") {
 
                self.consume_string(b"+")?;
 
                operation = BinaryOperator::Add;
 
            } else {
 
                self.consume_string(b"-")?;
 
                operation = BinaryOperator::Subtract;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_mul_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_mul_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_prefix_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"*") && !self.has_string(b"*=")
 
            || self.has_string(b"/") && !self.has_string(b"/=")
 
            || self.has_string(b"%") && !self.has_string(b"%=")
 
        {
 
            let position = self.source.pos();
 
            let left = result;
 
            let operation;
 
            if self.has_string(b"*") {
 
                self.consume_string(b"*")?;
 
                operation = BinaryOperator::Multiply;
 
            } else if self.has_string(b"/") {
 
                self.consume_string(b"/")?;
 
                operation = BinaryOperator::Divide;
 
            } else {
 
                self.consume_string(b"%")?;
 
                operation = BinaryOperator::Remainder;
 
            }
 
            self.consume_whitespace(false)?;
 
            let right = self.consume_prefix_expression(h)?;
 
            self.consume_whitespace(false)?;
 
            result = h
 
                .alloc_binary_expression(|this| BinaryExpression {
 
                    this,
 
                    position,
 
                    left,
 
                    operation,
 
                    right,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast();
 
        }
 
        Ok(result)
 
    }
 
    fn consume_prefix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        if self.has_string(b"+")
 
            || self.has_string(b"-")
 
            || self.has_string(b"~")
 
            || self.has_string(b"!")
 
        {
 
            let position = self.source.pos();
 
            let operation;
 
            if self.has_string(b"+") {
 
                self.consume_string(b"+")?;
 
                if self.has_string(b"+") {
 
                    self.consume_string(b"+")?;
 
                    operation = UnaryOperation::PreIncrement;
 
                } else {
 
                    operation = UnaryOperation::Positive;
 
                }
 
            } else if self.has_string(b"-") {
 
                self.consume_string(b"-")?;
 
                if self.has_string(b"-") {
 
                    self.consume_string(b"-")?;
 
                    operation = UnaryOperation::PreDecrement;
 
                } else {
 
                    operation = UnaryOperation::Negative;
 
                }
 
            } else if self.has_string(b"~") {
 
                self.consume_string(b"~")?;
 
                operation = UnaryOperation::BitwiseNot;
 
            } else {
 
                self.consume_string(b"!")?;
 
                operation = UnaryOperation::LogicalNot;
 
            }
 
            self.consume_whitespace(false)?;
 
            if self.level >= MAX_LEVEL {
 
                return Err(self.error_at_pos("Too deeply nested expression"));
 
            }
 
            self.level += 1;
 
            let result = self.consume_prefix_expression(h);
 
            self.level -= 1;
 
            let expression = result?;
 
            return Ok(h
 
                .alloc_unary_expression(|this| UnaryExpression {
 
                    this,
 
                    position,
 
                    operation,
 
                    expression,
 
                    parent: ExpressionParent::None,
 
                    concrete_type: ConcreteType::default(),
 
                })
 
                .upcast());
 
        }
 
        self.consume_postfix_expression(h)
 
    }
 
    fn consume_postfix_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        let mut result = self.consume_primary_expression(h)?;
 
        self.consume_whitespace(false)?;
 
        while self.has_string(b"++")
 
            || self.has_string(b"--")
 
            || self.has_string(b"[")
 
            || (self.has_string(b".") && !self.has_string(b".."))
 
        {
 
            let mut position = self.source.pos();
 
            if self.has_string(b"++") {
 
                self.consume_string(b"++")?;
 
                let operation = UnaryOperation::PostIncrement;
 
                let expression = result;
 
                self.consume_whitespace(false)?;
 
                result = h
 
                    .alloc_unary_expression(|this| UnaryExpression {
 
                        this,
 
                        position,
 
                        operation,
 
                        expression,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            } else if self.has_string(b"--") {
 
                self.consume_string(b"--")?;
 
                let operation = UnaryOperation::PostDecrement;
 
                let expression = result;
 
                self.consume_whitespace(false)?;
 
                result = h
 
                    .alloc_unary_expression(|this| UnaryExpression {
 
                        this,
 
                        position,
 
                        operation,
 
                        expression,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            } else if self.has_string(b"[") {
 
                self.consume_string(b"[")?;
 
                self.consume_whitespace(false)?;
 
                let subject = result;
 
                let index = self.consume_expression(h)?;
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b"..") || self.has_string(b":") {
 
                    position = self.source.pos();
 
                    if self.has_string(b"..") {
 
                        self.consume_string(b"..")?;
 
                    } else {
 
                        self.consume_string(b":")?;
 
                    }
 
                    self.consume_whitespace(false)?;
 
                    let to_index = self.consume_expression(h)?;
 
                    self.consume_whitespace(false)?;
 
                    result = h
 
                        .alloc_slicing_expression(|this| SlicingExpression {
 
                            this,
 
                            position,
 
                            subject,
 
                            from_index: index,
 
                            to_index,
 
                            parent: ExpressionParent::None,
 
                            concrete_type: ConcreteType::default(),
 
                        })
 
                        .upcast();
 
                } else {
 
                    result = h
 
                        .alloc_indexing_expression(|this| IndexingExpression {
 
                            this,
 
                            position,
 
                            subject,
 
                            index,
 
                            parent: ExpressionParent::None,
 
                            concrete_type: ConcreteType::default(),
 
                        })
 
                        .upcast();
 
                }
 
                self.consume_string(b"]")?;
 
                self.consume_whitespace(false)?;
 
            } else {
 
                assert!(self.has_string(b"."));
 
                self.consume_string(b".")?;
 
                self.consume_whitespace(false)?;
 
                let subject = result;
 
                let field;
 
                if self.has_keyword(b"length") {
 
                    self.consume_keyword(b"length")?;
 
                    field = Field::Length;
 
                } else {
 
                    field = Field::Symbolic(self.consume_identifier()?);
 
                }
 
                result = h
 
                    .alloc_select_expression(|this| SelectExpression {
 
                        this,
 
                        position,
 
                        subject,
 
                        field,
 
                        parent: ExpressionParent::None,
 
                        concrete_type: ConcreteType::default(),
 
                    })
 
                    .upcast();
 
            }
 
        }
 
        Ok(result)
 
    }
 
    fn consume_primary_expression(&mut self, h: &mut Heap) -> Result<ExpressionId, ParseError2> {
 
        if self.has_string(b"(") {
 
            return self.consume_paren_expression(h);
 
        }
 
        if self.has_string(b"{") {
 
            return Ok(self.consume_array_expression(h)?.upcast());
 
        }
 
        if self.has_constant()
 
            || self.has_keyword(b"null")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
        {
 
            return Ok(self.consume_constant_expression(h)?.upcast());
 
        }
 
        if self.has_call_expression() {
 
            return Ok(self.consume_call_expression(h)?.upcast());
 
        }
 
        Ok(self.consume_variable_expression(h)?.upcast())
 
    }
 
    fn consume_array_expression(&mut self, h: &mut Heap) -> Result<ArrayExpressionId, ParseError2> {
 
        let position = self.source.pos();
 
        let mut elements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b"}") {
 
            while self.source.next().is_some() {
 
                elements.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b"}") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 
        self.consume_string(b"}")?;
 
        Ok(h.alloc_array_expression(|this| ArrayExpression {
 
            this,
 
            position,
 
            elements,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn has_constant(&self) -> bool {
 
        is_constant(self.source.next())
 
    }
 
    fn consume_constant_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ConstantExpressionId, ParseError2> {
 
        let position = self.source.pos();
 
        let value;
 
        if self.has_keyword(b"null") {
 
            self.consume_keyword(b"null")?;
 
            value = Constant::Null;
 
        } else if self.has_keyword(b"true") {
 
            self.consume_keyword(b"true")?;
 
            value = Constant::True;
 
        } else if self.has_keyword(b"false") {
 
            self.consume_keyword(b"false")?;
 
            value = Constant::False;
 
        } else if self.source.next() == Some(b'\'') {
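            // Character constant: one or more visible characters (or spaces) between single quotes.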
 
            self.source.consume();
 
            let mut data = Vec::new();
 
            let mut next = self.source.next();
 
            while next != Some(b'\'') && (is_vchar(next) || next == Some(b' ')) {
 
                data.push(next.unwrap());
 
                self.source.consume();
 
                next = self.source.next();
 
            }
 
            if next != Some(b'\'') || data.is_empty() {
 
                return Err(self.error_at_pos("Expected character constant"));
 
            }
 
            self.source.consume();
 
            value = Constant::Character(data);
 
        } else {
 
            if !self.has_integer() {
 
                return Err(self.error_at_pos("Expected integer constant"));
 
            }
 

	
 
            value = Constant::Integer(self.consume_integer()?);
 
        }
 
        Ok(h.alloc_constant_expression(|this| ConstantExpression {
 
            this,
 
            position,
 
            value,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn has_call_expression(&mut self) -> bool {
 
        // We need to prevent ambiguity with various operators (because we may
 
        // be specifying polymorphic variables) and variables.
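        // e.g. "ns::foo<int>(x)" should be treated as a call, while "a < b" should not (illustrative).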
 
        if self.has_builtin_keyword() {
 
            return true;
 
        }
 

	
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 

	
 
        if self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.maybe_consume_poly_args_spilled_without_pos_recovery() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'(') {
 
            // Seems like we have a function call or an enum literal
 
            result = true;
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_call_expression(&mut self, h: &mut Heap) -> Result<CallExpressionId, ParseError2> {
 
        let position = self.source.pos();
 

	
 
        // Consume method identifier
 
        let method;
 
        if self.has_keyword(b"get") {
 
            self.consume_keyword(b"get")?;
 
            method = Method::Get;
 
        } else if self.has_keyword(b"put") {
 
            self.consume_keyword(b"put")?;
 
            method = Method::Put;
 
        } else if self.has_keyword(b"fires") {
 
            self.consume_keyword(b"fires")?;
 
            method = Method::Fires;
 
        } else if self.has_keyword(b"create") {
 
            self.consume_keyword(b"create")?;
 
            method = Method::Create;
 
        } else {
 
            let identifier = self.consume_namespaced_identifier()?;
 
            method = Method::Symbolic(MethodSymbolic{
 
                identifier,
 
                definition: None
 
            })
 
        }
 

	
 
        // Consume polymorphic arguments
 
        self.consume_whitespace(false)?;
 
        let poly_args = self.consume_polymorphic_args(h, true)?;
 

	
 
        // Consume arguments to call
 
        self.consume_whitespace(false)?;
 
        let mut arguments = Vec::new();
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b")") {
 
            // TODO: allow trailing comma
 
            while self.source.next().is_some() {
 
                arguments.push(self.consume_expression(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b")") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 
        self.consume_string(b")")?;
 
        Ok(h.alloc_call_expression(|this| CallExpression {
 
            this,
 
            position,
 
            method,
 
            arguments,
 
            poly_args,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn consume_variable_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<VariableExpressionId, ParseError2> {
 
        let position = self.source.pos();
 
        let identifier = self.consume_namespaced_identifier()?;
 
        Ok(h.alloc_variable_expression(|this| VariableExpression {
 
            this,
 
            position,
 
            identifier,
 
            declaration: None,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    // ====================
 
    // Statements
 
    // ====================
 

	
 
    /// Consumes any kind of statement from the source and will error if it
 
    /// did not encounter a statement. Will also return an error if the
 
    /// statement is nested too deeply.
 
    ///
 
    /// `wrap_in_block` may be set to true to ensure that the parsed statement
 
    /// will be wrapped in a block statement if it is not already a block
 
    /// statement. This is used to ensure that all `if`, `while` and `synchronous`
 
    /// statements have a block statement as body.
 
    fn consume_statement(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError2> {
 
        if self.level >= MAX_LEVEL {
 
            return Err(self.error_at_pos("Too deeply nested statement"));
 
        }
 
        self.level += 1;
 
        let result = self.consume_statement_impl(h, wrap_in_block);
 
        self.level -= 1;
 
        result
 
    }
 
    fn has_label(&mut self) -> bool {
 
        // To prevent ambiguity with expression statements consisting only of an
 
        // identifier or a namespaced identifier, we look ahead and match on the
 
        // *single* colon that signals a labeled statement.
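        // e.g. "my_label: ..." is a labeled statement, whereas "ns::ident" is not (illustrative).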
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 
        if self.consume_identifier_spilled().is_ok() {
 
            // next character is ':', second character is NOT ':'
 
            result = Some(b':') == self.source.next() && Some(b':') != self.source.lookahead(1)
 
        }
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_statement_impl(&mut self, h: &mut Heap, wrap_in_block: bool) -> Result<StatementId, ParseError2> {
 
        // Parse and allocate statement
 
        let mut must_wrap = true;
 
        let mut stmt_id = if self.has_string(b"{") {
 
            must_wrap = false;
 
            self.consume_block_statement(h)?
 
        } else if self.has_keyword(b"skip") {
 
            must_wrap = false;
 
            self.consume_skip_statement(h)?.upcast()
 
        } else if self.has_keyword(b"if") {
 
            self.consume_if_statement(h)?.upcast()
 
        } else if self.has_keyword(b"while") {
 
            self.consume_while_statement(h)?.upcast()
 
        } else if self.has_keyword(b"break") {
 
            self.consume_break_statement(h)?.upcast()
 
        } else if self.has_keyword(b"continue") {
 
            self.consume_continue_statement(h)?.upcast()
 
        } else if self.has_keyword(b"synchronous") {
 
            self.consume_synchronous_statement(h)?.upcast()
 
        } else if self.has_keyword(b"return") {
 
            self.consume_return_statement(h)?.upcast()
 
        } else if self.has_keyword(b"assert") {
 
            self.consume_assert_statement(h)?.upcast()
 
        } else if self.has_keyword(b"goto") {
 
            self.consume_goto_statement(h)?.upcast()
 
        } else if self.has_keyword(b"new") {
 
            self.consume_new_statement(h)?.upcast()
 
        } else if self.has_label() {
 
            self.consume_labeled_statement(h)?.upcast()
 
        } else {
 
            self.consume_expression_statement(h)?.upcast()
 
        };
 

	
 
        // Wrap if desired and if needed
 
        if must_wrap && wrap_in_block {
 
            let position = h[stmt_id].position();
 
            let block_wrapper = h.alloc_block_statement(|this| BlockStatement{
 
                this,
 
                position,
 
                statements: vec![stmt_id],
 
                parent_scope: None,
 
                relative_pos_in_parent: 0,
 
                locals: Vec::new(),
 
                labels: Vec::new()
 
            });
 

	
 
            stmt_id = block_wrapper.upcast();
 
        }
 

	
 
        Ok(stmt_id)
 
    }
 
    fn has_local_statement(&mut self) -> bool {
 
        /* To avoid ambiguity, we look ahead to find either the
 
        channel keyword that signals a variable declaration, or
 
        a type annotation followed by another identifier.
 
        Example:
 
          my_type[] x = {5}; // memory statement
 
          my_var[5] = x; // assignment expression, expression statement
 
        Note how both the memory statement and the assignment
 
        start with an arbitrary identifier followed by '['. */
 
        if self.has_keyword(b"channel") {
 
            return true;
 
        }
 
        if self.has_statement_keyword() {
 
            return false;
 
        }
 
        let backup_pos = self.source.pos();
 
        let mut result = false;
 
        if self.maybe_consume_type_spilled_without_pos_recovery() {
 
            // We seem to have a valid type, do we now have an identifier?
 
            if self.consume_whitespace(true).is_ok() {
 
                result = self.has_identifier();
 
            }
 
        }
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 
    fn consume_block_statement(&mut self, h: &mut Heap) -> Result<StatementId, ParseError2> {
 
        let position = self.source.pos();
 
        let mut statements = Vec::new();
 
        self.consume_string(b"{")?;
 
        self.consume_whitespace(false)?;
 
        while self.has_local_statement() {
 
            statements.push(self.consume_local_statement(h)?.upcast());
 
            self.consume_whitespace(false)?;
 
        }
 
        while !self.has_string(b"}") {
 
            statements.push(self.consume_statement(h, false)?);
 
            self.consume_whitespace(false)?;
 
        }
 
        self.consume_string(b"}")?;
 
        if statements.is_empty() {
 
            Ok(h.alloc_skip_statement(|this| SkipStatement { this, position, next: None }).upcast())
 
        } else {
 
            Ok(h.alloc_block_statement(|this| BlockStatement {
 
                this,
 
                position,
 
                statements,
 
                parent_scope: None,
 
                relative_pos_in_parent: 0,
 
                locals: Vec::new(),
 
                labels: Vec::new(),
 
            })
 
            .upcast())
 
        }
 
    }
 
    fn consume_local_statement(&mut self, h: &mut Heap) -> Result<LocalStatementId, ParseError2> {
 
        if self.has_keyword(b"channel") {
 
            Ok(self.consume_channel_statement(h)?.upcast())
 
        } else {
 
            Ok(self.consume_memory_statement(h)?.upcast())
 
        }
 
    }
 
    fn consume_channel_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ChannelStatementId, ParseError2> {
 
        // Consume channel statement and polymorphic argument if specified
 
        let position = self.source.pos();
 
        self.consume_keyword(b"channel")?;
 

	
 
        let poly_args = self.consume_polymorphic_args(h, true)?;
 
        let poly_arg_id = match poly_args.len() {
 
            0 => h.alloc_parser_type(|this| ParserType{
 
                this, pos: position.clone(), variant: ParserTypeVariant::Inferred,
 
            }),
 
            1 => poly_args[0],
 
            _ => return Err(ParseError2::new_error(
 
                &self.source, self.source.pos(),
 
                "port construction using 'channel' accepts up to 1 polymorphic argument"
 
            ))
 
        };
 
        self.consume_whitespace(true)?;
 

	
 
        // Consume the output port
 
        let out_parser_type = h.alloc_parser_type(|this| ParserType{
 
            this, pos: position.clone(), variant: ParserTypeVariant::Output(poly_arg_id)
 
        });
 
        let out_identifier = self.consume_identifier()?;
 

	
 
        // Consume the "->" syntax
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b"->")?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume the input port
 
        // TODO: Unsure about this, both ports refer to the same ParserType, is this ok?
 
        let in_parser_type = h.alloc_parser_type(|this| ParserType{
 
            this, pos: position.clone(), variant: ParserTypeVariant::Input(poly_arg_id)
 
        });
 
        let in_identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
        let out_port = h.alloc_local(|this| Local {
src/protocol/mod.rs
 
@@ -110,200 +110,200 @@ impl ProtocolDescription {
 
    // expects port polarities to be correct
 
    pub(crate) fn new_component(&self, identifier: &[u8], ports: &[PortId]) -> ComponentState {
 
        let mut args = Vec::new();
 
        for (&x, y) in ports.iter().zip(self.component_polarities(identifier).unwrap()) {
 
            match y {
 
                Polarity::Getter => args.push(Value::Input(InputValue(x))),
 
                Polarity::Putter => args.push(Value::Output(OutputValue(x))),
 
            }
 
        }
 
        let h = &self.heap;
 
        let root = &h[self.root];
 
        let def = root.get_definition_ident(h, identifier).unwrap();
 
        ComponentState { prompt: Prompt::new(h, def, &args) }
 
    }
 
}
 
impl ComponentState {
 
    pub(crate) fn nonsync_run<'a: 'b, 'b>(
 
        &'a mut self,
 
        context: &'b mut NonsyncProtoContext<'b>,
 
        pd: &'a ProtocolDescription,
 
    ) -> NonsyncBlocker {
 
        let mut context = EvalContext::Nonsync(context);
 
        loop {
 
            let result = self.prompt.step(&pd.heap, &mut context);
 
            match result {
 
                // In component definitions, there are no return statements
 
                Ok(_) => unreachable!(),
 
                Err(cont) => match cont {
 
                    EvalContinuation::Stepping => continue,
 
                    EvalContinuation::Inconsistent => return NonsyncBlocker::Inconsistent,
 
                    EvalContinuation::Terminal => return NonsyncBlocker::ComponentExit,
 
                    EvalContinuation::SyncBlockStart => return NonsyncBlocker::SyncBlockStart,
 
                    // Not possible to end sync block if never entered one
 
                    EvalContinuation::SyncBlockEnd => unreachable!(),
 
                    EvalContinuation::NewComponent(definition_id, args) => {
 
                        // Look up definition (TODO for now, assume it is a definition)
 
                        let h = &pd.heap;
 
                        let init_state = ComponentState { prompt: Prompt::new(h, definition_id, &args) };
 
                        context.new_component(&args, init_state);
 
                        // Continue stepping
 
                        continue;
 
                    }
 
                    // Outside synchronous blocks, no fires/get/put happens
 
                    EvalContinuation::BlockFires(_) => unreachable!(),
 
                    EvalContinuation::BlockGet(_) => unreachable!(),
 
                    EvalContinuation::Put(_, _) => unreachable!(),
 
                },
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn sync_run<'a: 'b, 'b>(
 
        &'a mut self,
 
        context: &'b mut SyncProtoContext<'b>,
 
        pd: &'a ProtocolDescription,
 
    ) -> SyncBlocker {
 
        let mut context = EvalContext::Sync(context);
 
        loop {
 
            let result = self.prompt.step(&pd.heap, &mut context);
 
            match result {
 
                // Inside synchronous blocks, there are no return statements
 
                Ok(_) => unreachable!(),
 
                Err(cont) => match cont {
 
                    EvalContinuation::Stepping => continue,
 
                    EvalContinuation::Inconsistent => return SyncBlocker::Inconsistent,
 
                    // First need to exit synchronous block before definition may end
 
                    EvalContinuation::Terminal => unreachable!(),
 
                    // No nested synchronous blocks
 
                    EvalContinuation::SyncBlockStart => unreachable!(),
 
                    EvalContinuation::SyncBlockEnd => return SyncBlocker::SyncBlockEnd,
 
                    // Not possible to create component in sync block
 
                    EvalContinuation::NewComponent(_, _) => unreachable!(),
 
                    EvalContinuation::BlockFires(port) => match port {
 
                        Value::Output(OutputValue(port)) => {
 
                            return SyncBlocker::CouldntCheckFiring(port);
 
                        }
 
                        Value::Input(InputValue(port)) => {
 
                            return SyncBlocker::CouldntCheckFiring(port);
 
                        }
 
                        _ => unreachable!(),
 
                    },
 
                    EvalContinuation::BlockGet(port) => match port {
 
                        Value::Output(OutputValue(port)) => {
 
                            return SyncBlocker::CouldntReadMsg(port);
 
                        }
 
                        Value::Input(InputValue(port)) => {
 
                            return SyncBlocker::CouldntReadMsg(port);
 
                        }
 
                        _ => unreachable!(),
 
                    },
 
                    EvalContinuation::Put(port, message) => {
 
                        let value;
 
                        match port {
 
                            Value::Output(OutputValue(port_value)) => {
 
                                value = port_value;
 
                            }
 
                            Value::Input(InputValue(port_value)) => {
 
                                value = port_value;
 
                            }
 
                            _ => unreachable!(),
 
                        }
 
                        let payload;
 
                        match message {
 
                            Value::Message(MessageValue(None)) => {
 
                                // Putting a null message is inconsistent
 
                                return SyncBlocker::Inconsistent;
 
                            }
 
                            Value::Message(MessageValue(Some(buffer))) => {
 
                                // Create a copy of the payload
 
                                payload = buffer;
 
                            }
 
                            _ => unreachable!(),
 
                        }
 
                        return SyncBlocker::PutMsg(value, payload);
 
                    }
 
                },
 
            }
 
        }
 
    }
 
}
 
impl EvalContext<'_> {
 
    // fn random(&mut self) -> LongValue {
 
    //     match self {
 
    //         // EvalContext::None => unreachable!(),
 
    //         EvalContext::Nonsync(_context) => todo!(),
 
    //         EvalContext::Sync(_) => unreachable!(),
 
    //     }
 
    // }
 
    fn new_component(&mut self, args: &[Value], init_state: ComponentState) -> () {
 
        match self {
 
            // EvalContext::None => unreachable!(),
 
            EvalContext::Nonsync(context) => {
 
                let mut moved_ports = HashSet::new();
 
                for arg in args.iter() {
 
                    match arg {
 
                        Value::Output(OutputValue(port)) => {
 
                            moved_ports.insert(*port);
 
                        }
 
                        Value::Input(InputValue(port)) => {
 
                            moved_ports.insert(*port);
 
                        }
 
                        _ => {}
 
                    }
 
                }
 
                context.new_component(moved_ports, init_state)
 
            }
 
            EvalContext::Sync(_) => unreachable!(),
 
        }
 
    }
 
    fn new_channel(&mut self) -> [Value; 2] {
 
        match self {
 
            // EvalContext::None => unreachable!(),
 
            EvalContext::Nonsync(context) => {
 
                let [from, to] = context.new_port_pair();
 
                let from = Value::Output(OutputValue(from));
 
                let to = Value::Input(InputValue(to));
 
                return [from, to];
 
            }
 
            EvalContext::Sync(_) => unreachable!(),
 
        }
 
    }
 
    fn fires(&mut self, port: Value) -> Option<Value> {
 
        match self {
 
            // EvalContext::None => unreachable!(),
 
            EvalContext::Nonsync(_) => unreachable!(),
 
            EvalContext::Sync(context) => match port {
 
                Value::Output(OutputValue(port)) => context.is_firing(port).map(Value::from),
 
                Value::Input(InputValue(port)) => context.is_firing(port).map(Value::from),
 
                _ => unreachable!(),
 
            },
 
        }
 
    }
 
    fn get(&mut self, port: Value) -> Option<Value> {
 
        match self {
 
            // EvalContext::None => unreachable!(),
 
            EvalContext::Nonsync(_) => unreachable!(),
 
            EvalContext::Sync(context) => match port {
 
                Value::Output(OutputValue(port)) => {
 
                    context.read_msg(port).map(Value::receive_message)
 
                }
 
                Value::Input(InputValue(port)) => {
 
                    context.read_msg(port).map(Value::receive_message)
 
                }
 
                _ => unreachable!(),
 
            },
 
        }
 
    }
 
    fn did_put(&mut self, port: Value) -> bool {
 
        match self {
 
            EvalContext::Nonsync(_) => unreachable!("did_put in nonsync context"),
 
            EvalContext::Sync(context) => match port {
 
                Value::Output(OutputValue(port)) => {
 
                    context.did_put_or_get(port)
 
                },
 
                Value::Input(_) => unreachable!("did_put on input port"),
 
                _ => unreachable!("did_put on non-port value")
 
            }
 
        }
 
    }
 
}
src/protocol/parser/mod.rs
 
mod depth_visitor;
 
mod symbol_table;
 
// mod type_table_old;
 
mod type_table;
 
mod type_resolver;
 
mod visitor;
 
mod visitor_linker;
 
mod utils;
 

	
 
use depth_visitor::*;
 
use symbol_table::SymbolTable;
 
use visitor::Visitor2;
 
use visitor_linker::ValidityAndLinkerVisitor;
 
use type_resolver::{TypeResolvingVisitor, ResolveQueue};
 
use type_table::{TypeTable, TypeCtx};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::lexer::*;
 

	
 
use std::collections::HashMap;
 
use crate::protocol::ast_printer::ASTWriter;
 

	
 
// TODO: @fixme, pub qualifier
 
pub(crate) struct LexedModule {
 
    pub(crate) source: InputSource,
 
    module_name: Vec<u8>,
 
    version: Option<u64>,
 
    root_id: RootId,
 
}
 

	
 
pub struct Parser {
 
    pub(crate) heap: Heap,
 
    pub(crate) modules: Vec<LexedModule>,
 
    pub(crate) module_lookup: HashMap<Vec<u8>, usize>, // from (optional) module name to `modules` idx
 
}
 

	
 
impl Parser {
 
    pub fn new() -> Self {
 
        Parser{
 
            heap: Heap::new(),
 
            modules: Vec::new(),
 
            module_lookup: HashMap::new()
 
        }
 
    }
 

	
 
    // TODO: @fix, temporary implementation to keep code compilable
 
    pub fn new_with_source(source: InputSource) -> Result<Self, ParseError2> {
 
        let mut parser = Parser::new();
 
        parser.feed(source)?;
 
        Ok(parser)
 
    }
 

	
 
    pub fn feed(&mut self, mut source: InputSource) -> Result<RootId, ParseError2> {
 
        // Lex the input source
 
        let mut lex = Lexer::new(&mut source);
 
        let pd = lex.consume_protocol_description(&mut self.heap)?;
 

	
 
        // Seek the module name and version
 
        let root = &self.heap[pd];
 
        let mut module_name_pos = InputPosition::default();
 
        let mut module_name = Vec::new();
 
        let mut module_version_pos = InputPosition::default();
 
        let mut module_version = None;
 

	
 
        for pragma in &root.pragmas {
 
            match &self.heap[*pragma] {
 
                Pragma::Module(module) => {
 
                    if !module_name.is_empty() {
 
                        return Err(
 
                            ParseError2::new_error(&source, module.position, "Double definition of module name in the same file")
 
                                .with_postfixed_info(&source, module_name_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_name_pos = module.position.clone();
 
                    module_name = module.value.clone();
 
                },
 
                Pragma::Version(version) => {
 
                    if module_version.is_some() {
 
                        return Err(
 
                            ParseError2::new_error(&source, version.position, "Double definition of module version")
 
                                .with_postfixed_info(&source, module_version_pos, "Previous definition was here")
 
                        )
 
                    }
 

	
 
                    module_version_pos = version.position.clone();
 
                    module_version = Some(version.version);
 
                },
 
            }
 
        }
 

	
 
        // Add module to list of modules and prevent naming conflicts
 
        let cur_module_idx = self.modules.len();
 
        if let Some(prev_module_idx) = self.module_lookup.get(&module_name) {
 
            // Find `#module` statement in other module again
 
            let prev_module = &self.modules[*prev_module_idx];
 
            let prev_module_pos = self.heap[prev_module.root_id].pragmas
 
                .iter()
 
                .find_map(|p| {
 
                    match &self.heap[*p] {
 
                        Pragma::Module(module) => Some(module.position.clone()),
 
                        _ => None
 
                    }
 
                })
 
                .unwrap_or(InputPosition::default());
 

	
 
            let module_name_msg = if module_name.is_empty() {
 
                format!("a nameless module")
 
            } else {
 
                format!("module '{}'", String::from_utf8_lossy(&module_name))
 
            };
 

	
 
            return Err(
 
                ParseError2::new_error(&source, module_name_pos, &format!("Double definition of {} across files", module_name_msg))
 
                    .with_postfixed_info(&prev_module.source, prev_module_pos, "Other definition was here")
 
            );
 
        }
 

	
 
        self.modules.push(LexedModule{
 
            source,
 
            module_name: module_name.clone(),
 
            version: module_version,
 
            root_id: pd
 
        });
 
        self.module_lookup.insert(module_name, cur_module_idx);
 
        Ok(pd)
 
    }
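    // Illustrative usage sketch, not part of the original code; the file names
    // and module contents are hypothetical:
    //
    //   let mut parser = Parser::new();
    //   parser.feed(InputSource::from_file(Path::new("a.pdl")).unwrap())?;
    //   parser.feed(InputSource::from_file(Path::new("b.pdl")).unwrap())?;
    //   // Feeding another file whose `#module` pragma repeats the name used in
    //   // "a.pdl" fails with the "Double definition ... across files" error above.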
 

	
 
    pub fn compile(&mut self) {
 
        // Build module lookup
 
    }
 

	
 
    fn resolve_symbols_and_types(&mut self) -> Result<(SymbolTable, TypeTable), ParseError2> {
 
        // Construct the symbol table to resolve any imports and/or definitions,
 
        // then use the symbol table to actually annotate all of the imports.
 
        // If the symbol table is constructed correctly then all imports MUST be
 
        // resolvable.
 
        // TODO: Update once namespaced identifiers are implemented
 
        let symbol_table = SymbolTable::new(&self.heap, &self.modules)?;
 

	
 
        // Not pretty, but we need to work around Rust's borrowing rules; it is
 
        // totally safe to mutate the contents of an AST element that we are
 
        // not borrowing anywhere else.
 
        // TODO: Maybe directly access heap's members to allow borrowing from
 
        //  multiple members of Heap? Not pretty though...
 
        let mut module_index = 0;
 
        let mut import_index = 0;
 
        loop {
 
            if module_index >= self.modules.len() {
 
                break;
 
            }
 

	
 
            let module_root_id = self.modules[module_index].root_id;
 
            let import_id = {
 
                let root = &self.heap[module_root_id];
 
                if import_index >= root.imports.len() {
 
                    module_index += 1;
 
                    import_index = 0;
 
                    continue
 
                }
 
                root.imports[import_index]
 
            };
 

	
 
            let import = &mut self.heap[import_id];
 
            match import {
 
                Import::Module(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module import already resolved");
 
                    let target_module_id = symbol_table.resolve_module(&import.module_name)
 
                        .expect("module import is resolved by symbol table");
 
                    import.module_id = Some(target_module_id)
 
                },
 
                Import::Symbols(import) => {
 
                    debug_assert!(import.module_id.is_none(), "module of symbol import already resolved");
 
                    let target_module_id = symbol_table.resolve_module(&import.module_name)
 
                        .expect("symbol import's module is resolved by symbol table");
 
                    import.module_id = Some(target_module_id);
 

	
 
                    for symbol in &mut import.symbols {
 
                        debug_assert!(symbol.definition_id.is_none(), "symbol import already resolved");
 
                        let (_, target_definition_id) = symbol_table.resolve_symbol(module_root_id, &symbol.alias)
 
                            .expect("symbol import is resolved by symbol table")
 
                            .as_definition()
 
                            .expect("symbol import does not resolve to namespace symbol");
 
                        symbol.definition_id = Some(target_definition_id);
 
                    }
 
                }
 
            }
 
        }
 

	
 
        // All imports in the AST are now annotated. We now use the symbol table
 
        // to construct the type table.
 
        let mut type_ctx = TypeCtx::new(&symbol_table, &mut self.heap, &self.modules);
 
        let type_table = TypeTable::new(&mut type_ctx)?;
 

	
 
        Ok((symbol_table, type_table))
 
    }
 

	
 
    // TODO: @fix, temporary impl to keep code compilable
 
    pub fn parse(&mut self) -> Result<RootId, ParseError2> {
 
        assert_eq!(self.modules.len(), 1, "Fix meeeee");
 
        let root_id = self.modules[0].root_id;
 

	
 
        let (mut symbol_table, mut type_table) = self.resolve_symbols_and_types()?;
 

	
 
        // TODO: @cleanup
 
        let mut ctx = visitor::Ctx{
 
            heap: &mut self.heap,
 
            module: &self.modules[0],
 
            symbols: &mut symbol_table,
 
            types: &mut type_table,
 
        };
 
        let mut visit = ValidityAndLinkerVisitor::new();
 
        visit.visit_module(&mut ctx)?;
 
        let mut type_visit = TypeResolvingVisitor::new();
 
        let mut queue = ResolveQueue::new();
 
        TypeResolvingVisitor::queue_module_definitions(&ctx, &mut queue);
 
        while !queue.is_empty() {
 
            let top = queue.pop().unwrap();
 
            println!("Resolving root={}, def={}, mono={:?}", top.root_id.index, top.definition_id.index, top.monomorph_types);
 
            type_visit.handle_module_definition(&mut ctx, &mut queue, top)?;
 
        }
 

	
 
        if let Err((position, message)) = Self::parse_inner(&mut self.heap, root_id) {
 
            return Err(ParseError2::new_error(&self.modules[0].source, position, &message))
 
        }
 

	
 
        let mut writer = ASTWriter::new();
 
        let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap();
 
        writer.write_ast(&mut file, &self.heap);
 

	
 
        Ok(root_id)
 
    }
 

	
 
    pub fn parse_inner(h: &mut Heap, pd: RootId) -> VisitorResult {
 
        // TODO: @cleanup, slowly phasing out old compiler
 
        // NestedSynchronousStatements::new().visit_protocol_description(h, pd)?;
 
        // ChannelStatementOccurrences::new().visit_protocol_description(h, pd)?;
 
        // FunctionStatementReturns::new().visit_protocol_description(h, pd)?;
 
        // ComponentStatementReturnNew::new().visit_protocol_description(h, pd)?;
 
        // CheckBuiltinOccurrences::new().visit_protocol_description(h, pd)?;
 
        // BuildSymbolDeclarations::new().visit_protocol_description(h, pd)?;
 
        // LinkCallExpressions::new().visit_protocol_description(h, pd)?;
 
        // BuildScope::new().visit_protocol_description(h, pd)?;
 
        // ResolveVariables::new().visit_protocol_description(h, pd)?;
 
        LinkStatements::new().visit_protocol_description(h, pd)?;
 
        // BuildLabels::new().visit_protocol_description(h, pd)?;
 
        // ResolveLabels::new().visit_protocol_description(h, pd)?;
 
        AssignableExpressions::new().visit_protocol_description(h, pd)?;
 
        IndexableExpressions::new().visit_protocol_description(h, pd)?;
 
        SelectableExpressions::new().visit_protocol_description(h, pd)?;
 

	
 
        Ok(())
 
    }
 
}
 

	
 
#[cfg(test)]
 
mod tests {
 
    use std::fs::File;
 
    use std::io::Read;
 
    use std::path::Path;
 

	
 
    use super::*;
 

	
 
    // #[test]
 
    fn positive_tests() {
 
        for resource in TestFileIter::new("testdata/parser/positive", "pdl") {
 
            let resource = resource.expect("read testdata filepath");
 
            // println!(" * running: {}", &resource);
 
            let path = Path::new(&resource);
 
            let source = InputSource::from_file(&path).unwrap();
 
            // println!("DEBUG -- input:\n{}", String::from_utf8_lossy(&source.input));
 
            let mut parser = Parser::new_with_source(source).expect("parse source");
 
            match parser.parse() {
 
                Ok(_) => {}
 
                Err(err) => {
 
                    println!(" > file: {}", &resource);
 
                    println!("{}", err);
 
                    assert!(false);
 
                }
 
            }
 
        }
 
    }
 

	
 
    // #[test]
 
    fn negative_tests() {
 
        for resource in TestFileIter::new("testdata/parser/negative", "pdl") {
 
            let resource = resource.expect("read testdata filepath");
 
            let path = Path::new(&resource);
 
            let expect = path.with_extension("txt");
 
            let mut source = InputSource::from_file(&path).unwrap();
 
            let mut parser = Parser::new_with_source(source).expect("construct parser");
 
            match parser.parse() {
 
                Ok(pd) => {
 
                    println!("Expected parse error:");
 

	
 
                    let mut cev: Vec<u8> = Vec::new();
 
                    let mut f = File::open(expect).unwrap();
 
                    f.read_to_end(&mut cev).unwrap();
 
                    println!("{}", String::from_utf8_lossy(&cev));
 
                    assert!(false);
 
                }
 
                Err(err) => {
 
                    let expected = format!("{}", err);
 
                    println!("{}", &expected);
 

	
 
                    let mut cev: Vec<u8> = Vec::new();
 
                    let mut f = File::open(expect).unwrap();
 
                    f.read_to_end(&mut cev).unwrap();
 
                    println!("{}", String::from_utf8_lossy(&cev));
 

	
 
                    assert_eq!(expected.as_bytes(), cev);
 
                }
 
            }
 
        }
 
    }
 

	
 
    // #[test]
 
    fn counterexample_tests() {
 
        for resource in TestFileIter::new("testdata/parser/counterexamples", "pdl") {
 
            let resource = resource.expect("read testdata filepath");
 
            let path = Path::new(&resource);
 
            let source = InputSource::from_file(&path).unwrap();
 
            let mut parser = Parser::new_with_source(source).expect("construct parser");
 

	
 
            fn print_header(s: &str) {
 
                println!("{}", "=".repeat(80));
 
                println!(" > File: {}", s);
 
                println!("{}", "=".repeat(80));
 
            }
 

	
 
            match parser.parse() {
 
                Ok(parsed) => {
 
                    print_header(&resource);
 
                    println!("\n  SUCCESS\n\n --- source:\n{}", String::from_utf8_lossy(&parser.modules[0].source.input));
 
                },
 
                Err(err) => {
 
                    print_header(&resource);
 
                    println!(
 
                        "\n  FAILURE\n\n --- error:\n{}\n --- source:\n{}",
 
                        err,
 
                        String::from_utf8_lossy(&parser.modules[0].source.input)
 
                    )
 
                }
 
            }
 
        }
 
    }
 

	
 
    struct TestFileIter {
 
        iter: std::fs::ReadDir,
 
        root: String,
 
        extension: String
 
    }
 

	
 
    impl TestFileIter {
 
        fn new(root_dir: &str, extension: &str) -> Self {
 
            let path = Path::new(root_dir);
 
            assert!(path.is_dir(), "root '{}' is not a directory", root_dir);
 

	
 
            let iter = std::fs::read_dir(path).expect("list dir contents");
 

	
 
            Self {
 
                iter,
 
                root: root_dir.to_string(),
 
                extension: extension.to_string(),
 
            }
 
        }
 
    }
 

	
 
    impl Iterator for TestFileIter {
 
        type Item = Result<String, String>;
 

	
 
        fn next(&mut self) -> Option<Self::Item> {
 
            while let Some(entry) = self.iter.next() {
 
                if let Err(e) = entry {
 
                    return Some(Err(format!("failed to read dir entry, because: {}", e)));
 
                }
 
                let entry = entry.unwrap();
 

	
 
                let path = entry.path();
 
                if !path.is_file() { continue; }
 

	
 
                let extension = path.extension();
 
                if extension.is_none() { continue; }
 
                let extension = extension.unwrap().to_string_lossy();
 
                if extension != self.extension { continue; }
 

	
 
                return Some(Ok(path.to_string_lossy().to_string()));
 
            }
 

	
 
            None
 
        }
 
    }
 
}
src/protocol/parser/type_resolver.rs
 
/// type_resolver.rs
 
///
 
/// Performs type inference and type checking
 
///
 
/// TODO: Needs an optimization pass
 
/// TODO: Needs a cleanup pass
 

	
 
use std::collections::{HashMap, HashSet, VecDeque};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use super::type_table::*;
 
use super::symbol_table::*;
 
use super::visitor::{
 
    STMT_BUFFER_INIT_CAPACITY,
 
    EXPR_BUFFER_INIT_CAPACITY,
 
    Ctx,
 
    Visitor2,
 
    VisitorResult
 
};
 
use std::collections::hash_map::Entry;
 
use crate::protocol::parser::type_resolver::InferenceTypePart::IntegerLike;
 

	
 
const MESSAGE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Message ];
 
const BOOL_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Bool ];
 
const NUMBERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::NumberLike ];
 
const INTEGERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::IntegerLike ];
 
const ARRAY_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Array, InferenceTypePart::Unknown ];
 
const ARRAYLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::ArrayLike, InferenceTypePart::Unknown ];
 
const PORTLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::PortLike, InferenceTypePart::Unknown ];
 

	
 
/// TODO: @performance Turn into PartialOrd+Ord to simplify checks
 
#[derive(Debug, Clone, Eq, PartialEq)]
 
pub(crate) enum InferenceTypePart {
 
    // A marker with an identifier which we can use to seek subsections of the 
 
    // inferred type
 
    Marker(usize),
 
    // Completely unknown type, needs to be inferred
 
    Unknown,
 
    // Partially known type, may be inferred to be the appropriate related
 
    // type.
 
    // IndexLike,      // index into array/slice
 
    NumberLike,     // any kind of integer/float
 
    IntegerLike,    // any kind of integer
 
    ArrayLike,      // array or slice. Note that this must have a subtype
 
    PortLike,       // input or output port
 
    // Special types that cannot be instantiated by the user
 
    Void, // For builtin functions that do not return anything
 
    // Concrete types without subtypes
 
    Message,
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // One subtype
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // A user-defined type with any number of subtypes
 
    Instance(DefinitionId, usize)
 
}
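// Illustrative sketch, not part of the original code: an inference type is a
// depth-first serialization of a type tree, one part per node. A few
// hypothetical examples (`pair_id` is a made-up DefinitionId):
//
//   int                  => [Int]
//   byte[]               => [Array, Byte]
//   in<msg>              => [Input, Message]
//   a fully unknown type => [Unknown]
//   Pair<byte, bool[]>   => [Instance(pair_id, 2), Byte, Array, Bool]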
 

	
 
impl InferenceTypePart {
 
    fn is_marker(&self) -> bool {
 
        if let InferenceTypePart::Marker(_) = self { true } else { false }
 
    }
 

	
 
    /// Checks if the type is concrete; markers are interpreted as concrete
 
    /// types.
 
    fn is_concrete(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | 
 
            ITP::ArrayLike | ITP::PortLike => false,
 
            _ => true
 
        }
 
    }
 

	
 
    fn is_concrete_number(&self) -> bool {
 
        // TODO: @float
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_integer(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_array_or_slice(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Array | ITP::Slice => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    fn is_concrete_port(&self) -> bool {
 
        use InferenceTypePart as ITP;
 
        match self {
 
            ITP::Input | ITP::Output => true,
 
            _ => false,
 
        }
 
    }
 

	
 
    /// Checks if a part is less specific than the argument. Only checks for 
 
    /// single-part inference (i.e. not the replacement of an `Unknown` variant 
 
    /// with the argument)
 
    fn may_be_inferred_from(&self, arg: &InferenceTypePart) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        (*self == ITP::IntegerLike && arg.is_concrete_integer()) ||
 
        (*self == ITP::NumberLike && (arg.is_concrete_number() || *arg == ITP::IntegerLike)) ||
 
        (*self == ITP::ArrayLike && arg.is_concrete_array_or_slice()) ||
 
        (*self == ITP::PortLike && arg.is_concrete_port())
 
    }
 

	
 
    /// Returns the change in "iteration depth" when traversing this particular
 
    /// part. The iteration depth is used to traverse the tree in a linear 
 
    /// fashion. It is basically `number_of_subtypes - 1`
 
    fn depth_change(&self) -> i32 {
 
        use InferenceTypePart as ITP;
 
        match &self {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike |
 
            ITP::Void | ITP::Message | ITP::Bool | 
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long | 
 
            ITP::String => {
 
                -1
 
            },
 
            ITP::Marker(_) | ITP::ArrayLike | ITP::Array | ITP::Slice | 
 
            ITP::PortLike | ITP::Input | ITP::Output => {
 
                // One subtype, so do not modify depth
 
                0
 
            },
 
            ITP::Instance(_, num_args) => {
 
                (*num_args as i32) - 1
 
            }
 
        }
 
    }
 
}
 

	
 
impl From<ConcreteTypePart> for InferenceTypePart {
 
    fn from(v: ConcreteTypePart) -> InferenceTypePart {
 
        use ConcreteTypePart as CTP;
 
        use InferenceTypePart as ITP;
 

	
 
        match v {
 
            CTP::Void => ITP::Void,
 
            CTP::Message => ITP::Message,
 
            CTP::Bool => ITP::Bool,
 
            CTP::Byte => ITP::Byte,
 
            CTP::Short => ITP::Short,
 
            CTP::Int => ITP::Int,
 
            CTP::Long => ITP::Long,
 
            CTP::String => ITP::String,
 
            CTP::Array => ITP::Array,
 
            CTP::Slice => ITP::Slice,
 
            CTP::Input => ITP::Input,
 
            CTP::Output => ITP::Output,
 
            CTP::Instance(id, num) => ITP::Instance(id, num),
 
        }
 
    }
 
}
 

	
 
#[derive(Debug)]
 
struct InferenceType {
 
    has_marker: bool,
 
    is_done: bool,
 
    parts: Vec<InferenceTypePart>,
 
}
 

	
 
impl InferenceType {
 
    fn new(has_marker: bool, is_done: bool, parts: Vec<InferenceTypePart>) -> Self {
 
        if cfg!(debug_assertions) {
 
            debug_assert!(!parts.is_empty());
 
            if !has_marker {
 
                debug_assert!(parts.iter().all(|v| !v.is_marker()));
 
            }
 
            if is_done {
 
                debug_assert!(parts.iter().all(|v| v.is_concrete()));
 
            }
 
        }
 
        Self{ has_marker, is_done, parts }
 
    }
 

	
 
    fn replace_subtree(&mut self, start_idx: usize, with: &[InferenceTypePart]) {
 
        let end_idx = Self::find_subtree_end_idx(&self.parts, start_idx);
 
        debug_assert_eq!(with.len(), Self::find_subtree_end_idx(with, 0));
 
        self.parts.splice(start_idx..end_idx, with.iter().cloned());
 
        self.recompute_is_done();
 
    }
 

	
 
    // TODO: @performance, might all be done inline in the type inference methods
 
    fn recompute_is_done(&mut self) {
 
        self.is_done = self.parts.iter().all(|v| v.is_concrete());
 
    }
 

	
 
    /// Checks if type is, or may be inferred as, a number
 
    // TODO: @float
 
    fn might_be_number(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        // TODO: @marker?
 
        if self.parts.len() != 1 { return false; }
 
        match self.parts[0] {
 
            ITP::Unknown | ITP::NumberLike | ITP::IntegerLike |
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long =>
 
                true,
 
            _ =>
 
                false,
 
        }
 
    }
 

	
 
    /// Checks if type is, or may be inferred as, an integer
 
    fn might_be_integer(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        // TODO: @marker?
 
        if self.parts.len() != 1 { return false; }
 
        match self.parts[0] {
 
            ITP::Unknown | ITP::IntegerLike |
 
            ITP::Byte | ITP::Short | ITP::Int | ITP::Long =>
 
                true,
 
            _ =>
 
                false,
 
        }
 
    }
 

	
 
    /// Checks if type is, or may be inferred as, a boolean
 
    fn might_be_boolean(&self) -> bool {
 
        use InferenceTypePart as ITP;
 

	
 
        // TODO: @marker?
 
        if self.parts.len() != 1 { return false; }
 
        match self.parts[0] {
 
            ITP::Unknown | ITP::Bool => true,
 
            _ => false
 
        }
 
    }
 

	
 
    /// Returns an iterator over all markers and the partial type tree that
 
    /// follows those markers.
 
    fn marker_iter(&self) -> InferenceTypeMarkerIter {
 
        InferenceTypeMarkerIter::new(&self.parts)
 
    }
 

	
 
    /// Attempts to find a marker with a specific value appearing at or after
 
    /// the specified index. If found then the partial type tree's bounding
 
    /// indices that follow that marker are returned.
 
    fn find_subtree_idx_for_marker(&self, marker: usize, mut idx: usize) -> Option<(usize, usize)> {
 
        // Seek ahead to find a marker
 
        let marker = InferenceTypePart::Marker(marker);
 
        while idx < self.parts.len() {
 
            if marker == self.parts[idx] {
 
                // Found the marker
 
                let start_idx = idx + 1;
 
                let end_idx = Self::find_subtree_end_idx(&self.parts, start_idx);
 
                return Some((start_idx, end_idx))
 
            }
 

	
 
            idx += 1;
 
        }
 

	
 
        None
 
    }
 

	
 
    /// Given that the `parts` are a depth-first serialized tree of types, this
 
    /// function finds the subtree anchored at a specific node. The returned 
 
    /// index is exclusive.
 
    fn find_subtree_end_idx(parts: &[InferenceTypePart], start_idx: usize) -> usize {
 
        let mut depth = 1;
 
        let mut idx = start_idx;
 

	
 
        while idx < parts.len() {
 
            depth += parts[idx].depth_change();
 
            if depth == 0 {
 
                return idx + 1;
 
            }
 
            idx += 1;
 
        }
 

	
 
        // If here, then the inference type is malformed
 
        unreachable!();
 
    }
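    // Worked example, not part of the original code, for the hypothetical
    // serialization of `in<byte[]>`, i.e. parts = [Input, Array, Byte]:
    //
    //   find_subtree_end_idx(parts, 0): depth 1 -> 1 (Input) -> 1 (Array) -> 0 (Byte), returns 3
    //   find_subtree_end_idx(parts, 1): depth 1 -> 1 (Array) -> 0 (Byte),              returns 3
    //
    // so the whole tree spans parts[0..3] and the `byte[]` payload spans parts[1..3].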
 

	
 
    /// Call that attempts to infer the part at `to_infer.parts[to_infer_idx]` 
 
    /// using the subtree at `template.parts[template_idx]`. Will return 
 
    /// `Some(depth_change_due_to_traversal)` if type inference has been 
 
    /// applied. In this case the indices will also be modified to point to the 
 
    /// next part in both templates. If type inference has not (or: could not) 
 
    /// be applied then `None` will be returned. Note that this might mean that 
 
    /// the types are incompatible.
 
    ///
 
    /// As this is a helper function, some assumptions: the parts are not
 
    /// exactly equal, and neither of them contains a marker. Also: only the
 
    /// `to_infer` parts are checked for inference. It might be that this 
 
    /// function returns `None`, but that `template` is still compatible
 
    /// with `to_infer`, e.g. when `template` has an `Unknown` part.
 
    fn infer_part_for_single_type(
 
        to_infer: &mut InferenceType, to_infer_idx: &mut usize,
 
        template_parts: &[InferenceTypePart], template_idx: &mut usize,
 
    ) -> Option<i32> {
 
        use InferenceTypePart as ITP;
 

	
 
        let to_infer_part = &to_infer.parts[*to_infer_idx];
 
        let template_part = &template_parts[*template_idx];
 

	
 
        // Check for programmer mistakes
 
        debug_assert_ne!(to_infer_part, template_part);
 
        debug_assert!(!to_infer_part.is_marker(), "marker encountered in 'infer part'");
 
        debug_assert!(!template_part.is_marker(), "marker encountered in 'template part'");
 

	
 
        // Inference of a somewhat-specified type
 
        if to_infer_part.may_be_inferred_from(template_part) {
 
            let depth_change = to_infer_part.depth_change();
 
            debug_assert_eq!(depth_change, template_part.depth_change());
 
            to_infer.parts[*to_infer_idx] = template_part.clone();
 
            *to_infer_idx += 1;
 
            *template_idx += 1;
 
            return Some(depth_change);
 
        }
 

	
 
        // Inference of a completely unknown type
 
        if *to_infer_part == ITP::Unknown {
 
            // template part is different, so cannot be unknown, hence copy the
 
            // entire subtree
 
            let template_end_idx = Self::find_subtree_end_idx(template_parts, *template_idx);
 
            to_infer.parts[*to_infer_idx] = template_part.clone();
 
            *to_infer_idx += 1;
 
            for insert_idx in (*template_idx + 1)..template_end_idx {
 
                to_infer.parts.insert(*to_infer_idx, template_parts[insert_idx].clone());
 
                *to_infer_idx += 1;
 
            }
 
            *template_idx = template_end_idx;
 

	
 
            // Note: by definition the LHS was Unknown and the RHS traversed a 
 
            // full subtree.
 
            return Some(-1);
 
        }
 

	
 
        None
 
    }
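    // Illustrative sketch, not part of the original code, of the two inference
    // cases handled above:
    //
    //   somewhat-specified part: to_infer = [IntegerLike], template = [Byte]
    //     -> the single part is replaced in place, yielding [Byte]
    //   fully unknown part:      to_infer = [Unknown], template = [Array, Int]
    //     -> the whole template subtree is copied, yielding [Array, Int]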
 

	
 
    /// Call that checks if the `to_check` part is compatible with the `template`
 
    /// part. This essentially implements `infer_part_for_single_type` but skips
 
    /// over the matching parts.
 
    fn check_part_for_single_type(
 
        to_check_parts: &[InferenceTypePart], to_check_idx: &mut usize,
 
        template_parts: &[InferenceTypePart], template_idx: &mut usize
 
    ) -> Option<i32> {
 
        use InferenceTypePart as ITP;
 

	
 
        let to_check_part = &to_check_parts[*to_check_idx];
 
        let template_part = &template_parts[*template_idx];
 

	
 
        // Checking programmer errors
 
        debug_assert_ne!(to_check_part, template_part);
 
        debug_assert!(!to_check_part.is_marker(), "marker encountered in 'to_check part'");
 
        debug_assert!(!template_part.is_marker(), "marker encountered in 'template part'");
 

	
 
        if to_check_part.may_be_inferred_from(template_part) {
 
            let depth_change = to_check_part.depth_change();
 
            debug_assert_eq!(depth_change, template_part.depth_change());
 
            *to_check_idx += 1;
 
            *template_idx += 1;
 
            return Some(depth_change);
 
        }
 

	
 
        if *to_check_part == ITP::Unknown {
 
            *to_check_idx += 1;
 
            *template_idx = Self::find_subtree_end_idx(template_parts, *template_idx);
 

	
 
            // By definition LHS and RHS had depth change of -1
 
            return Some(-1);
 
        }
 

	
 
        None
 
    }
 

	
 
    /// Attempts to infer types between two `InferenceType` instances. This 
 
    /// function is unsafe as it accepts pointers to work around Rust's 
 
    /// borrowing rules. The caller must ensure that the pointers are distinct.
 
    unsafe fn infer_subtrees_for_both_types(
 
        type_a: *mut InferenceType, start_idx_a: usize,
 
        type_b: *mut InferenceType, start_idx_b: usize
 
    ) -> DualInferenceResult {
 
        use InferenceTypePart as ITP;
 

	
 
        debug_assert!(!std::ptr::eq(type_a, type_b), "same inference types");
 
        let type_a = &mut *type_a;
 
        let type_b = &mut *type_b;
 

	
 
        let mut modified_a = false;
 
        let mut modified_b = false;
 
        let mut idx_a = start_idx_a;
 
        let mut idx_b = start_idx_b;
 
        let mut depth = 1;
 

	
 
        while depth > 0 {
 
            // Advance indices if we encounter markers or equal parts
 
            let part_a = &type_a.parts[idx_a];
 
            let part_b = &type_b.parts[idx_b];
 
            
 
            if part_a == part_b {
 
                let depth_change = part_a.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, part_b.depth_change());
 
                idx_a += 1;
 
                idx_b += 1;
 
                continue;
 
            }
 
            if let ITP::Marker(_) = part_a { idx_a += 1; continue; }
 
            if let ITP::Marker(_) = part_b { idx_b += 1; continue; }
 

	
 
            // Types are not equal and are both not markers
 
            if let Some(depth_change) = Self::infer_part_for_single_type(type_a, &mut idx_a, &type_b.parts, &mut idx_b) {
 
                depth += depth_change;
 
                modified_a = true;
 
                continue;
 
            }
 
            if let Some(depth_change) = Self::infer_part_for_single_type(type_b, &mut idx_b, &type_a.parts, &mut idx_a) {
 
                depth += depth_change;
 
                modified_b = true;
 
                continue;
 
            }
 

	
 
            // And can also not be inferred in any way: types must be incompatible
 
            return DualInferenceResult::Incompatible;
 
        }
 

	
 
        if modified_a { type_a.recompute_is_done(); }
 
        if modified_b { type_b.recompute_is_done(); }
 

	
 
        // If here then we completely inferred the subtrees.
 
        match (modified_a, modified_b) {
 
            (false, false) => DualInferenceResult::Neither,
 
            (false, true) => DualInferenceResult::Second,
 
            (true, false) => DualInferenceResult::First,
 
            (true, true) => DualInferenceResult::Both
 
        }
 
    }
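    // Worked example, not part of the original code: inferring
    //
    //   type_a = [Array, Unknown]   (an array of a yet-unknown element type)
    //   type_b = [ArrayLike, Int]   (an array-or-slice of int)
    //
    // against each other narrows `ArrayLike` in type_b to `Array` (using type_a)
    // and fills in `Unknown` in type_a with `Int` (using type_b), so both end up
    // as [Array, Int] and the call returns DualInferenceResult::Both.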
 

	
 
    /// Attempts to infer the first subtree based on the template. Like
 
    /// `infer_subtrees_for_both_types`, but now only applying inference to
 
    /// `to_infer` based on the type information in `template`.
 
    /// Secondary use is to make sure that a type follows a certain template.
 
    fn infer_subtree_for_single_type(
 
        to_infer: &mut InferenceType, mut to_infer_idx: usize,
 
        template: &[InferenceTypePart], mut template_idx: usize,
 
    ) -> SingleInferenceResult {
 
        let mut modified = false;
 
        let mut depth = 1;
 

	
 
        while depth > 0 {
 
            let to_infer_part = &to_infer.parts[to_infer_idx];
 
            let template_part = &template[template_idx];
 

	
 
            if to_infer_part == template_part {
 
                let depth_change = to_infer_part.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, template_part.depth_change());
 
                to_infer_idx += 1;
 
                template_idx += 1;
 
                continue;
 
            }
 
            if to_infer_part.is_marker() { to_infer_idx += 1; continue; }
 
            if template_part.is_marker() { template_idx += 1; continue; }
 

	
 
            // Types are not equal and not markers. So check if we can infer 
 
            // anything
 
            if let Some(depth_change) = Self::infer_part_for_single_type(
 
                to_infer, &mut to_infer_idx, template, &mut template_idx
 
            ) {
 
                depth += depth_change;
 
                modified = true;
 
                continue;
 
            }
 

	
 
            // We cannot infer anything, but the template may still be 
 
            // compatible with the type we're inferring
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                template, &mut template_idx, &to_infer.parts, &mut to_infer_idx
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 

	
 
            return SingleInferenceResult::Incompatible
 
        }
 

	
 
        return if modified {
 
            to_infer.recompute_is_done();
 
            SingleInferenceResult::Modified
 
        } else {
 
            SingleInferenceResult::Unmodified
 
        }
 
    }
 

	
 
    /// Checks if both types are compatible; does not perform any inference
 
    fn check_subtrees(
 
        type_parts_a: &[InferenceTypePart], start_idx_a: usize,
 
        type_parts_b: &[InferenceTypePart], start_idx_b: usize
 
    ) -> bool {
 
        let mut depth = 1;
 
        let mut idx_a = start_idx_a;
 
        let mut idx_b = start_idx_b;
 

	
 
        while depth > 0 {
 
            let part_a = &type_parts_a[idx_a];
 
            let part_b = &type_parts_b[idx_b];
 

	
 
            if part_a == part_b {
 
                let depth_change = part_a.depth_change();
 
                depth += depth_change;
 
                debug_assert_eq!(depth_change, part_b.depth_change());
 
                idx_a += 1;
 
                idx_b += 1;
 
                continue;
 
            }
 
            
 
            if part_a.is_marker() { idx_a += 1; continue; }
 
            if part_b.is_marker() { idx_b += 1; continue; }
 

	
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                type_parts_a, &mut idx_a, type_parts_b, &mut idx_b
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 
            if let Some(depth_change) = Self::check_part_for_single_type(
 
                type_parts_b, &mut idx_b, type_parts_a, &mut idx_a
 
            ) {
 
                depth += depth_change;
 
                continue;
 
            }
 

	
 
            return false;
 
        }
 

	
 
        true
 
    }
 

	
 
    /// Performs the conversion of the inference type into a concrete type.
 
    /// By calling this function you must make sure that no unspecified types
 
    /// (e.g. Unknown or IntegerLike) exist in the type.
 
    fn write_concrete_type(&self, concrete_type: &mut ConcreteType) {
 
        use InferenceTypePart as ITP;
 
        use ConcreteTypePart as CTP;
 

	
 
        debug_assert!(concrete_type.parts.is_empty());
 
        concrete_type.parts.reserve(self.parts.len());
 

	
 
        for part in &self.parts {
 
            let converted_part = match part {
 
                ITP::Marker(_) => { continue; },
 
                ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | ITP::ArrayLike | ITP::PortLike => {
 
                    debug_assert!(false, "Attempted to convert inference type part {:?} into concrete type", part);
 
                    unreachable!();
 
                },
 
                ITP::Void => CTP::Void,
 
                ITP::Message => CTP::Message,
 
                ITP::Bool => CTP::Bool,
 
                ITP::Byte => CTP::Byte,
 
                ITP::Short => CTP::Short,
 
                ITP::Int => CTP::Int,
 
                ITP::Long => CTP::Long,
 
                ITP::String => CTP::String,
 
                ITP::Array => CTP::Array,
 
                ITP::Slice => CTP::Slice,
 
                ITP::Input => CTP::Input,
 
                ITP::Output => CTP::Output,
 
                ITP::Instance(id, num) => CTP::Instance(*id, *num),
 
            };
 
            concrete_type.parts.push(converted_part);
 
        }
 
    }
 

	
 
    /// Writes a human-readable version of the type to a string. Mostly a
 
    /// function for internal use.
 
    fn write_display_name(
 
        buffer: &mut String, heap: &Heap, parts: &[InferenceTypePart], mut idx: usize
 
    ) -> usize {
 
        use InferenceTypePart as ITP;
 

	
 
        match &parts[idx] {
 
            ITP::Marker(_) => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1)
 
            },
 
            ITP::Unknown => buffer.push_str("?"),
 
            ITP::NumberLike => buffer.push_str("num?"),
 
            ITP::IntegerLike => buffer.push_str("int?"),
 
            ITP::ArrayLike => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[?]");
 
            },
 
            ITP::PortLike => {
 
                buffer.push_str("port?<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            }
 
            ITP::Void => buffer.push_str("void"),
 
            ITP::Message => buffer.push_str("msg"),
 
            ITP::Bool => buffer.push_str("bool"),
 
            ITP::Byte => buffer.push_str("byte"),
 
            ITP::Short => buffer.push_str("short"),
 
            ITP::Int => buffer.push_str("int"),
 
            ITP::Long => buffer.push_str("long"),
 
            ITP::String => buffer.push_str("str"),
 
            ITP::Array => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[]");
 
            },
 
            ITP::Slice => {
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push_str("[..]");
 
            },
 
            ITP::Input => {
 
                buffer.push_str("in<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Output => {
 
                buffer.push_str("out<");
 
                idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                buffer.push('>');
 
            },
 
            ITP::Instance(definition_id, num_sub) => {
 
                let definition = &heap[*definition_id];
 
                buffer.push_str(&String::from_utf8_lossy(&definition.identifier().value));
 
                if *num_sub > 0 {
 
                    buffer.push('<');
 
                    idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    for _sub_idx in 1..*num_sub {
 
                        buffer.push_str(", ");
 
                        idx = Self::write_display_name(buffer, heap, parts, idx + 1);
 
                    }
 
                    buffer.push('>');
 
                }
 
            },
 
        }
 

	
 
        idx
 
    }
 

	
 
    /// Returns the display name of a (part of) the type tree. Will allocate a
 
    /// string.
 
    fn partial_display_name(heap: &Heap, parts: &[InferenceTypePart]) -> String {
 
        let mut buffer = String::with_capacity(parts.len() * 6);
 
        Self::write_display_name(&mut buffer, heap, parts, 0);
 
        buffer
 
    }
 

	
 
    /// Returns the display name of the full type tree. Will allocate a string.
 
    fn display_name(&self, heap: &Heap) -> String {
 
        Self::partial_display_name(heap, &self.parts)
 
    }
 
}
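// Illustration (added for clarity, not part of the original changeset): given
// the match arms in `write_display_name` above, a parts buffer such as
// `[ITP::Input, ITP::Byte]` renders as "in<byte>", `[ITP::Array, ITP::Byte]`
// renders as "byte[]", and a lone `[ITP::Unknown]` renders as "?".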
 

	
 
/// Iterator over the subtrees that follow a marker in an `InferenceType`
 
/// instance. Returns immutable slices over the internal parts
 
struct InferenceTypeMarkerIter<'a> {
 
    parts: &'a [InferenceTypePart],
 
    idx: usize,
 
}
 

	
 
impl<'a> InferenceTypeMarkerIter<'a> {
 
    fn new(parts: &'a [InferenceTypePart]) -> Self {
 
        Self{ parts, idx: 0 }
 
    }
 
}
 

	
 
impl<'a> Iterator for InferenceTypeMarkerIter<'a> {
 
    type Item = (usize, &'a [InferenceTypePart]);
 

	
 
    fn next(&mut self) -> Option<Self::Item> {
 
        // Iterate until we find a marker
 
        while self.idx < self.parts.len() {
 
            if let InferenceTypePart::Marker(marker) = self.parts[self.idx] {
 
                // Found a marker, find the subtree end
 
                let start_idx = self.idx + 1;
 
                let end_idx = InferenceType::find_subtree_end_idx(self.parts, start_idx);
 

	
 
                // Modify internal index, then return items
 
                self.idx = end_idx;
 
                return Some((marker, &self.parts[start_idx..end_idx]))
 
            }
 

	
 
            self.idx += 1;
 
        }
 

	
 
        None
 
    }
 
}
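// Usage sketch (an assumption for illustration, not part of the original
// changeset): the iterator is expected to be consumed roughly as
//
//     for (marker_value, subtree) in InferenceTypeMarkerIter::new(&ty.parts) {
//         // `subtree` is the slice of parts directly following Marker(marker_value)
//     }
//
// where `ty` is some `InferenceType`.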
 

	
 
#[derive(PartialEq, Eq)]
 
#[derive(Debug, PartialEq, Eq)]
 
enum DualInferenceResult {
 
    Neither,        // neither argument is clarified
 
    First,          // first argument is clarified using the second one
 
    Second,         // second argument is clarified using the first one
 
    Both,           // both arguments are clarified
 
    Incompatible,   // types are incompatible: programmer error
 
}
 

	
 
impl DualInferenceResult {
 
    fn modified_any(&self) -> bool {
 
        match self {
 
            DualInferenceResult::First | DualInferenceResult::Second | DualInferenceResult::Both => true,
 
            _ => false
 
        }
 
    }
 
    fn modified_lhs(&self) -> bool {
 
        match self {
 
            DualInferenceResult::First | DualInferenceResult::Both => true,
 
            _ => false
 
        }
 
    }
 
    fn modified_rhs(&self) -> bool {
 
        match self {
 
            DualInferenceResult::Second | DualInferenceResult::Both => true,
 
            _ => false
 
        }
 
    }
 
}
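// Illustration (added for clarity): unifying `byte[]` with a type that is
// still `?[]` clarifies only the second operand, so the result would be
// `DualInferenceResult::Second`; unifying `byte` with `bool` would yield
// `Incompatible`.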
 

	
 
#[derive(PartialEq, Eq)]
 
#[derive(Debug, PartialEq, Eq)]
 
enum SingleInferenceResult {
 
    Unmodified,
 
    Modified,
 
    Incompatible
 
}
 

	
 
enum DefinitionType{
 
    None,
 
    Component(ComponentId),
 
    Function(FunctionId),
 
}
 

	
 
pub(crate) struct ResolveQueueElement {
 
    pub(crate) root_id: RootId,
 
    pub(crate) definition_id: DefinitionId,
 
    pub(crate) monomorph_types: Vec<ConcreteType>,
 
}
 

	
 
pub(crate) type ResolveQueue = Vec<ResolveQueueElement>;
 

	
 
/// This particular visitor will recurse depth-first into the AST and ensures
 
/// that all expressions have the appropriate types. At the moment this implies:
 
///
 
///     - Type checking arguments to unary and binary operators.
 
///     - Type checking assignment, indexing, slicing and select expressions.
 
///     - Checking arguments to functions and component instantiations.
 
///
 
/// This will be achieved by slowly descending into the AST. At any given
 
/// expression we may depend on the types of its subexpressions, as well as on
/// constraints imposed by the parent expression, to further specify its type.
 
pub(crate) struct TypeResolvingVisitor {
 
    // Current definition we're typechecking.
 
    definition_type: DefinitionType,
 
    poly_vars: Vec<ConcreteType>,
 

	
 
    // Buffers for iteration over substatements and subexpressions
 
    stmt_buffer: Vec<StatementId>,
 
    expr_buffer: Vec<ExpressionId>,
 

	
 
    // If instantiating a monomorph of a polymorphic proctype, then we store
    // the concrete values of the polymorphic variables here. There should be
    // as many as, and in the same order as, the definition's polyargs.
 
    polyvars: Vec<ConcreteType>,
 
    // Mapping from parser type to inferred type. We keep refining these types
    // until we're stuck or until every type is fully determined.
 
    var_types: HashMap<VariableId, VarData>,      // types of variables
 
    expr_types: HashMap<ExpressionId, InferenceType>,   // types of expressions
 
    extra_data: HashMap<ExpressionId, ExtraData>,       // data for function call inference
 

	
 
    // Keeping track of which expressions need to be reinferred because the
 
    // expressions they're linked to made progress on an associated type
 
    expr_queued: HashSet<ExpressionId>,
 
}
 

	
 
// TODO: @rename used for calls and struct literals, maybe union literals?
 
struct ExtraData {
 
    /// Progression of polymorphic variables (if any)
 
    poly_vars: Vec<InferenceType>,
 
    /// Progression of types of call arguments or struct members
 
    embedded: Vec<InferenceType>,
 
    returned: InferenceType,
 
}
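// Illustration (added for clarity): for a call `f<T>(a, b)` the corresponding
// `ExtraData` would hold one `poly_vars` entry for `T`, two `embedded` entries
// tracking the inferred types of `a` and `b`, and `returned` tracking the
// inferred return type of the call.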
 

	
 
struct VarData {
 
    var_type: InferenceType,
 
    used_at: Vec<ExpressionId>,
 
}
 

	
 
impl TypeResolvingVisitor {
 
    pub(crate) fn new() -> Self {
 
        TypeResolvingVisitor{
 
            definition_type: DefinitionType::None,
 
            poly_vars: Vec::new(),
 
            stmt_buffer: Vec::with_capacity(STMT_BUFFER_INIT_CAPACITY),
 
            expr_buffer: Vec::with_capacity(EXPR_BUFFER_INIT_CAPACITY),
 
            polyvars: Vec::new(),
 
            var_types: HashMap::new(),
 
            expr_types: HashMap::new(),
 
            extra_data: HashMap::new(),
 
            expr_queued: HashSet::new(),
 
        }
 
    }
 

	
 
    // TODO: @cleanup Unsure about this, maybe a pattern will arise after
 
    //  a while.
 
    pub(crate) fn queue_module_definitions(ctx: &Ctx, queue: &mut ResolveQueue) {
 
        let root_id = ctx.module.root_id;
 
        let root = &ctx.heap.protocol_descriptions[root_id];
 
        for definition_id in &root.definitions {
 
            let definition = &ctx.heap[*definition_id];
 
            match definition {
 
                Definition::Function(definition) => {
 
                    if definition.poly_vars.is_empty() {
 
                        queue.push(ResolveQueueElement{
 
                            root_id,
 
                            definition_id: *definition_id,
 
                            monomorph_types: Vec::new(),
 
                        })
 
                    }
 
                },
 
                Definition::Component(definition) => {
 
                    if definition.poly_vars.is_empty() {
 
                        queue.push(ResolveQueueElement{
 
                            root_id,
 
                            definition_id: *definition_id,
 
                            monomorph_types: Vec::new(),
 
                        })
 
                    }
 
                },
 
                Definition::Enum(_) | Definition::Struct(_) => {},
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn handle_module_definition(
 
        &mut self, ctx: &mut Ctx, queue: &mut ResolveQueue, element: ResolveQueueElement
 
    ) -> VisitorResult {
 
        // Visit the definition
 
        debug_assert_eq!(ctx.module.root_id, element.root_id);
 
        self.visit_definition(ctx, element.definition_id)?;
 

	
 
        // Keep resolving types
 
        self.resolve_types(ctx, queue)?;
 
        Ok(())
 
    }
 

	
 
    fn reset(&mut self) {
 
        self.definition_type = DefinitionType::None;
 
        self.poly_vars.clear();
 
        self.stmt_buffer.clear();
 
        self.expr_buffer.clear();
 
        self.polyvars.clear();
 
        self.var_types.clear();
 
        self.expr_types.clear();
 
        self.extra_data.clear();
 
        self.expr_queued.clear();
 
    }
 
}
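// Driver sketch (an assumption about intended use, not part of the original
// changeset): a module's definitions are first queued and the queue is then
// drained, with polymorphic monomorphs being appended as they are discovered:
//
//     let mut queue = ResolveQueue::new();
//     TypeResolvingVisitor::queue_module_definitions(&ctx, &mut queue);
//     while let Some(element) = queue.pop() {
//         let mut visitor = TypeResolvingVisitor::new();
//         visitor.handle_module_definition(&mut ctx, &mut queue, element)?;
//     }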
 

	
 
impl Visitor2 for TypeResolvingVisitor {
 
    // Definitions
 

	
 
    fn visit_component_definition(&mut self, ctx: &mut Ctx, id: ComponentId) -> VisitorResult {
 
        self.reset();
 
        self.definition_type = DefinitionType::Component(id);
 

	
 
        let comp_def = &ctx.heap[id];
 
        debug_assert_eq!(comp_def.poly_vars.len(), self.polyvars.len(), "component polyvars do not match imposed polyvars");
 
        debug_assert_eq!(comp_def.poly_vars.len(), self.poly_vars.len(), "component polyvars do not match imposed polyvars");
 

	
 
        for param_id in comp_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, param.parser_type, true);
 
            debug_assert!(var_type.is_done, "expected component arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData{ var_type, used_at: Vec::new() });
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionId) -> VisitorResult {
 
        self.reset();
 
        self.definition_type = DefinitionType::Function(id);
 

	
 
        let func_def = &ctx.heap[id];
 
        debug_assert_eq!(func_def.poly_vars.len(), self.polyvars.len(), "function polyvars do not match imposed polyvars");
 
        debug_assert_eq!(func_def.poly_vars.len(), self.poly_vars.len(), "function polyvars do not match imposed polyvars");
 

	
 
        for param_id in func_def.parameters.clone() {
 
            let param = &ctx.heap[param_id];
 
            let var_type = self.determine_inference_type_from_parser_type(ctx, param.parser_type, true);
 
            debug_assert!(var_type.is_done, "expected function arguments to be concrete types");
 
            self.var_types.insert(param_id.upcast(), VarData{ var_type, used_at: Vec::new() });
 
        }
 

	
 
        let body_stmt_id = ctx.heap[id].body;
 
        self.visit_stmt(ctx, body_stmt_id)
 
    }
 

	
 
    // Statements
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        // Transfer statements for traversal
 
        let block = &ctx.heap[id];
 

	
 
        for stmt_id in block.statements.clone() {
 
            self.visit_stmt(ctx, stmt_id);
 
            self.visit_stmt(ctx, stmt_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, ctx: &mut Ctx, id: MemoryStatementId) -> VisitorResult {
 
        let memory_stmt = &ctx.heap[id];
 

	
 
        let local = &ctx.heap[memory_stmt.variable];
 
        let var_type = self.determine_inference_type_from_parser_type(ctx, local.parser_type, true);
 
        self.var_types.insert(memory_stmt.variable.upcast(), VarData{ var_type, used_at: Vec::new() });
 

	
 
        let expr_id = memory_stmt.initial;
 
        self.visit_expr(ctx, expr_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, ctx: &mut Ctx, id: ChannelStatementId) -> VisitorResult {
 
        let channel_stmt = &ctx.heap[id];
 

	
 
        let from_local = &ctx.heap[channel_stmt.from];
 
        let from_var_type = self.determine_inference_type_from_parser_type(ctx, from_local.parser_type, true);
 
        self.var_types.insert(from_local.this.upcast(), VarData{ var_type: from_var_type, used_at: Vec::new() });
 

	
 
        let to_local = &ctx.heap[channel_stmt.to];
 
        let to_var_type = self.determine_inference_type_from_parser_type(ctx, to_local.parser_type, true);
 
        self.var_types.insert(to_local.this.upcast(), VarData{ var_type: to_var_type, used_at: Vec::new() });
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        let labeled_stmt = &ctx.heap[id];
 
        let substmt_id = labeled_stmt.body;
 
        self.visit_stmt(ctx, substmt_id)
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        let if_stmt = &ctx.heap[id];
 

	
 
        let true_body_id = if_stmt.true_body;
 
        let false_body_id = if_stmt.false_body;
 
        let test_expr_id = if_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_stmt(ctx, true_body_id)?;
 
        self.visit_stmt(ctx, false_body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_while_stmt(&mut self, ctx: &mut Ctx, id: WhileStatementId) -> VisitorResult {
 
        let while_stmt = &ctx.heap[id];
 

	
 
        let body_id = while_stmt.body;
 
        let test_expr_id = while_stmt.test;
 

	
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        let sync_stmt = &ctx.heap[id];
 
        let body_id = sync_stmt.body;
 

	
 
        self.visit_stmt(ctx, body_id)
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        let return_stmt = &ctx.heap[id];
 
        let expr_id = return_stmt.expression;
 

	
 
        self.visit_expr(ctx, expr_id)
 
    }
 

	
 
    fn visit_assert_stmt(&mut self, ctx: &mut Ctx, id: AssertStatementId) -> VisitorResult {
 
        let assert_stmt = &ctx.heap[id];
 
        let test_expr_id = assert_stmt.expression;
 

	
 
        self.visit_expr(ctx, test_expr_id)
 
    }
 

	
 
    fn visit_new_stmt(&mut self, ctx: &mut Ctx, id: NewStatementId) -> VisitorResult {
 
        let new_stmt = &ctx.heap[id];
 
        let call_expr_id = new_stmt.expression;
 

	
 
        self.visit_call_expr(ctx, call_expr_id)
 
    }
 

	
 
    fn visit_expr_stmt(&mut self, ctx: &mut Ctx, id: ExpressionStatementId) -> VisitorResult {
 
        let expr_stmt = &ctx.heap[id];
 
        let subexpr_id = expr_stmt.expression;
 

	
 
        self.visit_expr(ctx, subexpr_id)
 
    }
 

	
 
    // Expressions
 

	
 
    fn visit_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let assign_expr = &ctx.heap[id];
 
        let left_expr_id = assign_expr.left;
 
        let right_expr_id = assign_expr.right;
 

	
 
        self.visit_expr(ctx, left_expr_id)?;
 
        self.visit_expr(ctx, right_expr_id)?;
 

	
 
        self.progress_assignment_expr(ctx, id)
 
    }
 

	
 
    fn visit_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let conditional_expr = &ctx.heap[id];
 
        let test_expr_id = conditional_expr.test;
 
        let true_expr_id = conditional_expr.true_expression;
 
        let false_expr_id = conditional_expr.false_expression;
 

	
 
        self.expr_types.insert(test_expr_id, InferenceType::new(false, true, vec![InferenceTypePart::Bool]));
 
        self.visit_expr(ctx, test_expr_id)?;
 
        self.visit_expr(ctx, true_expr_id)?;
 
        self.visit_expr(ctx, false_expr_id)?;
 

	
 
        self.progress_conditional_expr(ctx, id)
 
    }
 

	
 
    fn visit_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let binary_expr = &ctx.heap[id];
 
        let lhs_expr_id = binary_expr.left;
 
        let rhs_expr_id = binary_expr.right;
 

	
 
        self.visit_expr(ctx, lhs_expr_id)?;
 
        self.visit_expr(ctx, rhs_expr_id)?;
 

	
 
        self.progress_binary_expr(ctx, id)
 
    }
 

	
 
    fn visit_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let unary_expr = &ctx.heap[id];
 
        let arg_expr_id = unary_expr.expression;
 

	
 
        self.visit_expr(ctx, arg_expr_id)?;
 

	
 
        self.progress_unary_expr(ctx, id)
 
    }
 

	
 
    fn visit_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let indexing_expr = &ctx.heap[id];
 
        let subject_expr_id = indexing_expr.subject;
 
        let index_expr_id = indexing_expr.index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, index_expr_id)?;
 

	
 
        self.progress_indexing_expr(ctx, id)
 
    }
 

	
 
    fn visit_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let slicing_expr = &ctx.heap[id];
 
        let subject_expr_id = slicing_expr.subject;
 
        let from_expr_id = slicing_expr.from_index;
 
        let to_expr_id = slicing_expr.to_index;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 
        self.visit_expr(ctx, from_expr_id)?;
 
        self.visit_expr(ctx, to_expr_id)?;
 

	
 
        self.progress_slicing_expr(ctx, id)
 
    }
 

	
 
    fn visit_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let select_expr = &ctx.heap[id];
 
        let subject_expr_id = select_expr.subject;
 

	
 
        self.visit_expr(ctx, subject_expr_id)?;
 

	
 
        self.progress_select_expr(ctx, id)
 
    }
 

	
 
    fn visit_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let array_expr = &ctx.heap[id];
 
        // TODO: @performance
 
        for element_id in array_expr.elements.clone().into_iter() {
 
            self.visit_expr(ctx, element_id)?;
 
        }
 

	
 
        self.progress_array_expr(ctx, id)
 
    }
 

	
 
    fn visit_constant_expr(&mut self, ctx: &mut Ctx, id: ConstantExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        self.progress_constant_expr(ctx, id)
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        self.insert_initial_call_polymorph_data(ctx, id);
 

	
 
        // TODO: @performance
 
        let call_expr = &ctx.heap[id];
 
        for arg_expr_id in call_expr.arguments.clone() {
 
            self.visit_expr(ctx, arg_expr_id)?;
 
        }
 

	
 
        self.progress_call_expr(ctx, id)
 
    }
 

	
 
    fn visit_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 

	
 
        let var_expr = &ctx.heap[id];
 
        debug_assert!(var_expr.declaration.is_some());
 
        let var_data = self.var_types.get_mut(var_expr.declaration.as_ref().unwrap()).unwrap();
 
        var_data.used_at.push(upcast_id);
 

	
 
        self.progress_variable_expr(ctx, id)
 
    }
 
}
 

	
 
macro_rules! debug_assert_expr_ids_unique_and_known {
 
    // Base case for a single expression ID
 
    ($resolver:ident, $id:ident) => {
 
        if cfg!(debug_assertions) {
 
            assert!($resolver.expr_types.contains_key(&$id));
 
        }
 
    };
 
    // Base case for two expression IDs
 
    ($resolver:ident, $id1:ident, $id2:ident) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2);
 
    };
 
    // Generic case
 
    ($resolver:ident, $id1:ident, $id2:ident, $($tail:ident),+) => {
 
        debug_assert_ne!($id1, $id2);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id1);
 
        debug_assert_expr_ids_unique_and_known!($resolver, $id2, $($tail),+);
 
    };
 
}
 

	
 
macro_rules! debug_assert_ptrs_distinct {
 
    // Base case
 
    ($ptr1:ident, $ptr2:ident) => {
 
        debug_assert!(!std::ptr::eq($ptr1, $ptr2));
 
    };
 
    // Generic case
 
    ($ptr1:ident, $ptr2:ident, $($tail:ident),+) => {
 
        debug_assert_ptrs_distinct!($ptr1, $ptr2);
 
        debug_assert_ptrs_distinct!($ptr2, $($tail),+);
 
    };
 
}
 

	
 
enum TypeConstraintResult {
 
    Progress, // Success: Made progress in applying constraints
 
    NoProgess, // Success: But did not make any progress in applying constraints
 
    ErrExprType, // Error: Expression type did not match the argument(s) of the expression type
 
    ErrArgType, // Error: Expression argument types did not match
 
impl TypeResolvingVisitor {
 
    fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError2> {
 
        // Keep inferring until we can no longer make any progress
 
        println!("DEBUG: Resolve queue is {:?}", &self.expr_queued);
 
        while let Some(next_expr_id) = self.expr_queued.iter().next() {
 
            let next_expr_id = *next_expr_id;
 
            self.expr_queued.remove(&next_expr_id);
 
            self.progress_expr(ctx, next_expr_id)?;
 
        }
 

	
 
        // Should have inferred everything
 
        for (expr_id, expr_type) in self.expr_types.iter() {
 
            if !expr_type.is_done {
 
                let mut buffer = std::fs::File::create("type_debug.txt").unwrap();
 
                use crate::protocol::ast_printer::ASTWriter;
 
                let mut w = ASTWriter::new();
 
                w.write_ast(&mut buffer, &ctx.heap);
 
                // TODO: Auto-inference of integerlike types
 
                let expr = &ctx.heap[*expr_id];
 
                return Err(ParseError2::new_error(
 
                    &ctx.module.source, expr.position(),
 
                    &format!(
 
                        "Could not fully infer the type of this expression (got '{}')",
 
                        expr_type.display_name(&ctx.heap)
 
                    )
 
                ))
 
            }
 

	
 
            let concrete_type = ctx.heap[*expr_id].get_type_mut();
 
            expr_type.write_concrete_type(concrete_type);
 
        }
 

	
 
        // Check all things we need to monomorphize
 
        // TODO: Struct/enum/union monomorphization
 
        for (call_expr_id, extra_data) in self.extra_data.iter() {
 
            if extra_data.poly_vars.is_empty() { continue; }
 

	
 
            // We have a polymorph
 
            let mut monomorph_types = Vec::with_capacity(extra_data.poly_vars.len());
 
            for (poly_idx, poly_type) in extra_data.poly_vars.iter().enumerate() {
 
                if !poly_type.is_done {
 
                    // TODO: Single clean function for function signatures and polyvars.
 
                    // TODO: Better error message
 
                    let expr = &ctx.heap[*call_expr_id];
 
                    return Err(ParseError2::new_error(
 
                        &ctx.module.source, expr.position(),
 
                        &format!(
 
                            "Could not fully infer the type of polymorphic variable {} of this expression (got '{}')",
 
                            poly_idx, poly_type.display_name(&ctx.heap)
 
                        )
 
                    ))
 
                }
 

	
 
                let mut concrete_type = ConcreteType::default();
 
                poly_type.write_concrete_type(&mut concrete_type);
 
                monomorph_types.insert(poly_idx, concrete_type);
 
            }
 

	
 
            // Resolve to call expression's definition
 
            let call_expr = if let Expression::Call(call_expr) = &ctx.heap[*call_expr_id] {
 
                call_expr
 
            } else {
 
                todo!("implement different kinds of polymorph expressions");
 
            };
 

	
 
            if let Method::Symbolic(symbolic) = &call_expr.method {
 
                let definition_id = symbolic.definition.unwrap();
 
                let root_id = ctx.types
 
                    .get_base_definition(&definition_id)
 
                    .unwrap()
 
                    .ast_root;
 

	
 
                queue.push(ResolveQueueElement{
 
                    root_id,
 
                    definition_id,
 
                    monomorph_types,
 
                })
 
            }
 
        }
 

	
 
        // Finally, if the currently resolved definition is a monomorph, then we
 
        // add it to the type table
 
        if !self.poly_vars.is_empty() {
 
            let definition_id = match &self.definition_type {
 
                DefinitionType::None => unreachable!(),
 
                DefinitionType::Function(id) => id.upcast(),
 
                DefinitionType::Component(id) => id.upcast(),
 
            };
 
            ctx.types.instantiate_monomorph(&definition_id, &self.poly_vars)
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError2> {
 
        match &ctx.heap[id] {
 
            Expression::Assignment(expr) => {
 
                let id = expr.this;
 
                self.progress_assignment_expr(ctx, id)
 
            },
 
            Expression::Conditional(expr) => {
 
                let id = expr.this;
 
                self.progress_conditional_expr(ctx, id)
 
            },
 
            Expression::Binary(expr) => {
 
                let id = expr.this;
 
                self.progress_binary_expr(ctx, id)
 
            },
 
            Expression::Unary(expr) => {
 
                let id = expr.this;
 
                self.progress_unary_expr(ctx, id)
 
            },
 
            Expression::Indexing(expr) => {
 
                let id = expr.this;
 
                self.progress_indexing_expr(ctx, id)
 
            },
 
            Expression::Slicing(expr) => {
 
                let id = expr.this;
 
                self.progress_slicing_expr(ctx, id)
 
            },
 
            Expression::Select(expr) => {
 
                let id = expr.this;
 
                self.progress_select_expr(ctx, id)
 
            },
 
            Expression::Array(expr) => {
 
                let id = expr.this;
 
                self.progress_array_expr(ctx, id)
 
            },
 
            Expression::Constant(expr) => {
 
                let id = expr.this;
 
                self.progress_constant_expr(ctx, id)
 
            },
 
            Expression::Call(expr) => {
 
                let id = expr.this;
 
                self.progress_call_expr(ctx, id)
 
            },
 
            Expression::Variable(expr) => {
 
                let id = expr.this;
 
                self.progress_variable_expr(ctx, id)
 
            }
 
        }
 
    }
 

	
 
impl TypeResolvingVisitor {
 
    fn progress_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> Result<(), ParseError2> {
 
        use AssignmentOperator as AO;
 

	
 
        // TODO: Assignable check
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.left;
 
        let arg2_expr_id = expr.right;
 

	
 
        let progress_base = match expr.operation {
 
            AO::Set =>
 
                false,
 
            AO::Multiplied | AO::Divided | AO::Added | AO::Subtracted =>
 
                self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?,
 
            AO::Remained | AO::ShiftedLeft | AO::ShiftedRight |
 
            AO::BitwiseAnded | AO::BitwiseXored | AO::BitwiseOred =>
 
                self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?,
 
        };
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        if progress_base || progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_conditional_expr(&mut self, ctx: &mut Ctx, id: ConditionalExpressionId) -> Result<(), ParseError2> {
 
        // Note: test expression type is already enforced
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_expr_id = expr.true_expression;
 
        let arg2_expr_id = expr.false_expression;
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = self.apply_equal3_constraint(
 
            ctx, upcast_id, arg1_expr_id, arg2_expr_id, 0
 
        )?;
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_expr_id); }
 
        if progress_arg2 { self.queue_expr(arg2_expr_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_binary_expr(&mut self, ctx: &mut Ctx, id: BinaryExpressionId) -> Result<(), ParseError2> {
 
        // Note: our expression type might be fixed by our parent, but we still
 
        // need to make sure it matches the type associated with our operation.
 
        use BinaryOperator as BO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg1_id = expr.left;
 
        let arg2_id = expr.right;
 

	
 
        let (progress_expr, progress_arg1, progress_arg2) = match expr.operation {
 
            BO::Concatenate => {
 
                // Arguments may be arrays/slices, output is always an array
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &ARRAYLIKE_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &ARRAYLIKE_TEMPLATE)?;
 

	
 
                // If they're all arraylike, then we want the subtype to match
 
                let (subtype_expr, subtype_arg1, subtype_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 1)?;
 

	
 
                (progress_expr || subtype_expr, progress_arg1 || subtype_arg1, progress_arg2 || subtype_arg2)
 
            },
 
            BO::LogicalOr | BO::LogicalAnd => {
 
                // Forced boolean on all
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg1 = self.apply_forced_constraint(ctx, arg1_id, &BOOL_TEMPLATE)?;
 
                let progress_arg2 = self.apply_forced_constraint(ctx, arg2_id, &BOOL_TEMPLATE)?;
 

	
 
                (progress_expr, progress_arg1, progress_arg2)
 
            },
 
            BO::BitwiseOr | BO::BitwiseXor | BO::BitwiseAnd | BO::Remainder | BO::ShiftLeft | BO::ShiftRight => {
 
                // All equal of integer type
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
            BO::Equality | BO::Inequality | BO::LessThan | BO::GreaterThan | BO::LessThanEqual | BO::GreaterThanEqual => {
 
                // Equal2 on args, forced boolean output
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg_base = self.apply_forced_constraint(ctx, arg1_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_arg1, progress_arg2) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, arg1_id, 0, arg2_id, 0)?;
 

	
 
                (progress_expr, progress_arg_base || progress_arg1, progress_arg_base || progress_arg2)
 
            },
 
            BO::Add | BO::Subtract | BO::Multiply | BO::Divide => {
 
                // All equal of number type
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg1, progress_arg2) =
 
                    self.apply_equal3_constraint(ctx, upcast_id, arg1_id, arg2_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg1, progress_base || progress_arg2)
 
            },
 
        };
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg1 { self.queue_expr(arg1_id); }
 
        if progress_arg2 { self.queue_expr(arg2_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_unary_expr(&mut self, ctx: &mut Ctx, id: UnaryExpressionId) -> Result<(), ParseError2> {
 
        use UnaryOperation as UO;
 

	
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let arg_id = expr.expression;
 

	
 
        let (progress_expr, progress_arg) = match expr.operation {
 
            UO::Positive | UO::Negative => {
 
                // Equal types of numeric class
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &NUMBERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::BitwiseNot | UO::PreIncrement | UO::PreDecrement | UO::PostIncrement | UO::PostDecrement => {
 
                // Equal types of integer class
 
                let progress_base = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                let (progress_expr, progress_arg) =
 
                    self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, arg_id, 0)?;
 

	
 
                (progress_base || progress_expr, progress_base || progress_arg)
 
            },
 
            UO::LogicalNot => {
 
                // Both booleans
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &BOOL_TEMPLATE)?;
 
                let progress_arg = self.apply_forced_constraint(ctx, arg_id, &BOOL_TEMPLATE)?;
 
                (progress_expr, progress_arg)
 
            }
 
        };
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_arg { self.queue_expr(arg_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_indexing_expr(&mut self, ctx: &mut Ctx, id: IndexingExpressionId) -> Result<(), ParseError2> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let subject_id = expr.subject;
 
        let index_id = expr.index;
 

	
 
        // Make sure subject is arraylike and index is integerlike
 
        let progress_subject_base = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
        let progress_index = self.apply_forced_constraint(ctx, index_id, &INTEGERLIKE_TEMPLATE)?;
 

	
 
        // Make sure if output is of T then subject is Array<T>
 
        let (progress_expr, progress_subject) =
 
            self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, subject_id, 1)?;
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_subject_base || progress_subject { self.queue_expr(subject_id); }
 
        if progress_index { self.queue_expr(index_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_slicing_expr(&mut self, ctx: &mut Ctx, id: SlicingExpressionId) -> Result<(), ParseError2> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let subject_id = expr.subject;
 
        let from_id = expr.from_index;
 
        let to_id = expr.to_index;
 

	
 
        // Make sure subject is arraylike and indices are of equal integerlike
 
        let progress_subject_base = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
        let progress_idx_base = self.apply_forced_constraint(ctx, from_id, &INTEGERLIKE_TEMPLATE)?;
 
        let (progress_from, progress_to) = self.apply_equal2_constraint(ctx, upcast_id, from_id, 0, to_id, 0)?;
 

	
 
        // Make sure if output is of T then subject is Array<T>
 
        let (progress_expr, progress_subject) =
 
            self.apply_equal2_constraint(ctx, upcast_id, upcast_id, 0, subject_id, 1)?;
 

	
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 
        if progress_subject_base || progress_subject { self.queue_expr(subject_id); }
 
        if progress_idx_base || progress_from { self.queue_expr(from_id); }
 
        if progress_idx_base || progress_to { self.queue_expr(to_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_select_expr(&mut self, ctx: &mut Ctx, id: SelectExpressionId) -> Result<(), ParseError2> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let subject_id = expr.subject;
 

	
 
        let (progress_subject, progress_expr) = match &expr.field {
 
            Field::Length => {
 
                let progress_subject = self.apply_forced_constraint(ctx, subject_id, &ARRAYLIKE_TEMPLATE)?;
 
                let progress_expr = self.apply_forced_constraint(ctx, upcast_id, &INTEGERLIKE_TEMPLATE)?;
 
                (progress_subject, progress_expr)
 
            },
 
            Field::Symbolic(_field) => {
 
                todo!("implement select expr for symbolic fields");
 
            }
 
        };
 

	
 
        if progress_subject { self.queue_expr(subject_id); }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_array_expr(&mut self, ctx: &mut Ctx, id: ArrayExpressionId) -> Result<(), ParseError2> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let expr_elements = expr.elements.clone(); // TODO: @performance
 

	
 
        // All elements should have an equal type
 
        let progress = self.apply_equal_n_constraint(ctx, upcast_id, &expr_elements)?;
 
        let mut any_progress = false;
 
        for (progress_arg, arg_id) in progress.iter().zip(expr_elements.iter()) {
 
            if *progress_arg {
 
                any_progress = true;
 
                self.queue_expr(*arg_id);
 
            }
 
        }
 

	
 
        // And the output should be an array of the element types
 
        let mut expr_progress = self.apply_forced_constraint(ctx, upcast_id, &ARRAY_TEMPLATE)?;
 
        if !expr_elements.is_empty() {
 
            let first_arg_id = expr_elements[0];
 
            let (inner_expr_progress, arg_progress) = self.apply_equal2_constraint(
 
                ctx, upcast_id, upcast_id, 1, first_arg_id, 0
 
            )?;
 

	
 
            expr_progress = expr_progress || inner_expr_progress;
 

	
 
            // Note that if the array type progressed the type of the arguments,
 
            // then we should enqueue this progression function again
 
            // TODO: @fix Make apply_equal_n accept a start idx as well
 
            if arg_progress { self.queue_expr(upcast_id); }
 
        }
 

	
 
        if expr_progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_constant_expr(&mut self, ctx: &mut Ctx, id: ConstantExpressionId) -> Result<(), ParseError2> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let template = match &expr.value {
 
            Constant::Null => &MESSAGE_TEMPLATE,
 
            Constant::Integer(_) => &INTEGERLIKE_TEMPLATE,
 
            Constant::True | Constant::False => &BOOL_TEMPLATE,
 
            Constant::Character(_) => todo!("character literals")
 
        };
 

	
 
        let progress = self.apply_forced_constraint(ctx, upcast_id, template)?;
 
        if progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    // TODO: @cleanup, see how this can be cleaned up once I implement
 
    //  polymorphic struct/enum/union literals. These likely follow the same
 
    //  pattern as here.
 
    fn progress_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> Result<(), ParseError2> {
 
        println!("DEBUG: Processing call {}", id.0.index);
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let extra = self.extra_data.get_mut(&upcast_id).unwrap();
 

	
 
        // Check if we can make progress using the arguments and/or return types
 
        // while keeping track of the polyvars we've extended
 
        let mut poly_progress = HashSet::new();
 
        debug_assert_eq!(extra.embedded.len(), expr.arguments.len());
 
        let mut poly_infer_error = false;
 

	
 
        for (arg_idx, arg_id) in expr.arguments.clone().into_iter().enumerate() {
 
            let signature_type = &mut extra.embedded[arg_idx];
 
            let argument_type: *mut _ = self.expr_types.get_mut(&arg_id).unwrap();
 
            let (progress_sig, progress_arg) = Self::apply_equal2_constraint_types(
 
                ctx, upcast_id, signature_type, 0, argument_type, 0
 
            )?;
 

	
 
            println!("DEBUG Arg  {}: {} <--> {}", arg_idx, signature_type.display_name(&ctx.heap), unsafe{&*argument_type}.display_name(&ctx.heap));
 

	
 
            if progress_sig {
 
                // Progressed signature, so also apply inference to the 
 
                // polymorph types using the markers 
 
                debug_assert!(signature_type.has_marker, "progress on signature argument type without markers");
 
                for (poly_idx, poly_section) in signature_type.marker_iter() {
 
                    let polymorph_type = &mut extra.poly_vars[poly_idx];
 
                    match Self::apply_forced_constraint_types(
 
                        polymorph_type, 0, poly_section, 0
 
                    ) {
 
                        Ok(true) => { poly_progress.insert(poly_idx); },
 
                        Ok(false) => {},
 
                        Err(()) => { poly_infer_error = true; }
 
                    }
 
                    println!("DEBUG Poly {}: {} <--> {}", poly_idx, polymorph_type.display_name(&ctx.heap), InferenceType::partial_display_name(&ctx.heap, poly_section));
 
                }
 
            }
 
            if progress_arg {
 
                // Progressed argument expression
 
                self.expr_queued.insert(arg_id);
 
            }
 
        }
 

	
 
        // Do the same for the return type
 
        let signature_type = &mut extra.returned;
 
        let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
        let (progress_sig, progress_expr) = Self::apply_equal2_constraint_types(
 
            ctx, upcast_id, signature_type, 0, expr_type, 0
 
        )?;
 

	
 
        println!("DEBUG Ret  {} <--> {}", signature_type.display_name(&ctx.heap), unsafe{&*expr_type}.display_name(&ctx.heap));
 

	
 
        if progress_sig {
 
            // As above: apply inference to polyargs as well
 
            debug_assert!(signature_type.has_marker, "progress on signature return type without markers");
 
            for (poly_idx, poly_section) in signature_type.marker_iter() {
 
                let polymorph_type = &mut extra.poly_vars[poly_idx];
 
                match Self::apply_forced_constraint_types(
 
                    polymorph_type, 0, poly_section, 0
 
                ) {
 
                    Ok(true) => { poly_progress.insert(poly_idx); },
 
                    Ok(false) => {},
 
                    Err(()) => { poly_infer_error = true; }
 
                }
 
                println!("DEBUG Poly {}: {} <--> {}", poly_idx, polymorph_type.display_name(&ctx.heap), InferenceType::partial_display_name(&ctx.heap, poly_section));
 
            }
 
        }
 
        if progress_expr {
 
            if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                self.expr_queued.insert(parent_id);
 
            }
 
        }
 

	
 
        // If we had an error in the polymorphic variable's inference, then we
 
        // need to provide a human readable error: find a pair of inference
 
        // types in the arguments/return type that do not agree on the
 
        // polymorphic variable's type
 
        if poly_infer_error { return Err(self.construct_poly_arg_error(ctx, id)) }
 

	
 
        // If we did not have an error in the polymorph inference above, then
 
        // reapplying the polymorph type to each argument type and the return
 
        // type should always succeed.
 
        // TODO: @performance If the algorithm is changed to be more "on demand
 
        //  argument re-evaluation", instead of "all-argument re-evaluation",
 
        //  then this is no longer true
 
        for poly_idx in poly_progress.into_iter() {
 
            // For each polymorphic argument: first extend the signature type,
 
            // then reapply the equal2 constraint to the expressions
 
            let poly_type = &extra.poly_vars[poly_idx];
 
            for (arg_idx, sig_type) in extra.embedded.iter_mut().enumerate() {
 
                let mut seek_idx = 0;
 
                let mut modified_sig = false;
 
                while let Some((start_idx, end_idx)) = sig_type.find_subtree_idx_for_marker(poly_idx, seek_idx) {
 
                    let modified_at_marker = Self::apply_forced_constraint_types(
 
                        sig_type, start_idx, &poly_type.parts, 0
 
                    ).unwrap();
 
                    modified_sig = modified_sig || modified_at_marker;
 
                    seek_idx = end_idx;
 
                }
 

	
 
                if !modified_sig { continue; }
 

	
 
                // Part of signature was modified, so update expression used as
 
                // argument as well
 
                let arg_expr_id = expr.arguments[arg_idx];
 
                let arg_type: *mut _ = self.expr_types.get_mut(&arg_expr_id).unwrap();
 
                let (progress_arg, _) = Self::apply_equal2_constraint_types(
 
                    ctx, arg_expr_id, arg_type, 0, sig_type, 0
 
                ).expect("no inference error at argument type");
 
                if progress_arg { self.expr_queued.insert(arg_expr_id); }
 
            }
 

	
 
            // Again: do the same for the return type
 
            let sig_type = &mut extra.returned;
 
            let mut seek_idx = 0;
 
            let mut modified_sig = false;
 
            while let Some((start_idx, end_idx)) = sig_type.find_subtree_idx_for_marker(poly_idx, seek_idx) {
 
                let modified_at_marker = Self::apply_forced_constraint_types(
 
                    sig_type, start_idx, &poly_type.parts, 0
 
                ).unwrap();
 
                modified_sig = modified_sig || modified_at_marker;
 
                seek_idx = end_idx;
 
            }
 

	
 
            if modified_sig {
 
                let ret_type = self.expr_types.get_mut(&upcast_id).unwrap();
 
                let (progress_ret, _) = Self::apply_equal2_constraint_types(
 
                    ctx, upcast_id, ret_type, 0, sig_type, 0
 
                ).expect("no inference error at return type");
 
                if progress_ret {
 
                    if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() {
 
                        self.expr_queued.insert(parent_id);
 
                    }
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_variable_expr(&mut self, ctx: &mut Ctx, id: VariableExpressionId) -> Result<(), ParseError2> {
 
        let upcast_id = id.upcast();
 
        let var_expr = &ctx.heap[id];
 
        let var_id = var_expr.declaration.unwrap();
 

	
 
        // Retrieve shared variable type and expression type and apply inference
 
        let var_data = self.var_types.get_mut(&var_id).unwrap();
 
        let expr_type = self.expr_types.get_mut(&upcast_id).unwrap();
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            &mut var_data.var_type as *mut _, 0, expr_type, 0
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            let var_decl = &ctx.heap[var_id];
 
            return Err(ParseError2::new_error(
 
                &ctx.module.source, var_decl.position(),
 
                &format!(
 
                    "Conflicting types for this variable, previously assigned the type '{}'",
 
                    var_data.var_type.display_name(&ctx.heap)
 
                )
 
            ).with_postfixed_info(
 
                &ctx.module.source, var_expr.position,
 
                &format!(
 
                    "But inferred to have incompatible type '{}' here",
 
                    expr_type.display_name(&ctx.heap)
 
                )
 
            ))
 
        }
 

	
 
        let progress_var = infer_res.modified_lhs();
 
        let progress_expr = infer_res.modified_rhs();
 

	
 
        if progress_var {
 
            for other_expr in var_data.used_at.iter() {
 
                if *other_expr != upcast_id {
 
                    self.expr_queued.insert(*other_expr);
 
                }
 
            }
 
        }
 
        if progress_expr { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn queue_expr_parent(&mut self, ctx: &Ctx, expr_id: ExpressionId) {
 
        if let ExpressionParent::Expression(parent_expr_id, _) = &ctx.heap[expr_id].parent() {
 
            self.expr_queued.insert(*parent_expr_id);
 
        }
 
    }
 

	
 
    fn queue_expr(&mut self, expr_id: ExpressionId) {
 
        self.expr_queued.insert(expr_id);
 
    }
 

	
 
    /// Applies a forced type constraint: the type associated with the supplied
 
    /// expression will be molded into the provided "template". The template may
 
    /// be fully specified (e.g. a bool) or contain "inference" variables (e.g.
 
    /// an array of T)
 
    fn apply_forced_constraint(
 
        &mut self, ctx: &mut Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> Result<bool, ParseError2> {
 
        debug_assert_expr_ids_unique_and_known!(self, expr_id);
 
        let expr_type = self.expr_types.get_mut(&expr_id).unwrap();
 
        match InferenceType::infer_subtree_for_single_type(expr_type, 0, template, 0) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(
 
                self.construct_template_type_error(ctx, expr_id, template)
 
            )
 
        }
 
    }
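    // Example (added for clarity, assuming the template constants used
    // throughout this file): forcing `BOOL_TEMPLATE` onto an expression whose
    // type is still `Unknown` specializes it to `bool` (progress), forcing
    // `ARRAYLIKE_TEMPLATE` onto a type that is already `byte[]` leaves it
    // unmodified, and forcing `BOOL_TEMPLATE` onto `byte[]` is incompatible
    // and produces a type error.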
 

	
 
    fn apply_forced_constraint_types(
 
        to_infer: *mut InferenceType, to_infer_start_idx: usize,
 
        template: &[InferenceTypePart], template_start_idx: usize
 
    ) -> Result<bool, ()> {
 
        match InferenceType::infer_subtree_for_single_type(
 
            unsafe{ &mut *to_infer }, to_infer_start_idx,
 
            template, template_start_idx
 
        ) {
 
            SingleInferenceResult::Modified => Ok(true),
 
            SingleInferenceResult::Unmodified => Ok(false),
 
            SingleInferenceResult::Incompatible => Err(()),
 
        }
 
    }
 

	
 
    /// Applies a type constraint that expects the two provided types to be
 
    /// equal. We attempt to make progress in inferring the types. If the call
 
    /// is successful then the composition of all types are made equal.
 
    /// The "parent" `expr_id` is provided to construct errors.
 
    fn apply_equal2_constraint(
 
        &mut self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg1_start_idx: usize,
 
        arg2_id: ExpressionId, arg2_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError2> {
 
        debug_assert_expr_ids_unique_and_known!(self, arg1_id, arg2_id);
 
        let arg1_type: *mut _ = self.expr_types.get_mut(&arg1_id).unwrap();
 
        let arg2_type: *mut _ = self.expr_types.get_mut(&arg2_id).unwrap();
 

	
 
        let infer_res = unsafe{ InferenceType::infer_subtrees_for_both_types(
 
            arg1_type, arg1_start_idx,
 
            arg2_type, arg2_start_idx
 
        ) };
 
        if infer_res == DualInferenceResult::Incompatible {
 
            return Err(self.construct_arg_type_error(ctx, expr_id, arg1_id, arg2_id));
 
        }
 

	
 
        Ok((infer_res.modified_lhs(), infer_res.modified_rhs()))
 
    }
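    // Example (added for clarity): applying the equal2 constraint to one
    // argument inferred as `byte[]` and another still inferred as `?[]`
    // specializes the second one to `byte[]` as well; the returned booleans
    // indicate which of the two sides was modified.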
 

	
 
    fn apply_equal2_constraint_types(
 
        ctx: &Ctx, expr_id: ExpressionId,
 
        type1: *mut InferenceType, type1_start_idx: usize, 
 
        type2: *mut InferenceType, type2_start_idx: usize
 
    ) -> Result<(bool, bool), ParseError2> {
 
        debug_assert_ptrs_distinct!(type1, type2);
 
        let infer_res = unsafe { 
 
@@ -1778,456 +2009,527 @@ impl TypeResolvingVisitor {
 
                    self.determine_inference_type_from_parser_type(ctx, return_parser_type_id, true)
 
                } else {
 
                    // Cannot happen: definition always set upon body traversal
 
                    // and "return" calls in components are illegal.
 
                    unreachable!();
 
                },
 
            EP::New(_) =>
 
                // Must be a component call, which we assign a "Void" return
 
                // type
 
                InferenceType::new(false, true, vec![ITP::Void]),
 
        };
 

	
 
        match self.expr_types.entry(expr_id) {
 
            Entry::Vacant(vacant) => {
 
                vacant.insert(inference_type);
 
            },
 
            Entry::Occupied(mut preexisting) => {
 
                // We already have an entry, this happens if our parent fixed
 
                // our type (e.g. we're used in a conditional expression's test)
 
                // but we have a different type.
 
                // TODO: Is this ever called? Seems like it can't
 
                debug_assert!(false, "I am actually called, my ID is {}", expr_id.index);
 
                let old_type = preexisting.get_mut();
 
                if let SingleInferenceResult::Incompatible = InferenceType::infer_subtree_for_single_type(
 
                    old_type, 0, &inference_type.parts, 0
 
                ) {
 
                    return Err(self.construct_expr_type_error(ctx, expr_id, expr_id))
 
                }
 
            }
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn insert_initial_call_polymorph_data(
 
        &mut self, ctx: &mut Ctx, call_id: CallExpressionId
 
    ) {
 
        use InferenceTypePart as ITP;
 

	
 
        // Note: the polymorph variables may be partially specified and may
 
        // contain references to the wrapping definition's (i.e. the proctype
 
        // we are currently visiting) polymorphic arguments.
 
        //
 
        // The arguments of the call may refer to polymorphic variables in the
 
        // definition of the function we're calling, not of the wrapping
 
        // definition. We insert markers in these inferred types to be able to
 
        // map them back and forth to the polymorphic arguments of the function
 
        // we are calling.
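        //
        // Illustration (added for clarity, using the builtin signatures set up
        // below): for a call `get(rx)` where `rx` was already inferred to be
        // `in<byte>`, the embedded signature `[Input, Marker(0), Unknown]`
        // unifies with the argument's type, after which the section following
        // `Marker(0)` forces the polymorphic variable (and hence the call's
        // return type `[Marker(0), Unknown]`) to `byte`.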
 
        let call = &ctx.heap[call_id];
 
        debug_assert!(!call.poly_args.is_empty());
 

	
 
        // Handle the polymorphic variables themselves
 
        let mut poly_vars = Vec::with_capacity(call.poly_args.len());
 
        for poly_arg_type_id in call.poly_args.clone() { // TODO: @performance
 
            poly_vars.push(self.determine_inference_type_from_parser_type(ctx, poly_arg_type_id, true));
 
        }
 

	
 
        // Handle the arguments
 
        // TODO: @cleanup: Maybe factor this out for reuse in the validator/linker, should also
 
        //  make the code slightly more robust.
 
        let (embedded_types, return_type) = match &call.method {
 
            Method::Create => {
 
                // Not polymorphic
 
                unreachable!("insert initial polymorph data for builtin 'create()' call")
 
            },
 
            Method::Fires => {
 
                // bool fires<T>(PortLike<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::PortLike, ITP::Marker(0), ITP::Unknown])],
 
                    InferenceType::new(false, true, vec![ITP::Bool])
 
                )
 
            },
 
            Method::Get => {
 
                // T get<T>(input<T> arg)
 
                (
 
                    vec![InferenceType::new(true, false, vec![ITP::Input, ITP::Marker(0), ITP::Unknown])],
 
                    InferenceType::new(true, false, vec![ITP::Marker(0), ITP::Unknown])
 
                )
 
            },
 
            Method::Put => {
 
                // void put<T>(output<T> port, T msg)
 
                (
 
                    vec![
 
                        InferenceType::new(true, false, vec![ITP::Output, ITP::Marker(0), ITP::Unknown]),
 
                        InferenceType::new(true, false, vec![ITP::Marker(0), ITP::Unknown])
 
                    ],
 
                    InferenceType::new(false, true, vec![ITP::Void])
 
                )
 
            }
 
            Method::Symbolic(symbolic) => {
 
                let definition = &ctx.heap[symbolic.definition.unwrap()];
 

	
 
                match definition {
 
                    Definition::Component(definition) => {
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            let param_parser_type_id = param.parser_type;
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, param_parser_type_id, false));
 
                        }
 

	
 
                        (parameter_types, InferenceType::new(false, true, vec![InferenceTypePart::Void]))
 
                    },
 
                    Definition::Function(definition) => {
 
                        let mut parameter_types = Vec::with_capacity(definition.parameters.len());
 
                        for param_id in definition.parameters.clone() {
 
                            let param = &ctx.heap[param_id];
 
                            let param_parser_type_id = param.parser_type;
 
                            parameter_types.push(self.determine_inference_type_from_parser_type(ctx, param_parser_type_id, false));
 
                        }
 

	
 
                        let return_type = self.determine_inference_type_from_parser_type(ctx, definition.return_type, false);
 
                        (parameter_types, return_type)
 
                    },
 
                    Definition::Struct(_) | Definition::Enum(_) => {
 
                        unreachable!("insert initial polymorph data for struct/enum");
 
                    }
 
                }
 
            }
 
        };
 

	
 
        self.extra_data.insert(call_id.upcast(), ExtraData {
 
            poly_vars,
 
            embedded: embedded_types,
 
            returned: return_type
 
        });
 
    }
 

	
 
    /// Determines the initial InferenceType from the provided ParserType. This
 
    /// may be called with two kinds of intentions:
 
    /// 1. To resolve a ParserType within the body of a function, or on
 
    ///     polymorphic arguments to calls/instantiations within that body. This
 
    ///     means that the polymorphic variables are known and can be replaced
 
    ///     with the monomorph we're instantiating.
 
    /// 2. To resolve a ParserType on a called function's definition or on
 
    ///     an instantiated datatype's members. This means that the polymorphic
 
    ///     arguments inside those ParserTypes refer to the polymorphic
 
    ///     variables in the called/instantiated type's definition.
 
    /// In the second case we place InferenceTypePart::Marker instances such
 
    /// that we can perform type inference on the polymorphic variables.
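    ///
    /// A small sketch, assuming a polymorphic variable `T` at index 0: in the
    /// first case, with `T` already resolved to `int`, the parser type
    /// `input<T>` becomes the parts `[Input, Int]`; in the second case the
    /// same parser type becomes `[Input, Marker(0), Unknown]`.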
 
    fn determine_inference_type_from_parser_type(
 
        &mut self, ctx: &Ctx, parser_type_id: ParserTypeId,
 
        parser_type_in_body: bool
 
    ) -> InferenceType {
 
        use ParserTypeVariant as PTV;
 
        use InferenceTypePart as ITP;
 

	
 
        let mut to_consider = VecDeque::with_capacity(16);
 
        to_consider.push_back(parser_type_id);
 

	
 
        let mut infer_type = Vec::new();
 
        let mut has_inferred = false;
 
        let mut has_markers = false;
 

	
 
        while !to_consider.is_empty() {
 
            let parser_type_id = to_consider.pop_front().unwrap();
 
            let parser_type = &ctx.heap[parser_type_id];
 
            match &parser_type.variant {
 
                PTV::Message => { infer_type.push(ITP::Message); },
 
                PTV::Bool => { infer_type.push(ITP::Bool); },
 
                PTV::Byte => { infer_type.push(ITP::Byte); },
 
                PTV::Short => { infer_type.push(ITP::Short); },
 
                PTV::Int => { infer_type.push(ITP::Int); },
 
                PTV::Long => { infer_type.push(ITP::Long); },
 
                PTV::String => { infer_type.push(ITP::String); },
 
                PTV::IntegerLiteral => { unreachable!("integer literal type on variable type"); },
 
                PTV::Inferred => {
 
                    infer_type.push(ITP::Unknown);
 
                    has_inferred = true;
 
                },
 
                PTV::Array(subtype_id) => {
 
                    infer_type.push(ITP::Array);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Input(subtype_id) => {
 
                    infer_type.push(ITP::Input);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Output(subtype_id) => {
 
                    infer_type.push(ITP::Output);
 
                    to_consider.push_front(*subtype_id);
 
                },
 
                PTV::Symbolic(symbolic) => {
 
                    debug_assert!(symbolic.variant.is_some(), "symbolic variant not yet determined");
 
                    match symbolic.variant.as_ref().unwrap() {
 
                        SymbolicParserTypeVariant::PolyArg(_, arg_idx) => {
 
                            // Retrieve concrete type of argument and add it to
 
                            // the inference type.
 
                            let arg_idx = *arg_idx;
 
                            debug_assert!(symbolic.poly_args.is_empty()); // TODO: @hkt
 

	
 
                            if parser_type_in_body {
 
                                debug_assert!(arg_idx < self.polyvars.len());
 
                                for concrete_part in &self.polyvars[arg_idx].v {
 
                                debug_assert!(arg_idx < self.poly_vars.len());
 
                                for concrete_part in &self.poly_vars[arg_idx].parts {
 
                                    infer_type.push(ITP::from(*concrete_part));
 
                                }
 
                            } else {
 
                                has_markers = true;
 
                                has_inferred = true;
 
                                infer_type.push(ITP::Marker(arg_idx));
 
                                infer_type.push(ITP::Unknown);
 
                            }
 
                        },
 
                        SymbolicParserTypeVariant::Definition(definition_id) => {
 
                            // TODO: @cleanup
 
                            if cfg!(debug_assertions) {
 
                                let definition = &ctx.heap[*definition_id];
 
                                debug_assert!(definition.is_struct() || definition.is_enum()); // TODO: @function_ptrs
 
                                let num_poly = match definition {
 
                                    Definition::Struct(v) => v.poly_vars.len(),
 
                                    Definition::Enum(v) => v.poly_vars.len(),
 
                                    _ => unreachable!(),
 
                                };
 
                                debug_assert_eq!(symbolic.poly_args.len(), num_poly);
 
                            }
 

	
 
                            infer_type.push(ITP::Instance(*definition_id, symbolic.poly_args.len()));
 
                            let mut poly_arg_idx = symbolic.poly_args.len();
 
                            while poly_arg_idx > 0 {
 
                                poly_arg_idx -= 1;
 
                                to_consider.push_front(symbolic.poly_args[poly_arg_idx]);
 
                            }
 
                        }
 
                    }
 
                }
 
            }
 
        }
 

	
 
        InferenceType::new(has_markers, !has_inferred, infer_type)
 
    }
 

	
 
    /// Construct an error when an expression's type does not match. This
 
    /// happens if we infer the expression type from its arguments (e.g. the
 
    /// expression type of an addition operator is the type of the arguments)
 
    /// But the expression type was already set due to our parent (e.g. an
 
    /// "if statement" or a "logical not" always expecting a boolean)
 
    fn construct_expr_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, arg_id: ExpressionId
 
    ) -> ParseError2 {
 
        // TODO: Expand and provide more meaningful information for humans
 
        let expr = &ctx.heap[expr_id];
 
        let arg_expr = &ctx.heap[arg_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 
        let arg_type = self.expr_types.get(&arg_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: this expression expected a '{}'", 
 
                expr_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg_expr.position(),
 
            &format!(
 
                "But this expression yields a '{}'",
 
                arg_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_arg_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId,
 
        arg1_id: ExpressionId, arg2_id: ExpressionId
 
    ) -> ParseError2 {
 
        let expr = &ctx.heap[expr_id];
 
        let arg1 = &ctx.heap[arg1_id];
 
        let arg2 = &ctx.heap[arg2_id];
 

	
 
        let arg1_type = self.expr_types.get(&arg1_id).unwrap();
 
        let arg2_type = self.expr_types.get(&arg2_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
            &ctx.module.source, expr.position(),
 
            "Incompatible types: cannot apply this expression"
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg1.position(),
 
            &format!(
 
                "Because this expression has type '{}'",
 
                arg1_type.display_name(&ctx.heap)
 
            )
 
        ).with_postfixed_info(
 
            &ctx.module.source, arg2.position(),
 
            &format!(
 
                "But this expression has type '{}'",
 
                arg2_type.display_name(&ctx.heap)
 
            )
 
        )
 
    }
 

	
 
    fn construct_template_type_error(
 
        &self, ctx: &Ctx, expr_id: ExpressionId, template: &[InferenceTypePart]
 
    ) -> ParseError2 {
 
        let expr = &ctx.heap[expr_id];
 
        let expr_type = self.expr_types.get(&expr_id).unwrap();
 

	
 
        return ParseError2::new_error(
 
            &ctx.module.source, expr.position(),
 
            &format!(
 
                "Incompatible types: got a '{}' but expected a '{}'",
 
                expr_type.display_name(&ctx.heap), 
 
                InferenceType::partial_display_name(&ctx.heap, template)
 
            )
 
        )
 
    }
 

	
 
    /// Constructs a human interpretable error in the case that type inference
 
    /// on a polymorphic variable of a function call failed. This may only be
 
    /// caused by a pair of inference types (which may come from arguments or
 
    /// the return type) having two different inferred values for that
 
    /// polymorphic variable.
 
    ///
 
    /// So we find this pair (which may be an argument type or return type
 
    /// conflicting with itself) and construct the error using it.
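    ///
    /// A hedged example using the builtin `put`: the port argument may have
    /// inferred the polymorphic variable as `int` while the message argument
    /// inferred it as `byte`; those two sections form the reported pair.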
 
    fn construct_poly_arg_error(
 
        &self, ctx: &Ctx, call_id: CallExpressionId
 
    ) -> ParseError2 {
 
        // Helper function to check for polymorph mismatch between two inference
 
        // types.
 
        fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(usize, &'a [InferenceTypePart], &'a [InferenceTypePart])> {
 
            if !type_a.has_marker || !type_b.has_marker {
 
                return None
 
            }
 

	
 
            for (marker_a, section_a) in type_a.marker_iter() {
 
                for (marker_b, section_b) in type_b.marker_iter() {
 
                    if marker_a != marker_b {
 
                        // Not the same polymorphic variable
 
                        continue;
 
                    }
 

	
 
                    if !InferenceType::check_subtrees(section_a, 0, section_b, 0) {
 
                        // Not compatible
 
                        return Some((marker_a, section_a, section_b))
 
                    }
 
                }
 
            }
 

	
 
            None
 
        }
 

	
 
        // Helper function to retrieve polyvar name and function name
 
        fn get_poly_var_and_func_name(ctx: &Ctx, poly_var_idx: usize, expr: &CallExpression) -> (String, String) {
 
            match &expr.method {
 
                Method::Create => unreachable!(),
 
                Method::Fires => (String::from('T'), String::from("fires")),
 
                Method::Get => (String::from('T'), String::from("get")),
 
                Method::Put => (String::from('T'), String::from("put")),
 
                Method::Symbolic(symbolic) => {
 
                    let definition = &ctx.heap[symbolic.definition.unwrap()];
 
                    let poly_var = match definition {
 
                        Definition::Struct(_) | Definition::Enum(_) => unreachable!(),
 
                        Definition::Function(definition) => {
 
                            String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string()
 
                        },
 
                        Definition::Component(definition) => {
 
                            String::from_utf8_lossy(&definition.poly_vars[poly_var_idx].value).to_string()
 
                        }
 
                    };
 
                    let func_name = String::from_utf8_lossy(&symbolic.identifier.value).to_string();
 
                    (poly_var, func_name)
 
                }
 
            }
 
        }
 

	
 
        // Helper function to construct initial error
 
        fn construct_main_error(ctx: &Ctx, poly_var_idx: usize, expr: &CallExpression) -> ParseError2 {
 
            let (poly_var, func_name) = get_poly_var_and_func_name(ctx, poly_var_idx, expr);
 
            return ParseError2::new_error(
 
                &ctx.module.source, expr.position(),
 
                &format!(
 
                    "Conflicting type for polymorphic variable '{}' of '{}'",
 
                    poly_var, func_name
 
                )
 
            )
 
        }
 

	
 
        // Actual checking
 
        let extra = self.extra_data.get(&call_id.upcast()).unwrap();
 
        let expr = &ctx.heap[call_id];
 

	
 
        // - check return type with itself
 
        if let Some((poly_idx, section_a, section_b)) = has_poly_mismatch(&extra.returned, &extra.returned) {
 
            return construct_main_error(ctx, poly_idx, expr)
 
                .with_postfixed_info(
 
                    &ctx.module.source, expr.position(),
 
                    &format!(
 
                        "The return type inferred the conflicting types '{}' and '{}'",
 
                        InferenceType::partial_display_name(&ctx.heap, section_a),
 
                        InferenceType::partial_display_name(&ctx.heap, section_b)
 
                    )
 
                )
 
        }
 

	
 
        // - check arguments with each other argument and with return type
 
        for (arg_a_idx, arg_a) in extra.embedded.iter().enumerate() {
 
            for (arg_b_idx, arg_b) in extra.embedded.iter().enumerate() {
 
                if arg_b_idx > arg_a_idx {
 
                    break;
 
                }
 

	
 
                if let Some((poly_idx, section_a, section_b)) = has_poly_mismatch(&arg_a, &arg_b) {
 
                    let error = construct_main_error(ctx, poly_idx, expr);
 
                    if arg_a_idx == arg_b_idx {
 
                        // Same argument
 
                        let arg = &ctx.heap[expr.arguments[arg_a_idx]];
 
                        return error.with_postfixed_info(
 
                            &ctx.module.source, arg.position(),
 
                            &format!(
 
                                "This argument inferred the conflicting types '{}' and '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_a),
 
                                InferenceType::partial_display_name(&ctx.heap, section_b)
 
                            )
 
                        )
 
                    } else {
 
                        let arg_a = &ctx.heap[expr.arguments[arg_a_idx]];
 
                        let arg_b = &ctx.heap[expr.arguments[arg_b_idx]];
 
                        return error.with_postfixed_info(
 
                            &ctx.module.source, arg_a.position(),
 
                            &format!(
 
                                "This argument inferred it to '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_a)
 
                            )
 
                        ).with_postfixed_info(
 
                            &ctx.module.source, arg_b.position(),
 
                            &format!(
 
                                "While this argument inferred it to '{}'",
 
                                InferenceType::partial_display_name(&ctx.heap, section_b)
 
                            )
 
                        )
 
                    }
 
                }
 
            }
 

	
 
            // Check with return type
 
            if let Some((poly_idx, section_arg, section_ret)) = has_poly_mismatch(arg_a, &extra.returned) {
 
                let arg = &ctx.heap[expr.arguments[arg_a_idx]];
 
                return construct_main_error(ctx, poly_idx, expr)
 
                    .with_postfixed_info(
 
                        &ctx.module.source, arg.position(),
 
                        &format!(
 
                            "This argument inferred it to '{}'",
 
                            InferenceType::partial_display_name(&ctx.heap, section_arg)
 
                        )
 
                    )
 
                    .with_postfixed_info(
 
                        &ctx.module.source, expr.position,
 
                        &format!(
 
                            "While the return type inferred it to '{}'",
 
                            InferenceType::partial_display_name(&ctx.heap, section_ret)
 
                        )
 
                    )
 
            }
 
        }
 

	
 
        unreachable!("construct_poly_arg_error without actual error found?")
 
    }
 
}
 

	
 
#[cfg(test)]
 
mod tests {
 
    use super::*;
 
    use crate::protocol::arena::Id;
 
    use InferenceTypePart as ITP;
 
    use InferenceType as IT;
 

	
 
    #[test]
 
    fn test_single_part_inference() {
 
        // lhs argument inferred from rhs
 
        let pairs = [
 
            (ITP::NumberLike, ITP::Byte),
 
            (ITP::IntegerLike, ITP::Int),
 
            (ITP::Unknown, ITP::Message),
 
            (ITP::Unknown, ITP::String)
 
        ];
 
        for (lhs, rhs) in pairs.iter() {
 
            // Using infer-both
 
            let mut lhs_type = IT::new(false, false, vec![lhs.clone()]);
 
            let mut rhs_type = IT::new(false, true, vec![rhs.clone()]);
 
            let result = unsafe{ IT::infer_subtrees_for_both_types(
 
                &mut lhs_type, 0, &mut rhs_type, 0
 
            ) };
 
            assert_eq!(DualInferenceResult::First, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 

	
 
            // Using infer-single
 
            let mut lhs_type = IT::new(false, false, vec![lhs.clone()]);
 
            let mut rhs_type = IT::new(false, true, vec![rhs.clone()]);
 
            let result = unsafe{ IT::infer_subtree_for_single_type(
 
                &mut lhs_type, 0, &rhs_type.parts, 0
 
            ) };
 
            assert_eq!(SingleInferenceResult::Modified, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 
        }
 
    }
 

	
 
    #[test]
 
    fn test_multi_part_inference() {
 
        let pairs = [
 
            (vec![ITP::ArrayLike, ITP::NumberLike], vec![ITP::Slice, ITP::Byte]),
 
            (vec![ITP::Unknown], vec![ITP::Input, ITP::Array, ITP::String]),
 
            (vec![ITP::PortLike, ITP::Int], vec![ITP::Input, ITP::Int]),
 
            (vec![ITP::Unknown], vec![ITP::Output, ITP::Int]),
 
            (
 
                vec![ITP::Instance(Id::new(0), 2), ITP::Input, ITP::Unknown, ITP::Output, ITP::Unknown],
 
                vec![ITP::Instance(Id::new(0), 2), ITP::Input, ITP::Array, ITP::Int, ITP::Output, ITP::Int]
 
            )
 
        ];
 

	
 
        for (lhs, rhs) in pairs.iter() {
 
            let mut lhs_type = IT::new(false, false, lhs.clone());
 
            let mut rhs_type = IT::new(false, false, rhs.clone());
 
            let result = unsafe{ IT::infer_subtrees_for_both_types(
 
                &mut lhs_type, 0, &mut rhs_type, 0
 
            ) };
 
            assert_eq!(DualInferenceResult::First, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts);
 

	
 
            let mut lhs_type = IT::new(false, false, lhs.clone());
 
            let mut rhs_type = IT::new(false, false, rhs.clone());
 
            let result = unsafe{ IT::infer_subtree_for_single_type(
 
                &mut lhs_type, 0, &rhs_type.parts, 0
 
            ) };
 
            assert_eq!(SingleInferenceResult::Modified, result);
 
            assert_eq!(lhs_type.parts, rhs_type.parts)
 
        }
 
    }
 
}
 
\ No newline at end of file
src/protocol/parser/type_table.rs
 
/**
 
TypeTable
 

	
 
Contains the type table: a datastructure that, when compilation succeeds,
 
contains a concrete type definition for each AST type definition. In general
 
terms the type table will go through the following phases during the compilation
 
process:
 

	
 
    1. The base type definitions are resolved after the parser phase has
 
        finished. This implies that the AST is fully constructed, but not yet
 
        annotated.
 
    2. With the base type definitions resolved, the validation/linker phase will
 
        use the type table (together with the symbol table) to disambiguate
 
        terms (e.g. does an expression refer to a variable, an enum, a constant,
 
        etc.)
 
    3. During the type checking/inference phase the type table is used to ensure
 
        that the AST contains valid use of types in expressions and statements.
 
        At the same time type inference will find concrete instantiations of
 
        polymorphic types, these will be stored in the type table as monomorphed
 
        instantiations of a generic type.
 
    4. After type checking and inference (and possibly when constructing byte
 
        code) the type table will construct a type graph and solidify each
 
        non-polymorphic type and monomorphed instantiations of polymorphic types
 
        into concrete types.
 

	
 
So a base type is defined by its (optionally polymorphic) representation in the
 
AST. A concrete type has concrete types for each of the polymorphic arguments. A
 
struct, enum or union may have polymorphic arguments but not actually be a
 
polymorphic type. This happens when the polymorphic arguments are not used in
 
the type definition itself. The same holds for functions/components, but there we just
 
check the arguments/return type of the signature.
 

	
 
Apart from base types and concrete types, we also use the term "embedded type"
 
for types that are embedded within another type, such as the type of a
 
struct field or of a union variant. Embedded types may themselves have
 
polymorphic arguments and therefore form an embedded type tree.
 

	
 
NOTE: for now a polymorphic definition of a function/component is illegal if the
 
    polymorphic arguments are not used in the arguments/return type. It should
 
    be legal, but we disallow it for now.
 

	
 
TODO: Allow potentially cyclic datatypes and reject truly cyclic datatypes.
 
TODO: Allow for the full potential of polymorphism
 
TODO: Detect "true" polymorphism: for datatypes like structs/enum/unions this
 
    is simple. For functions we need to check the entire body. Do it here? Or
 
    do it somewhere else?
 
TODO: Do we want to check fn argument collision here, or in validation phase?
 
TODO: Make type table an on-demand thing instead of constructing all base types.
 
TODO: Cleanup everything, feels like a lot can be written cleaner and with less
 
    assumptions on each function call.
 
// TODO: Review all comments
 
*/
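//
// A hedged illustration of the terminology above (hypothetical PDL syntax):
// `struct Pair<A, B> { A first, B second }` is a base type definition; the
// instantiation `Pair<int, bool>` found during type inference is stored as a
// monomorph and later solidified into a concrete type; the field types `A`
// and `B` are embedded types of the struct.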
 

	
 
use std::fmt::{Formatter, Result as FmtResult};
 
use std::collections::{HashMap, VecDeque};
 

	
 
use crate::protocol::ast::*;
 
use crate::protocol::parser::symbol_table::{SymbolTable, Symbol};
 
use crate::protocol::inputsource::*;
 
use crate::protocol::parser::*;
 

	
 
//------------------------------------------------------------------------------
 
// Defined Types
 
//------------------------------------------------------------------------------
 

	
 
#[derive(Copy, Clone, PartialEq, Eq)]
 
pub enum TypeClass {
 
    Enum,
 
    Union,
 
    Struct,
 
    Function,
 
    Component
 
}
 

	
 
impl TypeClass {
 
    pub(crate) fn display_name(&self) -> &'static str {
 
        match self {
 
            TypeClass::Enum => "enum",
 
            TypeClass::Union => "enum",
 
            TypeClass::Struct => "struct",
 
            TypeClass::Function => "function",
 
            TypeClass::Component => "component",
 
        }
 
    }
 

	
 
    pub(crate) fn is_data_type(&self) -> bool {
 
        *self == TypeClass::Enum || *self == TypeClass::Union || *self == TypeClass::Struct
 
    }
 

	
 
    pub(crate) fn is_proc_type(&self) -> bool {
 
        *self == TypeClass::Function || *self == TypeClass::Component
 
    }
 
}
 

	
 
impl std::fmt::Display for TypeClass {
 
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
 
        write!(f, "{}", self.display_name())
 
    }
 
}
 

	
 
/// Struct wrapping around a potentially polymorphic type. If the type does not
 
/// have any polymorphic arguments then it will not have any monomorphs and
 
/// `is_polymorph` will be set to `false`. A type with polymorphic arguments
 
/// only has `is_polymorph` set to `true` if the polymorphic arguments actually
 
/// appear in the type's associated types (function return argument, struct
 
/// field, enum variant, etc.). Otherwise the polymorphic argument is just a
 
/// marker and does not influence the bytesize of the type.
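/// As an illustrative sketch (hypothetical definitions): `struct Cell<T>{ T value }`
/// would get `is_polymorph == true`, while `struct Tagged<T>{ int tag }` would keep
/// `is_polymorph == false` because `T` never appears in a field type.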
 
pub struct DefinedType {
 
    pub(crate) ast_root: RootId,
 
    pub(crate) ast_definition: DefinitionId,
 
    pub(crate) definition: DefinedTypeVariant,
 
    pub(crate) poly_args: Vec<PolyArg>,
 
    pub(crate) is_polymorph: bool,
 
    pub(crate) is_pointerlike: bool,
 
    pub(crate) monomorphs: Vec<u32>, // TODO: ?
 
    // TODO: @optimize
 
    pub(crate) monomorphs: Vec<Vec<ConcreteType>>,
 
}
 

	
 
pub enum DefinedTypeVariant {
 
    Enum(EnumType),
 
    Union(UnionType),
 
    Struct(StructType),
 
    Function(FunctionType),
 
    Component(ComponentType)
 
}
 

	
 
pub struct PolyArg {
 
    identifier: Identifier,
 
    /// Whether the polymorphic argument is used directly in the definition of
 
    /// the type (not including bodies of function/component types)
 
    is_in_use: bool,
 
}
 

	
 
impl DefinedTypeVariant {
 
    pub(crate) fn type_class(&self) -> TypeClass {
 
        match self {
 
            DefinedTypeVariant::Enum(_) => TypeClass::Enum,
 
            DefinedTypeVariant::Union(_) => TypeClass::Union,
 
            DefinedTypeVariant::Struct(_) => TypeClass::Struct,
 
            DefinedTypeVariant::Function(_) => TypeClass::Function,
 
            DefinedTypeVariant::Component(_) => TypeClass::Component
 
        }
 
    }
 
}
 

	
 
/// `EnumType` is the classical C/C++ enum type. It has various variants with
 
/// an assigned integer value. The integer values may be user-defined,
 
/// compiler-defined, or a mix of the two. If a user assigns the same enum
 
/// value multiple times, we assume the user is an expert and we consider both
 
/// variants to be equal to one another.
 
pub struct EnumType {
 
    variants: Vec<EnumVariant>,
 
    representation: PrimitiveType,
 
}
 

	
 
// TODO: Also support maximum u64 value
 
pub struct EnumVariant {
 
    identifier: Identifier,
 
    value: i64,
 
}
 

	
 
/// `UnionType` is the algebraic datatype (or sum type, or discriminated union).
 
/// A value is an element of the union, identified by its tag, and may contain
 
/// a single subtype.
 
pub struct UnionType {
 
    variants: Vec<UnionVariant>,
 
    tag_representation: PrimitiveType
 
}
 

	
 
pub struct UnionVariant {
 
    identifier: Identifier,
 
    parser_type: Option<ParserTypeId>,
 
    tag_value: i64,
 
}
 

	
 
pub struct StructType {
 
    fields: Vec<StructField>,
 
}
 

	
 
pub struct StructField {
 
    identifier: Identifier,
 
    parser_type: ParserTypeId,
 
}
 

	
 
pub struct FunctionType {
 
    pub return_type: ParserTypeId,
 
    pub arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct ComponentType {
 
    variant: ComponentVariant,
 
    arguments: Vec<FunctionArgument>
 
}
 

	
 
pub struct FunctionArgument {
 
    identifier: Identifier,
 
    parser_type: ParserTypeId,
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Type table
 
//------------------------------------------------------------------------------
 

	
 
// TODO: @cleanup Do I really need this, doesn't make the code that much cleaner
 
struct TypeIterator {
 
    breadcrumbs: Vec<(RootId, DefinitionId)>
 
}
 

	
 
impl TypeIterator {
 
    fn new() -> Self {
 
        Self{ breadcrumbs: Vec::with_capacity(32) }
 
    }
 

	
 
    fn reset(&mut self, root_id: RootId, definition_id: DefinitionId) {
 
        self.breadcrumbs.clear();
 
        self.breadcrumbs.push((root_id, definition_id))
 
    }
 

	
 
    fn push(&mut self, root_id: RootId, definition_id: DefinitionId) {
 
        self.breadcrumbs.push((root_id, definition_id));
 
    }
 

	
 
    fn contains(&self, root_id: RootId, definition_id: DefinitionId) -> bool {
 
        for (stored_root_id, stored_definition_id) in self.breadcrumbs.iter() {
 
            if *stored_root_id == root_id && *stored_definition_id == definition_id { return true; }
 
        }
 

	
 
        return false
 
    }
 

	
 
    fn top(&self) -> Option<(RootId, DefinitionId)> {
 
        self.breadcrumbs.last().map(|(r, d)| (*r, *d))
 
    }
 

	
 
    fn pop(&mut self) {
 
        debug_assert!(!self.breadcrumbs.is_empty());
 
        self.breadcrumbs.pop();
 
    }
 
}
 

	
 
#[derive(Copy, Clone)]
 
pub(crate) enum ConcreteTypeVariant {
 
    // No subtypes
 
    Message,
 
    Bool,
 
    Byte,
 
    Short,
 
    Int,
 
    Long,
 
    String,
 
    // One subtype
 
    Array,
 
    Slice,
 
    Input,
 
    Output,
 
    // Multiple subtypes (definition of the instantiated type and number of poly args)
 
    Instance(DefinitionId, usize)
 
}
 

	
 
pub(crate) struct ConcreteType {
 
    // serialized version (interpret as serialized depth-first tree, with
 
    // variant indicating the number of children (subtypes))
 
    pub(crate) v: Vec<ConcreteTypeVariant>
 
}
 

	
 
/// Result from attempting to resolve a `ParserType` using the symbol table and
 
/// the type table.
 
enum ResolveResult {
 
    /// ParserType is a builtin type
 
    BuiltIn,
 
    /// ParserType points to a polymorphic argument, contains the index of the
 
    /// polymorphic argument in the outermost definition (e.g. we may have 
 
    /// structs nested three levels deep, but in the innermost struct we can 
 
    /// only use the polyargs that are specified in the type definition of the
 
    /// outermost struct).
 
    PolyArg(usize),
 
    /// ParserType points to a user-defined type that is already resolved in the
 
    /// type table.
 
    Resolved((RootId, DefinitionId)),
 
    /// ParserType points to a user-defined type that is not yet resolved into
 
    /// the type table.
 
    Unresolved((RootId, DefinitionId))
 
}
 

	
 
pub(crate) struct TypeTable {
 
    /// Lookup from AST DefinitionId to a defined type. Considering possible
 
    /// polymorphs is done inside the `DefinedType` struct.
 
    lookup: HashMap<DefinitionId, DefinedType>,
 
    /// Iterator over `(module, definition)` tuples used as workspace to make sure
 
    /// that the base definitions of all of a type's subtypes are resolved.
 
    iter: TypeIterator,
 
    /// Iterator over `parser type`s during the process where `parser types` are
 
    /// resolved into a `(module, definition)` tuple.
 
    parser_type_iter: VecDeque<ParserTypeId>,
 
}
 

	
 
pub(crate) struct TypeCtx<'a> {
 
    symbols: &'a SymbolTable,
 
    heap: &'a mut Heap,
 
    modules: &'a [LexedModule]
 
}
 

	
 
impl<'a> TypeCtx<'a> {
 
    pub(crate) fn new(symbols: &'a SymbolTable, heap: &'a mut Heap, modules: &'a [LexedModule]) -> Self {
 
        Self{ symbols, heap, modules }
 
    }
 
}
 

	
 
impl TypeTable {
 
    /// Construct a new type table without any resolved types. Types will be
 
    /// resolved on-demand.
 
    pub(crate) fn new(ctx: &mut TypeCtx) -> Result<Self, ParseError2> {
 
        // Make sure we're allowed to cast root_id to index into ctx.modules
 
        if cfg!(debug_assertions) {
 
            for (index, module) in ctx.modules.iter().enumerate() {
 
                debug_assert_eq!(index, module.root_id.index as usize);
 
            }
 
        }
 

	
 
        // Use context to guess hashmap size
 
        let reserve_size = ctx.heap.definitions.len();
 
        let mut table = Self{
 
            lookup: HashMap::with_capacity(reserve_size),
 
            iter: TypeIterator::new(),
 
            parser_type_iter: VecDeque::with_capacity(64),
 
        };
 

	
 
        // TODO: @cleanup Rework this hack
 
        for root_idx in 0..ctx.modules.len() {
 
            let last_definition_idx = ctx.heap[ctx.modules[root_idx].root_id].definitions.len();
 
            for definition_idx in 0..last_definition_idx {
 
                let definition_id = ctx.heap[ctx.modules[root_idx].root_id].definitions[definition_idx];
 
                table.resolve_base_definition(ctx, definition_id)?;
 
            }
 
        }
 

	
 
        debug_assert_eq!(table.lookup.len(), reserve_size, "mismatch in reserved size of type table");
 

	
 
        Ok(table)
 
    }
 

	
 
    /// Retrieves base definition from type table. We must be able to retrieve
 
    /// it as we resolve all base types upon type table construction (for now).
 
    /// However, in the future we might do on-demand type resolving, so return
 
    /// an option anyway
 
    pub(crate) fn get_base_definition(&self, definition_id: &DefinitionId) -> Option<&DefinedType> {
 
        self.lookup.get(&definition_id)
 
    }
 

	
 
    /// Instantiates a monomorph for a given base definition.
 
    pub(crate) fn instantiate_monomorph(&mut self, definition_id: &DefinitionId, monomorph: &Vec<ConcreteType>) {
 
        debug_assert!(
 
            self.lookup.contains_key(definition_id),
 
            "attempting to instantiate monomorph of definition unknown to type table"
 
        );
 
        let definition = self.lookup.get_mut(definition_id).unwrap();
 
        debug_assert_eq!(
 
            monomorph.len(), definition.poly_args.len(),
 
            "attempting to instantiate monomorph with {} types, but definition requires {}",
 
            monomorph.len(), definition.poly_args.len()
 
        );
 

	
 
        definition.monomorphs.push(monomorph.clone())
 
    }
 

	
 
    /// This function will resolve just the basic definition of the type, it
 
    /// will not handle any of the monomorphized instances of the type.
 
    fn resolve_base_definition<'a>(&'a mut self, ctx: &mut TypeCtx, definition_id: DefinitionId) -> Result<(), ParseError2> {
 
        // Check if we have already resolved the base definition
 
        if self.lookup.contains_key(&definition_id) { return Ok(()); }
 

	
 
        let root_id = Self::find_root_id(ctx, definition_id);
 
        self.iter.reset(root_id, definition_id);
 

	
 
        while let Some((root_id, definition_id)) = self.iter.top() {
 
            // We have a type to resolve
 
            let definition = &ctx.heap[definition_id];
 

	
 
            let can_pop_breadcrumb = match definition {
 
                // TODO: @cleanup Borrow rules hax
 
                Definition::Enum(_) => self.resolve_base_enum_definition(ctx, root_id, definition_id),
 
                Definition::Struct(_) => self.resolve_base_struct_definition(ctx, root_id, definition_id),
 
                Definition::Component(_) => self.resolve_base_component_definition(ctx, root_id, definition_id),
 
                Definition::Function(_) => self.resolve_base_function_definition(ctx, root_id, definition_id),
 
            }?;
 

	
 
            // Otherwise: `ingest_resolve_result` has pushed a new breadcrumb
 
            // that we must follow before we can resolve the current type
 
            if can_pop_breadcrumb {
 
                self.iter.pop();
 
            }
 
        }
 

	
 
        // We must have resolved the type
 
        debug_assert!(self.lookup.contains_key(&definition_id), "base type not resolved");
 
        Ok(())
 
    }
 

	
 
    /// Resolve the basic enum definition to an entry in the type table. It will
 
    /// not instantiate any monomorphized instances of polymorphic enum
 
    /// definitions. If a subtype has to be resolved first then this function
 
    /// will return `false` after calling `ingest_resolve_result`.
 
    fn resolve_base_enum_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
        debug_assert!(ctx.heap[definition_id].is_enum());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base enum already resolved");
 
        
 
        let definition = ctx.heap[definition_id].as_enum();
 

	
 
        // Check if the enum should be implemented as a classic enumeration or
 
        // a tagged union. Keep track of variant index for error messages. Make
 
        // sure all embedded types are resolved.
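        // (A hedged example with hypothetical syntax: `enum Flag { A, B = 3 }`
        // stays a classic enumeration, while `enum Maybe { Nothing, Just(int) }`
        // carries a type in a variant and is implemented as a tagged union;
        // mixing the two styles is rejected below.)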
 
        let mut first_tag_value = None;
 
        let mut first_int_value = None;
 
        for variant in &definition.variants {
 
            match &variant.value {
 
                EnumVariantValue::None => {},
 
                EnumVariantValue::Integer(_) => if first_int_value.is_none() {
 
                    first_int_value = Some(variant.position);
 
                },
 
                EnumVariantValue::Type(variant_type_id) => {
 
                    if first_tag_value.is_none() {
 
                        first_tag_value = Some(variant.position);
 
                    }
 

	
 
                    // Check if the embedded type needs to be resolved
 
                    let resolve_result = self.resolve_base_parser_type(ctx, &definition.poly_vars, root_id, *variant_type_id)?;
 
                    if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                        return Ok(false)
 
                    }
 
                }
 
            }
 
        }
 

	
 
        if first_tag_value.is_some() && first_int_value.is_some() {
 
            // Not illegal, but useless and probably a programmer mistake
 
            let module_source = &ctx.modules[root_id.index as usize].source;
 
            let tag_pos = first_tag_value.unwrap();
 
            let int_pos = first_int_value.unwrap();
 
            return Err(
 
                ParseError2::new_error(
 
                    module_source, definition.position,
 
                    "Illegal combination of enum integer variant(s) and enum union variant(s)"
 
                )
 
                    .with_postfixed_info(module_source, int_pos, "Assigning an integer value here")
 
                    .with_postfixed_info(module_source, tag_pos, "Embedding a type in a union variant here")
 
            );
 
        }
 

	
 
        // Enumeration is legal
 
        if first_tag_value.is_some() {
 
            // Implement as a tagged union
 

	
 
            // Determine the union variants
 
            let mut tag_value = -1;
 
            let mut variants = Vec::with_capacity(definition.variants.len());
 
            for variant in &definition.variants {
 
                tag_value += 1;
 
                let parser_type = match &variant.value {
 
                    EnumVariantValue::None => {
 
                        None
 
                    },
 
                    EnumVariantValue::Type(parser_type_id) => {
 
                        // Type should be resolvable, we checked this above
 
                        Some(*parser_type_id)
 
                    },
 
                    EnumVariantValue::Integer(_) => {
 
                        debug_assert!(false, "Encountered `Integer` variant after asserting enum is a discriminated union");
 
                        unreachable!();
 
                    }
 
                };
 

	
 
                variants.push(UnionVariant{
 
                    identifier: variant.identifier.clone(),
 
                    parser_type,
 
                    tag_value,
 
                })
 
            }
 

	
 
            // Ensure union names and polymorphic args do not conflict
 
            self.check_identifier_collision(
 
                ctx, root_id, &variants, |variant| &variant.identifier, "enum variant"
 
            )?;
 
            self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
            let mut poly_args = self.create_initial_poly_args(&definition.poly_vars);
 
            for variant in &variants {
 
                if let Some(embedded) = variant.parser_type {
 
                    self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, embedded)?;
 
                }
 
            }
 
            let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 

	
 
            // Insert base definition in type table
 
            self.lookup.insert(definition_id, DefinedType {
 
                ast_root: root_id,
 
                ast_definition: definition_id,
 
                definition: DefinedTypeVariant::Union(UnionType{
 
                    variants,
 
                    tag_representation: Self::enum_tag_type(-1, tag_value),
 
                }),
 
                poly_args,
 
                is_polymorph,
 
                is_pointerlike: false, // TODO: @cyclic_types
 
                monomorphs: Vec::new()
 
            });
 
        } else {
 
            // Implement as a regular enum
 
            let mut enum_value = -1;
 
            let mut min_enum_value = 0;
 
            let mut max_enum_value = 0;
 
            let mut variants = Vec::with_capacity(definition.variants.len());
 
            for variant in &definition.variants {
 
                enum_value += 1;
 
                match &variant.value {
 
                    EnumVariantValue::None => {
 
                        variants.push(EnumVariant{
 
                            identifier: variant.identifier.clone(),
 
                            value: enum_value,
 
                        });
 
                    },
 
                    EnumVariantValue::Integer(override_value) => {
 
                        enum_value = *override_value;
 
                        variants.push(EnumVariant{
 
                            identifier: variant.identifier.clone(),
 
                            value: enum_value,
 
                        });
 
                    },
 
                    EnumVariantValue::Type(_) => {
 
                        debug_assert!(false, "Encountered `Type` variant after asserting enum is not a discriminated union");
 
                        unreachable!();
 
                    }
 
                }
 
                if enum_value < min_enum_value { min_enum_value = enum_value; }
 
                else if enum_value > max_enum_value { max_enum_value = enum_value; }
 
            }
 

	
 
            // Ensure enum names and polymorphic args do not conflict
 
            self.check_identifier_collision(
 
                ctx, root_id, &variants, |variant| &variant.identifier, "enum variant"
 
            )?;
 
            self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
            // Note: although we cannot have embedded type dependent on the
 
            // polymorphic variables, they might still be present as tokens
 
            let definition_id = definition.this.upcast();
 
            self.lookup.insert(definition_id, DefinedType {
 
                ast_root: root_id,
 
                ast_definition: definition_id,
 
                definition: DefinedTypeVariant::Enum(EnumType{
 
                    variants,
 
                    representation: Self::enum_tag_type(min_enum_value, max_enum_value)
 
                }),
 
                poly_args: self.create_initial_poly_args(&definition.poly_vars),
 
                is_polymorph: false,
 
                is_pointerlike: false,
 
                monomorphs: Vec::new()
 
            });
 
        }
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic struct definition to an entry in the type table. It
 
    /// will not instantiate any monomorphized instances of polymorphic struct
 
    /// definitions.
 
    fn resolve_base_struct_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
        debug_assert!(ctx.heap[definition_id].is_struct());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base struct already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_struct();
 

	
 
        // Make sure all fields point to resolvable types
 
        for field_definition in &definition.fields {
 
            let resolve_result = self.resolve_base_parser_type(ctx, &definition.poly_vars, root_id, field_definition.parser_type)?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 

	
 
        // All fields types are resolved, construct base type
 
        let mut fields = Vec::with_capacity(definition.fields.len());
 
        for field_definition in &definition.fields {
 
            fields.push(StructField{
 
                identifier: field_definition.field.clone(),
 
                parser_type: field_definition.parser_type,
 
            })
 
        }
 

	
 
        // And make sure no conflicts exist in field names and/or polymorphic args
 
        self.check_identifier_collision(
 
            ctx, root_id, &fields, |field| &field.identifier, "struct field"
 
        )?;
 
        self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct representation of polymorphic arguments
 
        let mut poly_args = self.create_initial_poly_args(&definition.poly_vars);
 
        for field in &fields {
 
            self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, field.parser_type)?;
 
        }
 

	
 
        let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 

	
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Struct(StructType{
 
                fields,
 
            }),
 
            poly_args,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic function definition to an entry in the type table. It
 
    /// will not instantiate any monomorphized instances of polymorphic function
 
    /// definitions.
 
    fn resolve_base_function_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
        debug_assert!(ctx.heap[definition_id].is_function());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base function already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_function();
 
        let return_type = definition.return_type;
 

	
 
        // Check the return type
 
        let resolve_result = self.resolve_base_parser_type(
 
            ctx, &definition.poly_vars, root_id, definition.return_type
 
        )?;
 
        if !self.ingest_resolve_result(ctx, resolve_result)? {
 
            return Ok(false)
 
        }
 

	
 
        // Check the argument types
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            let resolve_result = self.resolve_base_parser_type(
 
                ctx, &definition.poly_vars, root_id, param.parser_type
 
            )?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 

	
 
        // Construct arguments to function
 
        let mut arguments = Vec::with_capacity(definition.parameters.len());
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            arguments.push(FunctionArgument{
 
                identifier: param.identifier.clone(),
 
                parser_type: param.parser_type,
 
            })
 
        }
 

	
 
        // Check conflict of argument and polyarg identifiers
 
        self.check_identifier_collision(
 
            ctx, root_id, &arguments, |arg| &arg.identifier, "function argument"
 
        )?;
 
        self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct polymorphic arguments
 
        let mut poly_args = self.create_initial_poly_args(&definition.poly_vars);
 
        let return_type_id = definition.return_type;
 
        self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, return_type_id)?;
 
        for argument in &arguments {
 
            self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, argument.parser_type)?;
 
        }
 

	
 
        let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use);
 

	
 
        // Construct entry in type table
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Function(FunctionType{
 
                return_type,
 
                arguments,
 
            }),
 
            poly_args,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Resolves the basic component definition to an entry in the type table.
 
    /// It will not instantiate any monomorphized instances of polymorphic
 
    /// component definitions.
 
    fn resolve_base_component_definition(&mut self, ctx: &mut TypeCtx, root_id: RootId, definition_id: DefinitionId) -> Result<bool, ParseError2> {
 
        debug_assert!(ctx.heap[definition_id].is_component());
 
        debug_assert!(!self.lookup.contains_key(&definition_id), "base component already resolved");
 

	
 
        let definition = ctx.heap[definition_id].as_component();
 
        let component_variant = definition.variant;
 

	
 
        // Check argument types
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            let resolve_result = self.resolve_base_parser_type(
 
                ctx, &definition.poly_vars, root_id, param.parser_type
 
            )?;
 
            if !self.ingest_resolve_result(ctx, resolve_result)? {
 
                return Ok(false)
 
            }
 
        }
 

	
 
        // Construct argument types
 
        let mut arguments = Vec::with_capacity(definition.parameters.len());
 
        for param_id in &definition.parameters {
 
            let param = &ctx.heap[*param_id];
 
            arguments.push(FunctionArgument{
 
                identifier: param.identifier.clone(),
 
                parser_type: param.parser_type
 
            })
 
        }
 

	
 
        // Check conflict of argument and polyarg identifiers
 
        self.check_identifier_collision(
 
            ctx, root_id, &arguments, |arg| &arg.identifier, "component argument"
 
        )?;
 
        self.check_poly_args_collision(ctx, root_id, &definition.poly_vars)?;
 

	
 
        // Construct polymorphic arguments
 
        let mut poly_args = self.create_initial_poly_args(&definition.poly_vars);
 
        for argument in &arguments {
 
            self.check_and_resolve_embedded_type_and_modify_poly_args(ctx, definition_id, &mut poly_args, root_id, argument.parser_type)?;
 
        }
 

	
 
        let is_polymorph = poly_args.iter().any(|v| v.is_in_use);
 

	
 
        // Construct entry in type table
 
        self.lookup.insert(definition_id, DefinedType{
 
            ast_root: root_id,
 
            ast_definition: definition_id,
 
            definition: DefinedTypeVariant::Component(ComponentType{
 
                variant: component_variant,
 
                arguments,
 
            }),
 
            poly_args,
 
            is_polymorph,
 
            is_pointerlike: false, // TODO: @cyclic
 
            monomorphs: Vec::new(),
 
        });
 

	
 
        Ok(true)
 
    }
 

	
 
    /// Takes a ResolveResult and returns `true` if the caller can happily
 
    /// continue resolving its current type, or `false` if the caller must break
 
    /// resolving the current type and exit to the outer resolving loop. In the
 
    /// latter case the `result` value was `ResolveResult::Unresolved`, implying
 
    /// that the type must be resolved first.
 
    fn ingest_resolve_result(&mut self, ctx: &TypeCtx, result: ResolveResult) -> Result<bool, ParseError2> {
 
        match result {
 
            ResolveResult::BuiltIn | ResolveResult::PolyArg(_) => Ok(true),
 
            ResolveResult::Resolved(_) => Ok(true),
 
            ResolveResult::Unresolved((root_id, definition_id)) => {
 
                if self.iter.contains(root_id, definition_id) {
 
                    // Cyclic dependency encountered
 
                    // TODO: Allow this
 
                    let mut error = ParseError2::new_error(
 
                        &ctx.modules[root_id.index as usize].source, ctx.heap[definition_id].position(),
 
                        "Evaluating this type definition results in a cyclic type"
 
                    );
 

	
 
                    for (breadcrumb_idx, (root_id, definition_id)) in self.iter.breadcrumbs.iter().enumerate() {
 
                        let msg = if breadcrumb_idx == 0 {
 
                            "The cycle started with this definition"
 
                        } else {
 
                            "Which depends on this definition"
 
                        };
 

	
 
                        error = error.with_postfixed_info(
 
                            &ctx.modules[root_id.index as usize].source,
 
                            ctx.heap[*definition_id].position(), msg
 
                        );
 
                    }
 

	
 
                    Err(error)
 
                } else {
 
                    // Type is not yet resolved, so push IDs on iterator and
 
                    // continue the resolving loop
 
                    self.iter.push(root_id, definition_id);
 
                    Ok(false)
 
                }
 
            }
 
        }
 
    }
 

	
 
    /// Each type definition may consist of several embedded subtypes. This
 
    /// function checks whether that embedded type is a builtin, a direct
 
    /// reference to a polymorphic argument, or an (un)resolved type definition.
 
    /// If the embedded type's symbol cannot be found then this function returns
 
    /// an error.
 
    ///
 
    /// If the embedded type is resolved, then one always receives the type's
 
    /// (module, definition) tuple. If any of the types in the embedded type's
 
    /// tree is not yet resolved, then one may receive a (module, definition)
 
    /// tuple that does not correspond to the `parser_type_id` passed into this
 
    /// function.
 
    fn resolve_base_parser_type(&mut self, ctx: &TypeCtx, poly_vars: &Vec<Identifier>, root_id: RootId, parser_type_id: ParserTypeId) -> Result<ResolveResult, ParseError2> {
 
        use ParserTypeVariant as PTV;
 

	
 
        // Prepping iterator
 
        self.parser_type_iter.clear();
 
        self.parser_type_iter.push_back(parser_type_id);
 

	
 
        // Result for the very first time we resolve a base parser type
 
        let mut resolve_result = None;
 
        let mut set_resolve_result = |v: ResolveResult| {
 
            if resolve_result.is_none() { resolve_result = Some(v); }
 
        };
 

	
 
        'resolve_loop: while let Some(parser_type_id) = self.parser_type_iter.pop_back() {
 
            let parser_type = &ctx.heap[parser_type_id];
 

	
 
            match &parser_type.variant {
 
                // Builtin types. An array is a builtin as it is implemented as a
 
                // couple of pointers, so we do not require the subtype to be fully
 
                // resolved. Similar for input/output ports.
 
                PTV::Array(_) | PTV::Input(_) | PTV::Output(_) | PTV::Message |
 
                PTV::Bool | PTV::Byte | PTV::Short | PTV::Int | PTV::Long |
 
                PTV::String => {
 
                    set_resolve_result(ResolveResult::BuiltIn);
 
                },
 
                // IntegerLiteral types and the inferred marker are not allowed in
 
                // definitions of types
 
                PTV::IntegerLiteral |
 
                PTV::Inferred => {
 
                    debug_assert!(false, "Encountered illegal ParserTypeVariant within type definition");
 
                    unreachable!();
 
                },
 
                // Symbolic type, make sure its base type, and the base types
 
                // of all members of the embedded type tree are resolved. We
 
                // don't care about monomorphs yet.
 
                PTV::Symbolic(symbolic) => {
 
                    // Check if the symbolic type is one of the definition's
 
                    // polymorphic arguments. If so then we can halt the
 
                    // execution
 
                    for (poly_arg_idx, poly_arg) in poly_vars.iter().enumerate() {
 
                        if poly_arg.value == symbolic.identifier.value {
 
                            set_resolve_result(ResolveResult::PolyArg(poly_arg_idx));
 
                            continue 'resolve_loop;
 
                        }
 
                    }
 

	
 
                    // Lookup the definition in the symbol table
 
                    let symbol = ctx.symbols.resolve_namespaced_symbol(root_id, &symbolic.identifier);
 
                    if symbol.is_none() {
 
                        return Err(ParseError2::new_error(
 
                            &ctx.modules[root_id.index as usize].source, symbolic.identifier.position,
 
                            "Could not resolve type"
 
                        ))
 
                    }
 

	
 
                    let (symbol_value, mut ident_iter) = symbol.unwrap();
 
                    match symbol_value.symbol {
 
                        Symbol::Namespace(_) => {
 
                            // Reference to a namespace instead of a type
 
                            return if ident_iter.num_remaining() == 0 {
 
                                Err(ParseError2::new_error(
 
                                    &ctx.modules[root_id.index as usize].source, symbolic.identifier.position,
 
                                    "Expected a type, got a module name"
 
                                ))
 
                            } else {
 
                                let next_identifier = ident_iter.next().unwrap();
 
                                Err(ParseError2::new_error(
 
                                    &ctx.modules[root_id.index as usize].source, symbolic.identifier.position,
 
                                    &format!("Could not find symbol '{}' with this module", String::from_utf8_lossy(next_identifier))
 
                                ))
 
                            }
 
                        },
 
                        Symbol::Definition((root_id, definition_id)) => {
 
                            let definition = &ctx.heap[definition_id];
 
                            if ident_iter.num_remaining() > 0 {
 
                                // Namespaced identifier is longer than the type
 
                                // we found. Return the appropriate message
 
                                return if definition.is_struct() || definition.is_enum() {
 
                                    Err(ParseError2::new_error(
 
                                        &ctx.modules[root_id.index as usize].source, symbolic.identifier.position,
 
                                        &format!(
 
                                            "Unknown type '{}', did you mean to use '{}'?",
 
                                            String::from_utf8_lossy(&symbolic.identifier.value),
 
                                            String::from_utf8_lossy(&definition.identifier().value)
 
                                        )
 
                                    ))
 
                                } else {
 
                                    Err(ParseError2::new_error(
 
                                        &ctx.modules[root_id.index as usize].source, symbolic.identifier.position,
 
                                        "Unknown type"
 
                                    ))
 
                                }
 
                            }
 

	
 
                            // Found a match, make sure it is a datatype
 
                            if !(definition.is_struct() || definition.is_enum()) {
 
                                return Err(ParseError2::new_error(
 
                                    &ctx.modules[root_id.index as usize].source, symbolic.identifier.position,
 
                                    "Embedded types must be datatypes (structs or enums)"
 
                                ))
 
                            }
 

	
 
                            // Found a struct/enum definition
 
                            if !self.lookup.contains_key(&definition_id) {
 
                                // Type is not yet resolved, immediately return
 
                                // this
 
                                return Ok(ResolveResult::Unresolved((root_id, definition_id)));
 
                            }
 

	
 
                            // Type is resolved, so set as result, but continue
 
                            // iterating over the parser types in the embedded
 
                            // type's tree
 
                            set_resolve_result(ResolveResult::Resolved((root_id, definition_id)));
 

	
 
                            // Note: because we're resolving parser types, not
 
                            // embedded types, we're parsing a tree, so we can't
 
                            // get stuck in a cyclic loop.
 
                            for poly_arg_type_id in &symbolic.poly_args {
 
                                self.parser_type_iter.push_back(*poly_arg_type_id);
 
                            }
 
                        }
 
                    }
 
                }
 
            }
 
        }
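A minimal, self-contained sketch (toy dependency graph and u32 ids, not the real TypeCtx, heap or symbol table) of the breadcrumb-driven resolution used above: a definition is only marked resolved after all of its dependencies, and a dependency that is already on the breadcrumb stack is reported as a cyclic type, mirroring the error built from `self.iter.breadcrumbs`.

use std::collections::{HashMap, HashSet};

// Returns the resolution order, or the breadcrumb trail of a detected cycle.
fn resolve_all(deps: &HashMap<u32, Vec<u32>>) -> Result<Vec<u32>, Vec<u32>> {
    let mut order = Vec::new();
    let mut resolved = HashSet::new();
    for &start in deps.keys() {
        let mut breadcrumbs = vec![start];
        'resolve: while let Some(&current) = breadcrumbs.last() {
            if resolved.contains(&current) {
                breadcrumbs.pop();
                continue;
            }
            for &dep in deps.get(&current).map(Vec::as_slice).unwrap_or(&[]) {
                if resolved.contains(&dep) { continue; }
                if breadcrumbs.contains(&dep) {
                    // cyclic dependency: hand back the trail for error reporting
                    return Err(breadcrumbs);
                }
                // the dependency must be resolved first; retry `current` afterwards
                breadcrumbs.push(dep);
                continue 'resolve;
            }
            // all dependencies resolved, so `current` itself is now resolved
            resolved.insert(current);
            order.push(current);
            breadcrumbs.pop();
        }
    }
    Ok(order)
}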
src/protocol/parser/visitor_linker.rs
Show inline comments
 
@@ -28,385 +28,385 @@ impl DefinitionType {
 
    fn is_composite(&self) -> bool { if let Self::Composite(_) = self { true } else { false } }
 
    fn is_function(&self) -> bool { if let Self::Function(_) = self { true } else { false } }
 
}
 

	
 
/// This particular visitor will go through the entire AST in a recursive manner
 
/// and check if all statements and expressions are legal (e.g. no "return"
 
/// statements in component definitions), and will link certain AST nodes to
 
/// their appropriate targets (e.g. goto statements, or function calls).
 
///
 
/// This visitor will not perform control-flow analysis (e.g. making sure that
 
/// each function actually returns) and will also not perform type checking. So
 
/// the linking of function calls and component instantiations will be checked
 
/// and linked to the appropriate definitions, but the return types and/or
 
/// arguments will not be checked for validity.
 
///
 
/// The visitor visits each statement in a block in a breadth-first manner
 
/// first. We are thereby sure that we have found all variables/labels in a
 
/// particular block. In this phase nodes may queue statements for insertion
 
/// (e.g. the insertion of an `EndIf` statement for a particular `If`
 
/// statement). These will be inserted after visiting every node, after which
 
/// the visitor recurses into each statement in a block.
 
///
 
/// Because of this scheme expressions will not be visited in the breadth-first
 
/// pass.
 
pub(crate) struct ValidityAndLinkerVisitor {
 
    /// `in_sync` is `Some(id)` if the visitor is visiting the children of a
 
    /// synchronous statement. A single value is sufficient as nested
 
    /// synchronous statements are not allowed
 
    in_sync: Option<SynchronousStatementId>,
 
    /// `in_while` contains the last encountered `While` statement. This is used
 
    /// to resolve unlabeled `Continue`/`Break` statements.
 
    in_while: Option<WhileStatementId>,
 
    // Traversal state: current scope (which can be used to find the parent
 
    // scope), the definition variant we are considering, and whether the
 
    // visitor is performing breadthwise block statement traversal.
 
    cur_scope: Option<Scope>,
 
    def_type: DefinitionType,
 
    performing_breadth_pass: bool,
 
    // Parent expression (the previous stmt/expression we visited that could be
 
    // used as an expression parent)
 
    expr_parent: ExpressionParent,
 
    // Keeping track of relative position in block in the breadth-first pass.
 
    // May not correspond to block.statement[index] if any statements are
 
    // inserted after the breadth-pass
 
    relative_pos_in_block: u32,
 
    // Single buffer of statement IDs that we want to traverse in a block.
 
    // Required to work around Rust borrowing rules and to prevent constant
 
    // cloning of vectors.
 
    statement_buffer: Vec<StatementId>,
 
    // Another buffer, now with expression IDs, to prevent constant cloning of
 
    // vectors while working around rust's borrowing rules
 
    expression_buffer: Vec<ExpressionId>,
 
    // Yet another buffer, now with parser type IDs, similar to above
 
    parser_type_buffer: Vec<ParserTypeId>,
 
    // Statements to insert after the breadth pass in a single block
 
    insert_buffer: Vec<(u32, StatementId)>,
 
}
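A minimal sketch (toy statement type and a hypothetical helper, not the real AST or heap) of the breadth-then-depth scheme described in the comment above: the breadth pass over a block queues (position, statement) pairs, which are spliced in before the visitor recurses into the block's statements.

enum Stmt { If, EndIf, Other }

fn breadth_pass(block: &mut Vec<Stmt>) {
    // breadth pass: queue an EndIf right after every If in this block
    let mut insert_buffer: Vec<(usize, Stmt)> = Vec::new();
    for (pos, stmt) in block.iter().enumerate() {
        if let Stmt::If = stmt {
            insert_buffer.push((pos + 1, Stmt::EndIf));
        }
    }
    // splice queued statements; iterate in reverse so earlier indices stay valid
    for (pos, stmt) in insert_buffer.into_iter().rev() {
        block.insert(pos, stmt);
    }
    // the depth pass would now recurse into each statement of the updated block
}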
 

	
 
impl ValidityAndLinkerVisitor {
 
    pub(crate) fn new() -> Self {
 
        Self{
 
            in_sync: None,
 
            in_while: None,
 
            cur_scope: None,
 
            expr_parent: ExpressionParent::None,
 
            def_type: DefinitionType::None,
 
            performing_breadth_pass: false,
 
            relative_pos_in_block: 0,
 
            statement_buffer: Vec::with_capacity(STMT_BUFFER_INIT_CAPACITY),
 
            expression_buffer: Vec::with_capacity(EXPR_BUFFER_INIT_CAPACITY),
 
            parser_type_buffer: Vec::with_capacity(TYPE_BUFFER_INIT_CAPACITY),
 
            insert_buffer: Vec::with_capacity(32),
 
        }
 
    }
 

	
 
    fn reset_state(&mut self) {
 
        self.in_sync = None;
 
        self.in_while = None;
 
        self.cur_scope = None;
 
        self.expr_parent = ExpressionParent::None;
 
        self.def_type = DefinitionType::None;
 
        self.relative_pos_in_block = 0;
 
        self.performing_breadth_pass = false;
 
        self.statement_buffer.clear();
 
        self.expression_buffer.clear();
 
        self.parser_type_buffer.clear();
 
        self.insert_buffer.clear();
 
    }
 

	
 
    /// Debug call to ensure that we didn't make any mistakes in any of the
 
    /// employed buffers
 
    fn check_post_definition_state(&self) {
 
        debug_assert!(self.statement_buffer.is_empty());
 
        debug_assert!(self.expression_buffer.is_empty());
 
        debug_assert!(self.parser_type_buffer.is_empty());
 
        debug_assert!(self.insert_buffer.is_empty());
 
    }
 
}
 

	
 
impl Visitor2 for ValidityAndLinkerVisitor {
 
    //--------------------------------------------------------------------------
 
    // Definition visitors
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_component_definition(&mut self, ctx: &mut Ctx, id: ComponentId) -> VisitorResult {
 
        self.reset_state();
 

	
 
        self.def_type = match &ctx.heap[id].variant {
 
            ComponentVariant::Primitive => DefinitionType::Primitive(id),
 
            ComponentVariant::Composite => DefinitionType::Composite(id),
 
        };
 
        self.cur_scope = Some(Scope::Definition(id.upcast()));
 
        self.expr_parent = ExpressionParent::None;
 

	
 
        // Visit types of parameters
 
        debug_assert!(self.parser_type_buffer.is_empty());
 
        let comp_def = &ctx.heap[id];
 
        self.parser_type_buffer.extend(
 
            comp_def.parameters
 
                .iter()
 
                .map(|id| ctx.heap[*id].parser_type)
 
        );
 

	
 
        let num_types = self.parser_type_buffer.len();
 
        for idx in 0..num_types {
 
            self.visit_parser_type(ctx, self.parser_type_buffer[idx])?;
 
        }
 

	
 
        self.parser_type_buffer.clear();
 

	
 
        // Visit statements in component body
 
        let body_id = ctx.heap[id].body;
 
        self.performing_breadth_pass = true;
 
        self.visit_stmt(ctx, body_id)?;
 
        self.performing_breadth_pass = false;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        self.check_post_definition_state();
 
        Ok(())
 
    }
 

	
 
    fn visit_function_definition(&mut self, ctx: &mut Ctx, id: FunctionId) -> VisitorResult {
 
        self.reset_state();
 

	
 
        // Set internal statement indices
 
        self.def_type = DefinitionType::Function(id);
 
        self.cur_scope = Some(Scope::Definition(id.upcast()));
 
        self.expr_parent = ExpressionParent::None;
 

	
 
        // Visit types of parameters
 
        debug_assert!(self.parser_type_buffer.is_empty());
 
        let func_def = &ctx.heap[id];
 
        self.parser_type_buffer.extend(
 
            func_def.parameters
 
                .iter()
 
                .map(|id| ctx.heap[*id].parser_type)
 
        );
 
        self.parser_type_buffer.push(func_def.return_type);
 

	
 
        let num_types = self.parser_type_buffer.len();
 
        for idx in 0..num_types {
 
            self.visit_parser_type(ctx, self.parser_type_buffer[idx])?;
 
        }
 

	
 
        self.parser_type_buffer.clear();
 

	
 
        // Visit statements in function body
 
        let body_id = ctx.heap[id].body;
 
        self.performing_breadth_pass = true;
 
        self.visit_stmt(ctx, body_id)?;
 
        self.performing_breadth_pass = false;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        self.check_post_definition_state();
 
        Ok(())
 
    }
 

	
 
    //--------------------------------------------------------------------------
 
    // Statement visitors
 
    //--------------------------------------------------------------------------
 

	
 
    fn visit_block_stmt(&mut self, ctx: &mut Ctx, id: BlockStatementId) -> VisitorResult {
 
        self.visit_block_stmt_with_hint(ctx, id, None)
 
    }
 

	
 
    fn visit_local_memory_stmt(&mut self, ctx: &mut Ctx, id: MemoryStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let variable_id = ctx.heap[id].variable;
 
            self.checked_local_add(ctx, self.relative_pos_in_block, variable_id)?;
 
        } else {
 
            let variable_id = ctx.heap[id].variable;
 
            let parser_type_id = ctx.heap[variable_id].parser_type;
 
            self.visit_parser_type(ctx, parser_type_id);
 
            self.visit_parser_type(ctx, parser_type_id)?;
 

	
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::Memory(id);
 
            self.visit_expr(ctx, ctx.heap[id].initial)?;
 
            self.expr_parent = ExpressionParent::None;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_local_channel_stmt(&mut self, ctx: &mut Ctx, id: ChannelStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let (from_id, to_id) = {
 
                let stmt = &ctx.heap[id];
 
                (stmt.from, stmt.to)
 
            };
 
            self.checked_local_add(ctx, self.relative_pos_in_block, from_id)?;
 
            self.checked_local_add(ctx, self.relative_pos_in_block, to_id)?;
 
        } else {
 
            let chan_stmt = &ctx.heap[id];
 
            let from_type_id = ctx.heap[chan_stmt.from].parser_type;
 
            let to_type_id = ctx.heap[chan_stmt.to].parser_type;
 
            self.visit_parser_type(ctx, from_type_id)?;
 
            self.visit_parser_type(ctx, to_type_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_labeled_stmt(&mut self, ctx: &mut Ctx, id: LabeledStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            // Add label to block lookup
 
            self.checked_label_add(ctx, id)?;
 

	
 
            // Modify labeled statement itself
 
            let labeled = &mut ctx.heap[id];
 
            labeled.relative_pos_in_block = self.relative_pos_in_block;
 
            labeled.in_sync = self.in_sync.clone();
 
        }
 

	
 
        let body_id = ctx.heap[id].body;
 
        self.visit_stmt(ctx, body_id)?;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_if_stmt(&mut self, ctx: &mut Ctx, id: IfStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let position = ctx.heap[id].position;
 
            let end_if_id = ctx.heap.alloc_end_if_statement(|this| {
 
                EndIfStatement {
 
                    this,
 
                    start_if: id,
 
                    position,
 
                    next: None,
 
                }
 
            });
 
            let stmt = &mut ctx.heap[id];
 
            stmt.end_if = Some(end_if_id);
 
            self.insert_buffer.push((self.relative_pos_in_block + 1, end_if_id.upcast()));
 
        } else {
 
            // Traverse expression and bodies
 
            let (test_id, true_id, false_id) = {
 
                let stmt = &ctx.heap[id];
 
                (stmt.test, stmt.true_body, stmt.false_body)
 
            };
 

	
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::If(id);
 
            self.visit_expr(ctx, test_id)?;
 
            self.expr_parent = ExpressionParent::None;
 

	
 
            self.visit_stmt(ctx, true_id)?;
 
            self.visit_stmt(ctx, false_id)?;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_while_stmt(&mut self, ctx: &mut Ctx, id: WhileStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let position = ctx.heap[id].position;
 
            let end_while_id = ctx.heap.alloc_end_while_statement(|this| {
 
                EndWhileStatement {
 
                    this,
 
                    start_while: id,
 
                    position,
 
                    next: None,
 
                }
 
            });
 
            let stmt = &mut ctx.heap[id];
 
            stmt.end_while = Some(end_while_id);
 
            stmt.in_sync = self.in_sync.clone();
 

	
 
            self.insert_buffer.push((self.relative_pos_in_block + 1, end_while_id.upcast()));
 
        } else {
 
            let (test_id, body_id) = {
 
                let stmt = &ctx.heap[id];
 
                (stmt.test, stmt.body)
 
            };
 
            let old_while = self.in_while.replace(id);
 
            debug_assert_eq!(self.expr_parent, ExpressionParent::None);
 
            self.expr_parent = ExpressionParent::While(id);
 
            self.visit_expr(ctx, test_id)?;
 
            self.expr_parent = ExpressionParent::None;
 

	
 
            self.visit_stmt(ctx, body_id)?;
 
            self.in_while = old_while;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_break_stmt(&mut self, ctx: &mut Ctx, id: BreakStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            // Should be able to resolve break statements with a label in the
 
            // breadth pass; there is no need to wait until all labels have been resolved
 
            let target_end_while = {
 
                let stmt = &ctx.heap[id];
 
                let target_while_id = self.resolve_break_or_continue_target(ctx, stmt.position, &stmt.label)?;
 
                let target_while = &ctx.heap[target_while_id];
 
                debug_assert!(target_while.end_while.is_some());
 
                target_while.end_while.unwrap()
 
            };
 

	
 
            let stmt = &mut ctx.heap[id];
 
            stmt.target = Some(target_end_while);
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_continue_stmt(&mut self, ctx: &mut Ctx, id: ContinueStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let target_while_id = {
 
                let stmt = &ctx.heap[id];
 
                self.resolve_break_or_continue_target(ctx, stmt.position, &stmt.label)?
 
            };
 

	
 
            let stmt = &mut ctx.heap[id];
 
            stmt.target = Some(target_while_id)
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_synchronous_stmt(&mut self, ctx: &mut Ctx, id: SynchronousStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            // Check for validity of synchronous statement
 
            let cur_sync_position = ctx.heap[id].position;
 
            if self.in_sync.is_some() {
 
                // Nested synchronous statement
 
                let old_sync = &ctx.heap[self.in_sync.unwrap()];
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, cur_sync_position, "Illegal nested synchronous statement")
 
                        .with_postfixed_info(&ctx.module.source, old_sync.position, "It is nested in this synchronous statement")
 
                );
 
            }
 

	
 
            if !self.def_type.is_primitive() {
 
                return Err(ParseError2::new_error(
 
                    &ctx.module.source, cur_sync_position,
 
                    "Synchronous statements may only be used in primitive components"
 
                ));
 
            }
 

	
 
            // Append SynchronousEnd pseudo-statement
 
            let sync_end_id = ctx.heap.alloc_end_synchronous_statement(|this| EndSynchronousStatement{
 
                this,
 
                position: cur_sync_position,
 
                start_sync: id,
 
                next: None,
 
            });
 
            let sync_start = &mut ctx.heap[id];
 
            sync_start.end_sync = Some(sync_end_id);
 
            self.insert_buffer.push((self.relative_pos_in_block + 1, sync_end_id.upcast()));
 
        } else {
 
            let sync_body = ctx.heap[id].body;
 
            let old = self.in_sync.replace(id);
 
            self.visit_stmt_with_hint(ctx, sync_body, Some(id))?;
 
            self.in_sync = old;
 
        }
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_return_stmt(&mut self, ctx: &mut Ctx, id: ReturnStatementId) -> VisitorResult {
 
        if self.performing_breadth_pass {
 
            let stmt = &ctx.heap[id];
 
            if !self.def_type.is_function() {
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, stmt.position, "Return statements may only appear in function bodies")
 
@@ -1334,207 +1334,207 @@ impl ValidityAndLinkerVisitor {
 
                                ParseError2::new_error(&ctx.module.source, identifier.position, "This target label skips over a variable declaration")
 
                                    .with_postfixed_info(&ctx.module.source, label.position, "Because it jumps to this label")
 
                                    .with_postfixed_info(&ctx.module.source, local.position, "Which skips over this variable")
 
                            );
 
                        }
 
                    }
 
                    return Ok(*label_id);
 
                }
 
            }
 

	
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                return Err(ParseError2::new_error(&ctx.module.source, identifier.position, "Could not find this label"));
 
            }
 

	
 
        }
 
    }
 

	
 
    /// Finds a particular symbol in the symbol table which must correspond to
 
    /// a definition of a particular type.
 
    // Note: root_id, symbols and types passed in explicitly to prevent
 
    //  borrowing errors
 
    fn find_symbol_of_type(
 
        &self, root_id: RootId, symbols: &SymbolTable, types: &TypeTable,
 
        identifier: &NamespacedIdentifier, expected_type_class: TypeClass
 
    ) -> FindOfTypeResult {
 
        // Find symbol associated with identifier
 
        let symbol = symbols.resolve_namespaced_symbol(root_id, &identifier);
 
        if symbol.is_none() { return FindOfTypeResult::NotFound; }
 

	
 
        let (symbol, iter) = symbol.unwrap();
 
        if iter.num_remaining() != 0 { return FindOfTypeResult::NotFound; }
 

	
 
        match &symbol.symbol {
 
            Symbol::Definition((_, definition_id)) => {
 
                // Make sure definition is of the expected type
 
                let definition_type = types.get_base_definition(&definition_id);
 
                debug_assert!(definition_type.is_some(), "Found symbol '{}' in symbol table, but not in type table", String::from_utf8_lossy(&identifier.value));
 
                let definition_type_class = definition_type.unwrap().definition.type_class();
 

	
 
                if definition_type_class != expected_type_class {
 
                    FindOfTypeResult::TypeMismatch(definition_type_class.display_name())
 
                } else {
 
                    FindOfTypeResult::Found(*definition_id)
 
                }
 
            },
 
            Symbol::Namespace(_) => FindOfTypeResult::TypeMismatch("namespace"),
 
        }
 
    }
 

	
 
    /// This function will check if the provided while statement ID has a block
 
    /// statement that is one of our current parents.
 
    fn has_parent_while_scope(&self, ctx: &Ctx, id: WhileStatementId) -> bool {
 
        debug_assert!(self.cur_scope.is_some());
 
        let mut scope = self.cur_scope.as_ref().unwrap();
 
        let while_stmt = &ctx.heap[id];
 
        loop {
 
            debug_assert!(scope.is_block());
 
            let block = scope.to_block();
 
            if while_stmt.body == block.upcast() {
 
                return true;
 
            }
 

	
 
            let block = &ctx.heap[block];
 
            debug_assert!(block.parent_scope.is_some(), "block scope does not have a parent");
 
            scope = block.parent_scope.as_ref().unwrap();
 
            if !scope.is_block() {
 
                return false;
 
            }
 
        }
 
    }
 

	
 
    /// This function should be called while dealing with break/continue
 
    /// statements. It will try to find the targeted while statement, using the
 
    /// target label if provided. If a valid target is found then the loop's
 
    /// ID will be returned, otherwise a parsing error is constructed.
 
    /// The provided input position should be the position of the break/continue
 
    /// statement.
 
    fn resolve_break_or_continue_target(&self, ctx: &Ctx, position: InputPosition, label: &Option<Identifier>) -> Result<WhileStatementId, ParseError2> {
 
        let target = match label {
 
            Some(label) => {
 
                let target_id = self.find_label(ctx, label)?;
 

	
 
                // Make sure break target is a while statement
 
                let target = &ctx.heap[target_id];
 
                if let Statement::While(target_stmt) = &ctx.heap[target.body] {
 
                    // Even though we have a target while statement, the break might not be
 
                    // present underneath this particular labeled while statement
 
                    if !self.has_parent_while_scope(ctx, target_stmt.this) {
 
                        return Err(
 
                            ParseError2::new_error(&ctx.module.source, label.position, "Break statement is not nested under the target label's while statement")
 
                                .with_postfixed_info(&ctx.module.source, target.position, "The targeted label is found here")
 
                        );
 
                    }
 

	
 
                    target_stmt.this
 
                } else {
 
                    return Err(
 
                        ParseError2::new_error(&ctx.module.source, label.position, "Incorrect break target label, it must target a while loop")
 
                            .with_postfixed_info(&ctx.module.source, target.position, "The targeted label is found here")
 
                    );
 
                }
 
            },
 
            None => {
 
                // Use the enclosing while statement, the break must be
 
                // nested within that while statement
 
                if self.in_while.is_none() {
 
                    return Err(
 
                        ParseError2::new_error(&ctx.module.source, position, "Break statement is not nested under a while loop")
 
                    );
 
                }
 

	
 
                self.in_while.unwrap()
 
            }
 
        };
 

	
 
        // We have a valid target for the break statement. But we need to
 
        // make sure we will not break out of a synchronous block
 
        {
 
            let target_while = &ctx.heap[target];
 
            if target_while.in_sync != self.in_sync {
 
                // Break is nested under while statement, so can only escape a
 
                // sync block if the sync is nested inside the while statement.
 
                debug_assert!(self.in_sync.is_some());
 
                let sync_stmt = &ctx.heap[self.in_sync.unwrap()];
 
                return Err(
 
                    ParseError2::new_error(&ctx.module.source, position, "Break may not escape the surrounding synchronous block")
 
                        .with_postfixed_info(&ctx.module.source, target_while.position, "The break escapes out of this loop")
 
                        .with_postfixed_info(&ctx.module.source, sync_stmt.position, "And would therefore escape this synchronous block")
 
                );
 
            }
 
        }
 

	
 
        Ok(target)
 
    }
 

	
 
    fn visit_call_poly_args(&mut self, ctx: &mut Ctx, call_id: CallExpressionId) -> VisitorResult {
 
        let call_expr = &ctx.heap[call_id];
 

	
 
        // Determine the polyarg signature
 
        let num_expected_poly_args = match &call_expr.method {
 
            Method::Create => {
 
                0
 
            },
 
            Method::Fires => {
 
                1
 
            },
 
            Method::Get => {
 
                1
 
            },
 
            Method::Put => {
 
                1
 
            }
 
            Method::Symbolic(symbolic) => {
 
                let definition = &ctx.heap[symbolic.definition.unwrap()];
 
                if let Definition::Function(definition) = definition {
 
                    definition.poly_vars.len()
 
                } else {
 
                    debug_assert!(false, "expected function while visiting call poly args");
 
                    unreachable!();
 
                }
 
            }
 
        };
 

	
 
        // We allow zero polyargs to imply all args are inferred. Otherwise the
 
        // number of arguments must be equal
 
        if call_expr.poly_args.is_empty() {
 
            if num_expected_poly_args != 0 {
 
                // Infer all polyargs
 
                // TODO: @cleanup Not nice to use method position as implicitly
 
                //  inferred parser type pos.
 
                let pos = call_expr.position();
 
                for _ in 0..num_expected_poly_args {
 
                    self.parser_type_buffer.push(ctx.heap.alloc_parser_type(|this| ParserType {
 
                        this,
 
                        pos,
 
                        variant: ParserTypeVariant::Inferred,
 
                    }));
 
                }
 

	
 
                let call_expr = &mut ctx.heap[call_id];
 
                call_expr.poly_args.reserve(num_expected_poly_args);
 
                for _ in 0..num_expected_poly_args {
 
                    call_expr.poly_args.push(self.parser_type_buffer.pop().unwrap());
 
                }
 
            }
 
            Ok(())
 
        } else if call_expr.poly_args.len() == num_expected_poly_args {
 
            // Number of args is not 0, so parse all the specified ParserTypes
 
            let old_num_types = self.parser_type_buffer.len();
 
            self.parser_type_buffer.extend(&call_expr.poly_args);
 
            while self.parser_type_buffer.len() > old_num_types {
 
                let parser_type_id = self.parser_type_buffer.pop().unwrap();
 
                self.visit_parser_type(ctx, parser_type_id);
 
                self.visit_parser_type(ctx, parser_type_id)?;
 
            }
 
            self.parser_type_buffer.truncate(old_num_types);
 
            Ok(())
 
        } else {
 
            return Err(ParseError2::new_error(
 
                &ctx.module.source, call_expr.position,
 
                &format!(
 
                    "Expected {} polymorphic arguments (or none, to infer them), but {} were specified",
 
                    num_expected_poly_args, call_expr.poly_args.len()
 
                )
 
            ));
 
        }
 
    }
 
}
 
\ No newline at end of file
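A small standalone sketch (hypothetical helper, not part of the visitor) of the polymorphic-argument rule enforced by visit_call_poly_args above: zero specified arguments means all of them are inferred, otherwise the count must match the definition exactly.

fn effective_poly_arg_count(specified: usize, expected: usize) -> Result<usize, String> {
    if specified == 0 {
        // no explicit poly-args: infer every one of them
        Ok(expected)
    } else if specified == expected {
        Ok(specified)
    } else {
        Err(format!(
            "Expected {} polymorphic arguments (or none, to infer them), but {} were specified",
            expected, specified
        ))
    }
}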
src/runtime/communication.rs
Show inline comments
 
@@ -1227,203 +1227,207 @@ impl ProtoComponentBranch {
 
    // Feed this branch a received message.
 
    // It's safe to receive the same message repeatedly,
 
    // but if we receive a message with different contents,
 
    // it's a sign something has gone wrong! keys of type (port, round, predicate)
 
    // should always map to at most one message value!
 
    fn feed_msg(&mut self, getter: PortId, payload: Payload) {
 
        let e = self.inner.inbox.entry(getter);
 
        use std::collections::hash_map::Entry;
 
        match e {
 
            Entry::Vacant(ev) => {
 
                // new message
 
                ev.insert(payload);
 
            }
 
            Entry::Occupied(eo) => {
 
                // redundant recv. can happen as a result of a
 
                // component A having two branches X and Y related by
 
                assert_eq!(eo.get(), &payload);
 
            }
 
        }
 
    }
 
}
 
impl SolutionStorage {
 
    // Create a new solution storage, to manage the local solutions for
 
    // this connector and all of its children (subtrees) in the solution tree.
 
    fn new(subtree_ids: impl Iterator<Item = SubtreeId>) -> Self {
 
        // For easy iteration, we store this SubtreeId => {Predicate}
 
        // structure instead as a pair of structures: a vector of predicate sets,
 
        // and a subtree_id-to-index lookup map
 
        let mut subtree_id_to_index: HashMap<SubtreeId, usize> = Default::default();
 
        let mut subtree_solutions = vec![];
 
        for id in subtree_ids {
 
            subtree_id_to_index.insert(id, subtree_solutions.len());
 
            subtree_solutions.push(Default::default())
 
        }
 
        // new_local U old_local represents the solutions of this connector itself:
 
        // namely, those that can be created from the union of one element from each child's solution set.
 
        // The difference between new and old is that new stores those NOT YET sent over the network
 
        // to this connector's parent in the solution tree.
 
        // invariant: old_local and new_local have an empty intersection
 
        Self {
 
            subtree_solutions,
 
            subtree_id_to_index,
 
            old_local: Default::default(),
 
            new_local: Default::default(),
 
        }
 
    }
 
    // drain new_local into old_local, visiting all new additions to old_local
 
    pub(crate) fn iter_new_local_make_old(&mut self) -> impl Iterator<Item = Predicate> + '_ {
 
        let Self { old_local, new_local, .. } = self;
 
        new_local.drain().map(move |local| {
 
            // rely on invariant: empty intersection between old and new local sets
 
            assert!(old_local.insert(local.clone()));
 
            local
 
        })
 
    }
 
    // insert a solution for the given subtree ID,
 
    // AND update new_local to include any solutions that become
 
    // possible as a result of this new addition
 
    pub(crate) fn submit_and_digest_subtree_solution(
 
        &mut self,
 
        cu: &mut impl CuUndecided,
 
        subtree_id: SubtreeId,
 
        predicate: Predicate,
 
    ) {
 
        log!(cu.logger(), "++ new component solution {:?} {:?}", subtree_id, &predicate);
 
        let Self { subtree_solutions, new_local, old_local, subtree_id_to_index } = self;
 
        let index = subtree_id_to_index[&subtree_id];
 
        let was_new = subtree_solutions[index].insert(predicate.clone());
 
        if was_new {
 
            // This is a newly-added solution! update new_local
 
            // consider ALL consistent combinations of one element from each solution set
 
            // to our right or left in the solution-set vector
 
            // but with THIS PARTICULAR predicate from our own index.
 
            let left = 0..index;
 
            let right = (index + 1)..subtree_solutions.len();
 
            // iterator over SETS of solutions, one for every component except `subtree_id` (me)
 
            let set_visitor = left.chain(right).map(|index| &subtree_solutions[index]);
 
            // Recursively enumerate all solutions matching the description above,
 
            Self::elaborate_into_new_local_rec(cu, predicate, set_visitor, old_local, new_local);
 
        }
 
    }
 

	
 
    // Recursively build local solutions for this connector,
 
    // see `submit_and_digest_subtree_solution`
 
    fn elaborate_into_new_local_rec<'a, 'b>(
 
        cu: &mut impl CuUndecided,
 
        partial: Predicate,
 
        mut set_visitor: impl Iterator<Item = &'b HashSet<Predicate>> + Clone,
 
        old_local: &'b HashSet<Predicate>,
 
        new_local: &'a mut HashSet<Predicate>,
 
    ) {
 
        if let Some(set) = set_visitor.next() {
 
            // incomplete solution. keep recursively creating combined solutions
 
            for pred in set.iter() {
 
                if let Some(elaborated) = pred.union_with(&partial) {
 
                    Self::elaborate_into_new_local_rec(
 
                        cu,
 
                        elaborated,
 
                        set_visitor.clone(),
 
                        old_local,
 
                        new_local,
 
                    )
 
                }
 
            }
 
        } else {
 
            // recursive stop condition. This is a solution for this connector...
 
            if !old_local.contains(&partial) {
 
                // ... and it hasn't been found before
 
                log!(cu.logger(), "storing NEW LOCAL SOLUTION {:?}", &partial);
 
                new_local.insert(partial);
 
            }
 
        }
 
    }
 
}
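A self-contained sketch (toy predicate type, not the real Predicate/SpecVar) of the recursion in elaborate_into_new_local_rec above: extend a partial assignment with one element from each remaining solution set, keeping only combinations whose union is consistent. Starting from one component's own predicate and visiting every other component's solution set reproduces the new-local elaboration above.

use std::collections::BTreeMap;

type Pred = BTreeMap<u32, bool>;

// Union of two assignments, None if they conflict on some variable.
fn union(a: &Pred, b: &Pred) -> Option<Pred> {
    let mut out = a.clone();
    for (&var, &val) in b {
        match out.insert(var, val) {
            Some(prev) if prev != val => return None,
            _ => {}
        }
    }
    Some(out)
}

// Enumerate every consistent combination of `partial` with one predicate per set.
fn elaborate(partial: Pred, sets: &[Vec<Pred>], out: &mut Vec<Pred>) {
    match sets.split_first() {
        None => out.push(partial), // combined one element from every set
        Some((first, rest)) => {
            for pred in first {
                if let Some(extended) = union(&partial, pred) {
                    elaborate(extended, rest, out);
                }
            }
        }
    }
}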
 
impl NonsyncProtoContext<'_> {
 
    // Facilitates callback from the component to the connector runtime,
 
    // creating a new component and changing the given port's ownership to that
 
    // of the new component.
 
    pub(crate) fn new_component(&mut self, moved_ports: HashSet<PortId>, state: ComponentState) {
 
        // Sanity check! The moved ports are owned by this component to begin with
 
        for port in moved_ports.iter() {
 
            assert_eq!(self.proto_component_id, self.ips.port_info.map.get(port).unwrap().owner);
 
        }
 
        // Create the new component, and schedule it to be run
 
        let new_cid = self.ips.id_manager.new_component_id();
 
        log!(
 
            self.logger,
 
            "Component {:?} added new component {:?} with state {:?}, moving ports {:?}",
 
            self.proto_component_id,
 
            new_cid,
 
            &state,
 
            &moved_ports
 
        );
 
        self.unrun_components.push((new_cid, state));
 
        // Update the ownership of the moved ports
 
        for port in moved_ports.iter() {
 
            self.ips.port_info.map.get_mut(port).unwrap().owner = new_cid;
 
        }
 
        if let Some(set) = self.ips.port_info.owned.get_mut(&self.proto_component_id) {
 
            set.retain(|x| !moved_ports.contains(x));
 
        }
 
        self.ips.port_info.owned.insert(new_cid, moved_ports.clone());
 
    }
 

	
 
    // Facilitates callback from the component to the connector runtime,
 
    // creating a new port-pair connected by a memory channel
 
    pub(crate) fn new_port_pair(&mut self) -> [PortId; 2] {
 
        // adds two new associated ports, related to each other, and exposed to the proto component
 
        let mut new_cid_fn = || self.ips.id_manager.new_port_id();
 
        let [o, i] = [new_cid_fn(), new_cid_fn()];
 
        self.ips.port_info.map.insert(
 
            o,
 
            PortInfo {
 
                route: Route::LocalComponent,
 
                peer: Some(i),
 
                polarity: Putter,
 
                owner: self.proto_component_id,
 
            },
 
        );
 
        self.ips.port_info.map.insert(
 
            i,
 
            PortInfo {
 
                route: Route::LocalComponent,
 
                peer: Some(o),
 
                polarity: Getter,
 
                owner: self.proto_component_id,
 
            },
 
        );
 
        self.ips
 
            .port_info
 
            .owned
 
            .entry(self.proto_component_id)
 
            .or_default()
 
            .extend([o, i].iter().copied());
 
        log!(
 
            self.logger,
 
            "Component {:?} port pair (out->in) {:?} -> {:?}",
 
            self.proto_component_id,
 
            o,
 
            i
 
        );
 
        [o, i]
 
    }
 
}
 
impl SyncProtoContext<'_> {
 
    // The component calls the runtime back, inspecting whether its associated
 
    // predicate has already determined a (speculative) value for the given port's firing variable.
 
    pub(crate) fn is_firing(&mut self, port: PortId) -> Option<bool> {
 
        let var = self.rctx.ips.port_info.spec_var_for(port);
 
        self.predicate.query(var).map(SpecVal::is_firing)
 
    }
 

	
 
    pub(crate) fn did_put_or_get(&mut self, port: PortId) -> bool {
 
        self.branch_inner.did_put_or_get.contains(&port)
 
    }
 

	
 
    // The component calls the runtime back, trying to inspect a port's message
 
    pub(crate) fn read_msg(&mut self, port: PortId) -> Option<&Payload> {
 
        let maybe_msg = self.branch_inner.inbox.get(&port);
 
        if maybe_msg.is_some() {
 
            // Make a note that this component has received
 
            // this port's message 1+ times this round
 
            self.branch_inner.did_put_or_get.insert(port);
 
        }
 
        maybe_msg
 
    }
 
}
src/runtime/mod.rs
Show inline comments
 
@@ -658,280 +658,280 @@ impl Connector {
 
        log!(cu.logger, "Added port pair (out->in) {:?} -> {:?}", o, i);
 
        [o, i]
 
    }
 

	
 
    /// Instantiates a new component for the connector runtime to manage, passing
 
    /// the given set of ports from the interface of the native component to that of the
 
    /// newly created component (transferring their ownership).
 
    /// # Errors
 
    /// Error is returned if the moved ports are not owned by the native component,
 
    /// if the given component name is not defined in the connector's protocol,
 
    /// the given sequence of ports contains a duplicate port,
 
    /// or if the component is unfit for instantiation with the given port sequence.
 
    /// # Panics
 
    /// This function panics if the connector's (large) component id space is exhausted.
 
    pub fn add_component(
 
        &mut self,
 
        identifier: &[u8],
 
        ports: &[PortId],
 
    ) -> Result<(), AddComponentError> {
 
        // Check for error cases first before modifying `cu`
 
        use AddComponentError as Ace;
 
        let cu = &self.unphased;
 
        if let Some(port) = duplicate_port(ports) {
 
            return Err(Ace::DuplicatePort(port));
 
        }
 
        let expected_polarities = cu.proto_description.component_polarities(identifier)?;
 
        if expected_polarities.len() != ports.len() {
 
            return Err(Ace::WrongNumberOfParamaters { expected: expected_polarities.len() });
 
        }
 
        for (&expected_polarity, &port) in expected_polarities.iter().zip(ports.iter()) {
 
            let info = cu.ips.port_info.map.get(&port).ok_or(Ace::UnknownPort(port))?;
 
            if info.owner != cu.native_component_id {
 
                return Err(Ace::UnknownPort(port));
 
            }
 
            if info.polarity != expected_polarity {
 
                return Err(Ace::WrongPortPolarity { port, expected_polarity });
 
            }
 
        }
 
        // No errors! Time to modify `cu`
 
        // create a new component and identifier
 
        let Connector { phased, unphased: cu } = self;
 
        let new_cid = cu.ips.id_manager.new_component_id();
 
        cu.proto_components.insert(new_cid, cu.proto_description.new_component(identifier, ports));
 
        // update the ownership of moved ports
 
        for port in ports.iter() {
 
            match cu.ips.port_info.map.get_mut(port) {
 
                Some(port_info) => port_info.owner = new_cid,
 
                None => unreachable!(),
 
            }
 
        }
 
        if let Some(set) = cu.ips.port_info.owned.get_mut(&cu.native_component_id) {
 
            set.retain(|x| !ports.contains(x));
 
        }
 
        let moved_port_set: HashSet<PortId> = ports.iter().copied().collect();
 
        if let ConnectorPhased::Communication(comm) = phased {
 
            // Preserve invariant: batches only reason about native's ports.
 
            // Remove batch puts/gets for moved ports.
 
            for batch in comm.native_batches.iter_mut() {
 
                batch.to_put.retain(|port, _| !moved_port_set.contains(port));
 
                batch.to_get.retain(|port| !moved_port_set.contains(port));
 
            }
 
        }
 
        cu.ips.port_info.owned.insert(new_cid, moved_port_set);
 
        Ok(())
 
    }
 
}
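A hypothetical usage sketch of add_component as documented above. The component name "forward" and its single in/out signature are made up for illustration; only the calls that appear in this changeset (new_port_pair, add_component) are used.

fn add_forwarder(c: &mut Connector) -> Result<(), AddComponentError> {
    // create a fresh channel; the native component initially owns both ends
    let [putter, getter] = c.new_port_pair();
    // move both ends to a new instance of the (hypothetical) "forward" component
    c.add_component(b"forward", &[getter, putter])
}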
 
impl Predicate {
 
    #[inline]
 
    pub fn singleton(k: SpecVar, v: SpecVal) -> Self {
 
        Self::default().inserted(k, v)
 
    }
 
    #[inline]
 
    pub fn inserted(mut self, k: SpecVar, v: SpecVal) -> Self {
 
        self.assigned.insert(k, v);
 
        self
 
    }
 

	
 
    // Return true if `self` is a subset of `maybe_superset`
 
    pub fn assigns_subset(&self, maybe_superset: &Self) -> bool {
 
        for (var, val) in self.assigned.iter() {
 
            match maybe_superset.assigned.get(var) {
 
                Some(val2) if val2 == val => {}
 
                _ => return false, // var unmapped, or mapped differently
 
            }
 
        }
 
        // `maybe_superset` mirrored all my assignments!
 
        true
 
    }
 

	
 
    /// Given the two predicates {self, other}, return that whose
 
    /// assignments are the union of those of both.
 
    fn assignment_union(&self, other: &Self) -> AssignmentUnionResult {
 
        use AssignmentUnionResult as Aur;
 
        // iterators over assignments of both predicates. Rely on SORTED ordering of BTreeMap's keys.
 
        let [mut s_it, mut o_it] = [self.assigned.iter(), other.assigned.iter()];
 
        let [mut s, mut o] = [s_it.next(), o_it.next()];
 
        // populate lists of assignments in self but not other and vice versa.
 
        // do this by incrementally unfolding the iterators, keeping an eye
 
        // on the ordering between the head elements [s, o].
 
        // whenever s<o, other is certainly missing element 's', etc.
 
        let [mut s_not_o, mut o_not_s] = [vec![], vec![]];
 
        loop {
 
            match [s, o] {
 
                [None, None] => break, // both iterators are empty
 
                [None, Some(x)] => {
 
                    // self's iterator is empty.
 
                    // all remaining elements are in other but not self
 
                    o_not_s.push(x);
 
                    o_not_s.extend(o_it);
 
                    break;
 
                }
 
                [Some(x), None] => {
 
                    // other's iterator is empty.
 
                    // all remaining elements are in self but not other
 
                    s_not_o.push(x);
 
                    s_not_o.extend(s_it);
 
                    break;
 
                }
 
                [Some((sid, sb)), Some((oid, ob))] => {
 
                    if sid < oid {
 
                        // o is missing this element
 
                        s_not_o.push((sid, sb));
 
                        s = s_it.next();
 
                    } else if sid > oid {
 
                        // s is missing this element
 
                        o_not_s.push((oid, ob));
 
                        o = o_it.next();
 
                    } else if sb != ob {
 
                        assert_eq!(sid, oid);
 
                        // both predicates assign the variable but differ on the value
 
                        // No predicate exists which satisfies both!
 
                        return Aur::Nonexistant;
 
                    } else {
 
                        // both predicates assign the variable to the same value
 
                        s = s_it.next();
 
                        o = o_it.next();
 
                    }
 
                }
 
            }
 
        }
 
        // Observed zero inconsistencies. A unified predicate exists...
 
        match [s_not_o.is_empty(), o_not_s.is_empty()] {
 
            [true, true] => Aur::Equivalent,       // ... equivalent to both.
 
            [false, true] => Aur::FormerNotLatter, // ... equivalent to self.
 
            [true, false] => Aur::LatterNotFormer, // ... equivalent to other.
 
            [false, false] => {
 
                // ... which is the union of the predicates' assignments but
 
                //     is equivalent to neither self nor other.
 
                let mut new = self.clone();
 
                for (&id, &b) in o_not_s {
 
                    new.assigned.insert(id, b);
 
                }
 
                Aur::New(new)
 
            }
 
        }
 
    }
 

	
 
    // Compute the union of the assignments of the two given predicates, if it exists.
 
    // It doesn't exist if there is some value which the predicates assign to different values.
 
    pub(crate) fn union_with(&self, other: &Self) -> Option<Self> {
 
        let mut res = self.clone();
 
        for (&channel_id, &assignment_1) in other.assigned.iter() {
 
            match res.assigned.insert(channel_id, assignment_1) {
 
                Some(assignment_2) if assignment_1 != assignment_2 => return None,
 
                _ => {}
 
            }
 
        }
 
        Some(res)
 
    }
 
    pub(crate) fn query(&self, var: SpecVar) -> Option<SpecVal> {
 
        self.assigned.get(&var).copied()
 
    }
 
}
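A toy illustration (plain (variable, value) slices and a string label instead of the real SpecVar/SpecVal and AssignmentUnionResult) of the four outcomes that assignment_union above distinguishes; it only classifies the relationship and omits building the New union.

fn classify_union(s: &[(u32, bool)], o: &[(u32, bool)]) -> &'static str {
    let mut self_has_extra = false;  // some assignment of `s` is missing from `o`
    let mut other_has_extra = false; // some assignment of `o` is missing from `s`
    for (var, val) in s {
        match o.iter().find(|(v, _)| v == var) {
            Some((_, other_val)) if other_val != val => return "Nonexistent",
            Some(_) => {}
            None => self_has_extra = true,
        }
    }
    for (var, _) in o {
        if !s.iter().any(|(v, _)| v == var) {
            other_has_extra = true;
        }
    }
    match (self_has_extra, other_has_extra) {
        (false, false) => "Equivalent",
        (true, false) => "FormerNotLatter", // union equals `s`
        (false, true) => "LatterNotFormer", // union equals `o`
        (true, true) => "New",              // union is strictly larger than both
    }
}

For example, classify_union(&[(0, true)], &[(0, true), (1, false)]) yields "LatterNotFormer", since the second assignment strictly extends the first.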
 

	
 
impl RoundCtx {
 
    // remove an arbitrary buffered message, along with the ID of the getter who receives it
 
    fn getter_pop(&mut self) -> Option<(PortId, SendPayloadMsg)> {
 
        self.payload_inbox.pop()
 
    }
 

	
 
    // buffer a message along with the ID of the getter who receives it
 
    fn getter_push(&mut self, getter: PortId, msg: SendPayloadMsg) {
 
        self.payload_inbox.push((getter, msg));
 
    }
 

	
 
    // buffer a message along with the ID of the putter who sent it
 
    fn putter_push(&mut self, cu: &mut impl CuUndecided, putter: PortId, msg: SendPayloadMsg) {
 
        if let Some(getter) = self.ips.port_info.map.get(&putter).unwrap().peer {
 
            log!(cu.logger(), "Putter add (putter:{:?} => getter:{:?})", putter, getter);
 
            self.getter_push(getter, msg);
 
        } else {
 
            log!(cu.logger(), "Putter {:?} has no known peer!", putter);
 
            panic!("Putter {:?} has no known peer!");
 
            panic!("Putter {:?} has no known peer!", putter);
 
        }
 
    }
 
}
 

	
 
impl<T: Debug + std::cmp::Ord> Debug for VecSet<T> {
 
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
 
        f.debug_set().entries(self.vec.iter()).finish()
 
    }
 
}
 
impl Debug for Predicate {
 
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
 
        struct Assignment<'a>((&'a SpecVar, &'a SpecVal));
 
        impl Debug for Assignment<'_> {
 
            fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
 
                write!(f, "{:?}={:?}", (self.0).0, (self.0).1)
 
            }
 
        }
 
        f.debug_set().entries(self.assigned.iter().map(Assignment)).finish()
 
    }
 
}
 
impl serde::Serialize for SerdeProtocolDescription {
 
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
 
    where
 
        S: serde::Serializer,
 
    {
 
        let inner: &ProtocolDescription = &self.0;
 
        inner.serialize(serializer)
 
    }
 
}
 
impl<'de> serde::Deserialize<'de> for SerdeProtocolDescription {
 
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
 
    where
 
        D: serde::Deserializer<'de>,
 
    {
 
        let inner: ProtocolDescription = ProtocolDescription::deserialize(deserializer)?;
 
        Ok(Self(Arc::new(inner)))
 
    }
 
}
 
impl IdParts for SpecVar {
 
    fn id_parts(self) -> (ConnectorId, U32Suffix) {
 
        self.0.id_parts()
 
    }
 
}
 
impl Debug for SpecVar {
 
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
 
        let (a, b) = self.id_parts();
 
        write!(f, "v{}_{}", a, b)
 
    }
 
}
 
impl SpecVal {
 
    const FIRING: Self = SpecVal(1);
 
    const SILENT: Self = SpecVal(0);
 
    fn is_firing(self) -> bool {
 
        self == Self::FIRING
 
        // all else treated as SILENT
 
    }
 
    fn iter_domain() -> impl Iterator<Item = Self> {
 
        (0..).map(SpecVal)
 
    }
 
}
 
impl Debug for SpecVal {
 
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
 
        self.0.fmt(f)
 
    }
 
}
 
impl Default for IoByteBuffer {
 
    fn default() -> Self {
 
        let mut byte_vec = Vec::with_capacity(Self::CAPACITY);
 
        unsafe {
 
            // safe! this vector is guaranteed to have sufficient capacity
 
            byte_vec.set_len(Self::CAPACITY);
 
        }
 
        Self { byte_vec }
 
    }
 
}
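As a hedged aside, a zero-initializing variant of the fixed-capacity buffer above (a sketch only; it assumes the one-time cost of zeroing roughly 64 KiB is acceptable in exchange for never exposing logically uninitialized bytes):

struct ZeroedIoByteBuffer {
    byte_vec: Vec<u8>,
}

impl Default for ZeroedIoByteBuffer {
    fn default() -> Self {
        // vec![0u8; N] allocates and zero-fills in a single step
        Self { byte_vec: vec![0u8; u16::MAX as usize + 1000] }
    }
}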
 
impl IoByteBuffer {
 
    const CAPACITY: usize = u16::MAX as usize + 1000;
 
    fn as_mut_slice(&mut self) -> &mut [u8] {
 
        self.byte_vec.as_mut_slice()
 
    }
 
}
 

	
 
impl Debug for IoByteBuffer {
 
    fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
 
        write!(f, "IoByteBuffer")
 
    }
 
}
src/runtime/tests.rs
Show inline comments
 
@@ -1212,241 +1212,241 @@ fn xrouter_comp() {
 
        channel j -> k;
 
        channel l -> m;
 
        channel n -> o;
 
        channel p -> q;
 
        channel r -> s;
 
        channel t -> u;
 

	
 
        new replicator(a, d, f);
 
        new replicator(g, t, h);
 
        new lossy(e, l);
 
        new lossy(i, j);
 
        new replicator(m, b, p);
 
        new replicator(k, n, c);
 
        new merger(q, o, r);
 
        new sync_drain(u, s);
 
    }
 
    ";
 
    let pd = reowolf::ProtocolDescription::parse(pdl).unwrap();
 
    let mut c = file_logged_configured_connector(0, test_log_path, Arc::new(pd));
 

	
 
    // set up a session between (a) native, and (b) xrouter, connected by 3 ports.
 
    let [p0, g0] = c.new_port_pair();
 
    let [p1, g1] = c.new_port_pair();
 
    let [p2, g2] = c.new_port_pair();
 
    c.add_component(b"xrouter", &[g0, p1, p2]).unwrap();
 
    c.connect(None).unwrap();
 

	
 
    let now = std::time::Instant::now();
 
    for item in XROUTER_ITEMS.iter() {
 
        match item {
 
            XRouterItem::Silent => {}
 
            XRouterItem::GetA => {
 
                c.put(p0, TEST_MSG.clone()).unwrap();
 
                c.get(g1).unwrap();
 
            }
 
            XRouterItem::GetB => {
 
                c.put(p0, TEST_MSG.clone()).unwrap();
 
                c.get(g2).unwrap();
 
            }
 
        }
 
        assert_eq!(0, c.sync(SEC1).unwrap());
 
    }
 
    println!("COMP {:?}", now.elapsed());
 
}
 

	
 
#[test]
 
fn count_stream() {
 
    let test_log_path = Path::new("./logs/count_stream");
 
    let pdl = b"
 
    primitive count_stream(out o) {
 
        msg m = create(1);
 
        m[0] = 0;
 
        while(true) synchronous {
 
            put(o, m);
 
            m[0] += 1;
 
        }
 
    }
 
    ";
 
    let pd = reowolf::ProtocolDescription::parse(pdl).unwrap();
 
    let mut c = file_logged_configured_connector(0, test_log_path, Arc::new(pd));
 

	
 
    // set up a session between (a) native, and (b) count_stream, connected by 1 port.
 
    let [p0, g0] = c.new_port_pair();
 
    c.add_component(b"count_stream", &[p0]).unwrap();
 
    c.connect(None).unwrap();
 

	
 
    for expecting in 0u8..16 {
 
        c.get(g0).unwrap();
 
        c.sync(None).unwrap();
 
        assert_eq!(&[expecting], c.gotten(g0).unwrap().as_slice());
 
    }
 
}
 

	
 
#[test]
 
fn for_msg_byte() {
 
    let test_log_path = Path::new("./logs/for_msg_byte");
 
    let pdl = b"
 
    primitive for_msg_byte(out o) {
 
        byte i = 0;
 
        while(i<8) {
 
            msg m = create(1);
 
            m[0] = i;
 
            synchronous put(o, m);
 
            i++;
 
        }
 
    }
 
    ";
 
    let pd = reowolf::ProtocolDescription::parse(pdl).unwrap();
 
    let mut c = file_logged_configured_connector(0, test_log_path, Arc::new(pd));
 

	
 
    // set up a session between (a) native, and (b) for_msg_byte, connected by 1 port.
 
    let [p0, g0] = c.new_port_pair();
 
    c.add_component(b"for_msg_byte", &[p0]).unwrap();
 
    c.connect(None).unwrap();
 

	
 
    for expecting in 0u8..8 {
 
        c.get(g0).unwrap();
 
        c.sync(None).unwrap();
 
        assert_eq!(&[expecting], c.gotten(g0).unwrap().as_slice());
 
    }
 
    c.sync(None).unwrap();
 
}
 

	
 
#[test]
 
fn eq_causality() {
 
    let test_log_path = Path::new("./logs/eq_causality");
 
    let pdl = b"
 
    primitive eq(in a, in b, out c) {
 
        msg ma = null;
 
        msg mb = null;
 
        while(true) synchronous {
 
            if(fires(a)) {
 
                // b and c also fire!
 
                // left first!
 
                ma = get(a);
 
                put(c, ma);
 
                mb = get(b);
 
                assert(ma == mb);
 
            }
 
        }
 
    }
 
    ";
 
    let pd = reowolf::ProtocolDescription::parse(pdl).unwrap();
 
    let mut c = file_logged_configured_connector(0, test_log_path, Arc::new(pd));
 

	
 
    /*
 
    [native]p0-->g0[eq]p1--.
 
                 g1        |
 
                 ^---------`
 
    */
 
    let [p0, g0] = c.new_port_pair();
 
    let [p1, g1] = c.new_port_pair();
 
    c.add_component(b"eq", &[g0, g1, p1]).unwrap();
 

	
 
    /*
 
                  V--------.
 
                 g2        |
 
    [native]p2-->g3[eq]p3--`
 
    */
 
    let [p2, g2] = c.new_port_pair();
 
    let [p3, g3] = c.new_port_pair();
 
    c.add_component(b"eq", &[g3, g2, p3]).unwrap();
 
    c.connect(None).unwrap();
 

	
 
    for _ in 0..4 {
 
        // everything is fine with LEFT FIRST
 
        c.put(p0, TEST_MSG.clone()).unwrap();
 
        c.sync(MS100).unwrap();
 

	
 
        // no solution when left is NOT FIRST
 
        c.put(p2, TEST_MSG.clone()).unwrap();
 
        c.sync(MS100).unwrap_err();
 
    }
 
}
 

	
 
#[test]
 
fn eq_no_causality() {
 
    let test_log_path = Path::new("./logs/eq_no_causality");
 
    let pdl = b"
 
    composite eq(in<msg> a, in<msg> b, out<msg> c) {
 
        channel leftfirsto -> leftfirsti;
 
        new eqinner(a, b, c, leftfirsto, leftfirsti);
 
    }
 
    primitive eqinner(in<msg> a, in<msg> b, out<msg> c, out<msg> leftfirsto, in<msg> leftfirsti) {
 
        msg ma = null;
 
        msg mb = null;
 
        while(true) synchronous {
 
            if(fires(a)) {
 
                // b and c also fire!
 
                if(fires(leftfirsti)) {
 
                    // left first! DO USE DUMMY
 
                    ma = get(a);
 
                    put(c, ma);
 
                    mb = get(b);
 

	
 
                    // using dummy!
 
                    put(leftfirsto, ma);
 
                    get(leftfirsti);
 
                } else {
 
                    // right first! DON'T USE DUMMY
 
                    mb = get(b);
 
                    put(c, mb);
 
                    ma = get(a);
 
                }
 
                assert(ma == mb);
 
            }
 
        }
 
    }
 
    T some_function<T>(msg a, msg b) {
 
        T something = a;
 
        return something;
 
    }
 
    primitive quick_test(in<msg> a, in<msg> b) {
 
    primitive quick_test(in<int> a, in<int> b) {
 
        // msg ma = null;
 
        msg test1 = null;
 
        msg test2 = null;
 
        msg ma = some_function(test1, test2);
 
        auto test1 = 0;
 
        auto test2 = 0;
 
        auto ma = some_function(test1, test2);
 
        while(true) synchronous {
 
            if (fires(a)) {
 
                ma = get(a);
 
            }
 
            if (fires(b)) {
 
                ma = get(b);
 
            }
 
            if (fires(a) && fires(b)) {
 
                ma = get(a) + get(b);
 
            }
 
        }
 
    }
 
    ";
 
    let pd = reowolf::ProtocolDescription::parse(pdl).unwrap();
 
    let mut c = file_logged_configured_connector(0, test_log_path, Arc::new(pd));
 

	
 
    /*
 
    [native]p0-->g0[eq]p1--.
 
                 g1        |
 
                 ^---------`
 
    */
 
    let [p0, g0] = c.new_port_pair();
 
    let [p1, g1] = c.new_port_pair();
 
    c.add_component(b"eq", &[g0, g1, p1]).unwrap();
 

	
 
    /*
 
                  V--------.
 
                 g2        |
 
    [native]p2-->g3[eq]p3--`
 
    */
 
    let [p2, g2] = c.new_port_pair();
 
    let [p3, g3] = c.new_port_pair();
 
    c.add_component(b"eq", &[g3, g2, p3]).unwrap();
 
    c.connect(None).unwrap();
 

	
 
    for _ in 0..32 {
 
        // ok when they send
 
        c.put(p0, TEST_MSG.clone()).unwrap();
 
        c.put(p2, TEST_MSG.clone()).unwrap();
 
        c.sync(SEC1).unwrap();
 
        // ok when they don't
 
        c.sync(SEC1).unwrap();
 
    }
 
}