Changeset - aaeaf5986496
MH - 2021-03-24 20:07:06
contact@maxhenger.nl
some refactoring of comma-separated lexing, basic initial lexing tests, WIP on struct literals
14 files changed with 912 insertions and 396 deletions:
src/protocol/ast.rs
 
@@ -118,13 +118,13 @@ define_new_ast_id!(ConditionalExpressionId, ExpressionId, ConditionalExpression,
 
define_new_ast_id!(BinaryExpressionId, ExpressionId, BinaryExpression, Expression::Binary, expressions);
 
define_new_ast_id!(UnaryExpressionId, ExpressionId, UnaryExpression, Expression::Unary, expressions);
 
define_new_ast_id!(IndexingExpressionId, ExpressionId, IndexingExpression, Expression::Indexing, expressions);
 
define_new_ast_id!(SlicingExpressionId, ExpressionId, SlicingExpression, Expression::Slicing, expressions);
 
define_new_ast_id!(SelectExpressionId, ExpressionId, SelectExpression, Expression::Select, expressions);
 
define_new_ast_id!(ArrayExpressionId, ExpressionId, ArrayExpression, Expression::Array, expressions);
 
define_new_ast_id!(ConstantExpressionId, ExpressionId, ConstantExpression, Expression::Constant, expressions);
 
define_new_ast_id!(LiteralExpressionId, ExpressionId, LiteralExpression, Expression::Literal, expressions);
 
define_new_ast_id!(CallExpressionId, ExpressionId, CallExpression, Expression::Call, expressions);
 
define_new_ast_id!(VariableExpressionId, ExpressionId, VariableExpression, Expression::Variable, expressions);
 

	
 
// TODO: @cleanup - pub qualifiers can be removed once done
 
#[derive(Debug, serde::Serialize, serde::Deserialize)]
 
pub struct Heap {
 
@@ -246,19 +246,19 @@ impl Heap {
 
    ) -> ArrayExpressionId {
 
        ArrayExpressionId(
 
            self.expressions
 
                .alloc_with_id(|id| Expression::Array(f(ArrayExpressionId(id)))),
 
        )
 
    }
 
    pub fn alloc_constant_expression(
 
    pub fn alloc_literal_expression(
 
        &mut self,
 
        f: impl FnOnce(ConstantExpressionId) -> ConstantExpression,
 
    ) -> ConstantExpressionId {
 
        ConstantExpressionId(
 
        f: impl FnOnce(LiteralExpressionId) -> LiteralExpression,
 
    ) -> LiteralExpressionId {
 
        LiteralExpressionId(
 
            self.expressions.alloc_with_id(|id| {
 
                Expression::Constant(f(ConstantExpressionId(id)))
 
                Expression::Literal(f(LiteralExpressionId(id)))
 
            }),
 
        )
 
    }
 
    pub fn alloc_call_expression(
 
        &mut self,
 
        f: impl FnOnce(CallExpressionId) -> CallExpression,
 
@@ -934,22 +934,42 @@ impl Display for Type {
 
        } else {
 
            Ok(())
 
        }
 
    }
 
}
 

	
 
type CharacterData = Vec<u8>;
 
type IntegerData = i64;
 
type LiteralCharacter = Vec<u8>;
 
type LiteralInteger = i64; // TODO: @int_literal
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Constant {
 
pub enum Literal {
 
    Null, // message
 
    True,
 
    False,
 
    Character(CharacterData),
 
    Integer(IntegerData),
 
    Character(LiteralCharacter),
 
    Integer(LiteralInteger),
 
    Struct(LiteralStruct),
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LiteralStructField {
 
    // Phase 1: parser
 
    pub(crate) identifier: Identifier,
 
    pub(crate) value: ExpressionId,
 
    // Phase 2: linker
 
    pub(crate) field_idx: usize, // in struct definition
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct LiteralStruct {
 
    // Phase 1: parser
 
    pub(crate) identifier: NamespacedIdentifier,
 
    pub(crate) poly_args: Vec<ParserTypeId>,
 
    pub(crate) fields: Vec<LiteralStructField>,
 
    // Phase 2: linker
 
    pub(crate) definition: Option<DefinitionId>
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub enum Method {
 
    Get,
 
    Put,
 
@@ -1953,13 +1973,13 @@ pub enum Expression {
 
    Binary(BinaryExpression),
 
    Unary(UnaryExpression),
 
    Indexing(IndexingExpression),
 
    Slicing(SlicingExpression),
 
    Select(SelectExpression),
 
    Array(ArrayExpression),
 
    Constant(ConstantExpression),
 
    Literal(LiteralExpression),
 
    Call(CallExpression),
 
    Variable(VariableExpression),
 
}
 

	
 
impl Expression {
 
    pub fn as_assignment(&self) -> &AssignmentExpression {
 
@@ -2007,15 +2027,15 @@ impl Expression {
 
    pub fn as_array(&self) -> &ArrayExpression {
 
        match self {
 
            Expression::Array(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `ArrayExpression`"),
 
        }
 
    }
 
    pub fn as_constant(&self) -> &ConstantExpression {
 
    pub fn as_constant(&self) -> &LiteralExpression {
 
        match self {
 
            Expression::Constant(result) => result,
 
            Expression::Literal(result) => result,
 
            _ => panic!("Unable to cast `Expression` to `ConstantExpression`"),
 
        }
 
    }
 
    pub fn as_call(&self) -> &CallExpression {
 
        match self {
 
            Expression::Call(result) => result,
 
@@ -2048,13 +2068,13 @@ impl Expression {
 
            Expression::Binary(expr) => &expr.parent,
 
            Expression::Unary(expr) => &expr.parent,
 
            Expression::Indexing(expr) => &expr.parent,
 
            Expression::Slicing(expr) => &expr.parent,
 
            Expression::Select(expr) => &expr.parent,
 
            Expression::Array(expr) => &expr.parent,
 
            Expression::Constant(expr) => &expr.parent,
 
            Expression::Literal(expr) => &expr.parent,
 
            Expression::Call(expr) => &expr.parent,
 
            Expression::Variable(expr) => &expr.parent,
 
        }
 
    }
 
    // TODO: @cleanup
 
    pub fn parent_expr_id(&self) -> Option<ExpressionId> {
 
@@ -2072,13 +2092,13 @@ impl Expression {
 
            Expression::Binary(expr) => expr.parent = parent,
 
            Expression::Unary(expr) => expr.parent = parent,
 
            Expression::Indexing(expr) => expr.parent = parent,
 
            Expression::Slicing(expr) => expr.parent = parent,
 
            Expression::Select(expr) => expr.parent = parent,
 
            Expression::Array(expr) => expr.parent = parent,
 
            Expression::Constant(expr) => expr.parent = parent,
 
            Expression::Literal(expr) => expr.parent = parent,
 
            Expression::Call(expr) => expr.parent = parent,
 
            Expression::Variable(expr) => expr.parent = parent,
 
        }
 
    }
 
    // TODO: @cleanup
 
    pub fn get_type_mut(&mut self) -> &mut ConcreteType {
 
@@ -2088,13 +2108,13 @@ impl Expression {
 
            Expression::Binary(expr) => &mut expr.concrete_type,
 
            Expression::Unary(expr) => &mut expr.concrete_type,
 
            Expression::Indexing(expr) => &mut expr.concrete_type,
 
            Expression::Slicing(expr) => &mut expr.concrete_type,
 
            Expression::Select(expr) => &mut expr.concrete_type,
 
            Expression::Array(expr) => &mut expr.concrete_type,
 
            Expression::Constant(expr) => &mut expr.concrete_type,
 
            Expression::Literal(expr) => &mut expr.concrete_type,
 
            Expression::Call(expr) => &mut expr.concrete_type,
 
            Expression::Variable(expr) => &mut expr.concrete_type,
 
        }
 
    }
 
}
 

	
 
@@ -2106,13 +2126,13 @@ impl SyntaxElement for Expression {
 
            Expression::Binary(expr) => expr.position(),
 
            Expression::Unary(expr) => expr.position(),
 
            Expression::Indexing(expr) => expr.position(),
 
            Expression::Slicing(expr) => expr.position(),
 
            Expression::Select(expr) => expr.position(),
 
            Expression::Array(expr) => expr.position(),
 
            Expression::Constant(expr) => expr.position(),
 
            Expression::Literal(expr) => expr.position(),
 
            Expression::Call(expr) => expr.position(),
 
            Expression::Variable(expr) => expr.position(),
 
        }
 
    }
 
}
 

	
 
@@ -2339,24 +2359,24 @@ impl SyntaxElement for CallExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
 
pub struct ConstantExpression {
 
    pub this: ConstantExpressionId,
 
pub struct LiteralExpression {
 
    pub this: LiteralExpressionId,
 
    // Phase 1: parser
 
    pub position: InputPosition,
 
    pub value: Constant,
 
    pub value: Literal,
 
    // Phase 2: linker
 
    pub parent: ExpressionParent,
 
    // Phase 3: type checking
 
    pub concrete_type: ConcreteType,
 
}
 

	
 
impl SyntaxElement for ConstantExpression {
 
impl SyntaxElement for LiteralExpression {
 
    fn position(&self) -> InputPosition {
 
        self.position
 
    }
 
}
 

	
 
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
src/protocol/ast_printer.rs
 
@@ -609,23 +609,23 @@ impl ASTWriter {
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
 
            },
 
            Expression::Constant(expr) => {
 
            Expression::Literal(expr) => {
 
                self.kv(indent).with_id(PREFIX_CONST_EXPR_ID, expr.this.0.index)
 
                    .with_s_key("ConstantExpr");
 

	
 
                let val = self.kv(indent2).with_s_key("Value");
 
                match &expr.value {
 
                    Constant::Null => { val.with_s_val("null"); },
 
                    Constant::True => { val.with_s_val("true"); },
 
                    Constant::False => { val.with_s_val("false"); },
 
                    Constant::Character(char) => { val.with_ascii_val(char); },
 
                    Constant::Integer(int) => { val.with_disp_val(int); },
 
                    Literal::Null => { val.with_s_val("null"); },
 
                    Literal::True => { val.with_s_val("true"); },
 
                    Literal::False => { val.with_s_val("false"); },
 
                    Literal::Character(char) => { val.with_ascii_val(char); },
 
                    Literal::Integer(int) => { val.with_disp_val(int); },
 
                }
 

	
 
                self.kv(indent2).with_s_key("Parent")
 
                    .with_custom_val(|v| write_expression_parent(v, &expr.parent));
 
                self.kv(indent2).with_s_key("ConcreteType")
 
                    .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type));
src/protocol/eval.rs
 
@@ -65,31 +65,31 @@ impl Value {
 
                    Value::Message(MessageValue(Some(Payload::new(length as usize))))
 
                }
 
            }
 
            _ => unimplemented!(),
 
        }
 
    }
 
    fn from_constant(constant: &Constant) -> Value {
 
    fn from_constant(constant: &Literal) -> Value {
 
        match constant {
 
            Constant::Null => Value::Message(MessageValue(None)),
 
            Constant::True => Value::Boolean(BooleanValue(true)),
 
            Constant::False => Value::Boolean(BooleanValue(false)),
 
            Constant::Integer(val) => {
 
            Literal::Null => Value::Message(MessageValue(None)),
 
            Literal::True => Value::Boolean(BooleanValue(true)),
 
            Literal::False => Value::Boolean(BooleanValue(false)),
 
            Literal::Integer(val) => {
 
                // Pick the smallest integer value type that can hold the literal
 
                let val = *val;
 
                if val >= BYTE_MIN && val <= BYTE_MAX {
 
                    Value::Byte(ByteValue(val as i8))
 
                } else if val >= SHORT_MIN && val <= SHORT_MAX {
 
                    Value::Short(ShortValue(val as i16))
 
                } else if val >= INT_MIN && val <= INT_MAX {
 
                    Value::Int(IntValue(val as i32))
 
                } else {
 
                    Value::Long(LongValue(val))
 
                }
 
            }
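            // Added note (not part of this changeset): the widths above follow the
            // value types, assuming the BYTE/SHORT/INT bounds match i8/i16/i32. For
            // example, the literal 100 becomes a Byte, 1000 a Short, 100_000 an
            // Int, and anything wider falls through to Long.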
 
            Constant::Character(_data) => unimplemented!(),
 
            Literal::Character(_data) => unimplemented!(),
 
        }
 
    }
 
    fn set(&mut self, index: &Value, value: &Value) -> Option<Value> {
 
        // The index must be of integer type, and non-negative
 
        let the_index: usize;
 
        match index {
 
@@ -1514,13 +1514,13 @@ impl Store {
 
                let mut elements = Vec::new();
 
                for &elem in expr.elements.iter() {
 
                    elements.push(self.eval(h, ctx, elem)?);
 
                }
 
                todo!()
 
            }
 
            Expression::Constant(expr) => Ok(Value::from_constant(&expr.value)),
 
            Expression::Literal(expr) => Ok(Value::from_constant(&expr.value)),
 
            Expression::Call(expr) => match &expr.method {
 
                Method::Get => {
 
                    assert_eq!(1, expr.arguments.len());
 
                    let value = self.eval(h, ctx, expr.arguments[0])?;
 
                    match ctx.get(value.clone()) {
 
                        None => Err(EvalContinuation::BlockGet(value)),
src/protocol/inputsource.rs
 
@@ -170,12 +170,14 @@ impl InputPosition {
 
    // fn parse_error<S: ToString>(&self, message: S) -> ParseError {
 
    //     ParseError { position: *self, message: message.to_string(), backtrace: Backtrace::new() }
 
    // }
 
    fn eval_error<S: ToString>(&self, message: S) -> EvalError {
 
        EvalError { position: *self, message: message.to_string(), backtrace: Backtrace::new() }
 
    }
 

	
 
    pub(crate) fn col(&self) -> usize { self.column }
 
}
 

	
 
impl Default for InputPosition {
 
    fn default() -> Self {
 
        Self{ line: 1, column: 1, offset: 0 }
 
    }
 
@@ -199,17 +201,17 @@ pub enum ParseErrorType {
 
    Info,
 
    Error
 
}
 

	
 
#[derive(Debug)]
 
pub struct ParseErrorStatement {
 
    error_type: ParseErrorType,
 
    position: InputPosition,
 
    filename: String,
 
    context: String,
 
    message: String,
 
    pub(crate) error_type: ParseErrorType,
 
    pub(crate) position: InputPosition,
 
    pub(crate) filename: String,
 
    pub(crate) context: String,
 
    pub(crate) message: String,
 
}
 

	
 
impl ParseErrorStatement {
 
    fn from_source(error_type: ParseErrorType, source: &InputSource, position: InputPosition, msg: &str) -> Self {
 
        // Seek line start and end
 
        let line_start = position.offset - (position.column - 1);
 
@@ -274,13 +276,13 @@ impl fmt::Display for ParseErrorStatement {
 
        Ok(())
 
    }
 
}
 

	
 
#[derive(Debug)]
 
pub struct ParseError2 {
 
    statements: Vec<ParseErrorStatement>
 
    pub(crate) statements: Vec<ParseErrorStatement>
 
}
 

	
 
impl fmt::Display for ParseError2 {
 
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
        if self.statements.is_empty() {
 
            return Ok(())
src/protocol/lexer.rs
 
@@ -204,12 +204,87 @@ impl Lexer<'_> {
 
                return Err(self.error_at_pos(&format!("Expected {}", String::from_utf8_lossy(string))));
 
            }
 
            self.source.consume();
 
        }
 
        Ok(())
 
    }
 
    /// Generic comma-separated consumer. If the opening delimiter is not found

    /// then `Ok(None)` is returned. Otherwise it consumes the comma-separated

    /// values, allowing a trailing comma. If neither a comma nor the closing

    /// delimiter is found, a parse error with `expected_end_msg` is

    /// returned.
 
    fn consume_comma_separated<T, F>(
 
        &mut self, h: &mut Heap, open: u8, close: u8, expected_end_msg: &str, func: F
 
    ) -> Result<Option<Vec<T>>, ParseError2>
 
        where F: Fn(&mut Lexer, &mut Heap) -> Result<T, ParseError2>
 
    {
 
        if Some(open) != self.source.next() {
 
            return Ok(None)
 
        }
 

	
 
        self.source.consume();
 
        self.consume_whitespace(false)?;
 
        let mut elements = Vec::new();
 
        let mut had_comma = true;
 

	
 
        loop {
 
            if Some(close) == self.source.next() {
 
                self.source.consume();
 
                break;
 
            } else if !had_comma {
 
                return Err(ParseError2::new_error(
 
                    &self.source, self.source.pos(), expected_end_msg
 
                ));
 
            }
 

	
 
            elements.push(func(self, h)?);
 
            self.consume_whitespace(false)?;
 

	
 
            had_comma = self.source.next() == Some(b',');
 
            if had_comma {
 
                self.source.consume();
 
                self.consume_whitespace(false)?;
 
            }
 
        }
 

	
 
        Ok(Some(elements))
 
    }
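    // Illustrative sketch (not part of this changeset): a hypothetical call site
    // for the generic consumer above, parsing a bracketed identifier list. The
    // byte delimiters, message and closure are assumptions for illustration only.
    //
    //     let names = match self.consume_comma_separated(
    //         h, b'[', b']', "Expected the end of the identifier list",
    //         |lexer, _heap| lexer.consume_identifier()
    //     )? {
    //         Some(names) => names, // "[a, b, c,]" and "[]" both end up here
    //         None => Vec::new(),   // no '[' at the current input position
    //     };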
 
    /// Essentially the same as `consume_comma_separated`, but will not allocate

    /// memory. Returns `true` and leaves the input position at the end of the

    /// comma-separated list if it is well formed. Otherwise returns `false` and

    /// leaves the input position at a "random" position.
 
    fn consume_comma_separated_spilled_without_pos_recovery<F: Fn(&mut Lexer) -> bool>(
 
        &mut self, open: u8, close: u8, func: F
 
    ) -> bool {
 
        if Some(open) != self.source.next() {
 
            return true;
 
        }
 

	
 
        self.source.consume();
 
        if self.consume_whitespace(false).is_err() { return false };
 
        let mut had_comma = true;
 
        loop {
 
            if Some(close) == self.source.next() {
 
                self.source.consume();
 
                return true;
 
            } else if !had_comma {
 
                return false;
 
            }
 

	
 
            if !func(self) { return false; }
 
            if self.consume_whitespace(false).is_err() { return false };
 

	
 
            had_comma = self.source.next() == Some(b',');
 
            if had_comma {
 
                self.source.consume();
 
                if self.consume_whitespace(false).is_err() { return false; }
 
            }
 
        }
 

	
 
        true
 
    }
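    // Note (added for clarity, not in this changeset): this "spilled" variant is
    // meant for speculative lookahead such as `has_struct_literal` further down;
    // it only reports whether a well-formed list is present, and the caller is
    // expected to restore the input position itself via `source.seek(..)`.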
 
    fn consume_ident(&mut self) -> Result<Vec<u8>, ParseError2> {
 
        if !self.has_identifier() {
 
            return Err(self.error_at_pos("Expected identifier"));
 
        }
 
        let mut result = Vec::new();
 
        let mut next = self.source.next();
 
@@ -439,13 +514,12 @@ impl Lexer<'_> {
 

	
 
            self.consume_keyword(b"auto")?;
 
            ParserTypeVariant::Inferred
 
        } else if self.has_keyword(b"in") {
 
            // TODO: @cleanup: not particularly neat to have this special case
 
            //  where we enforce polyargs in the parser-phase
 
            // TODO: @hack, temporarily allow inferred port values
 
            self.consume_keyword(b"in")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?;
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error|  {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
@@ -453,13 +527,12 @@ impl Lexer<'_> {
 
                        "Type 'in' only allows for 1 polymorphic argument"
 
                    };
 
                    ParseError2::new_error(&self.source, pos, msg)
 
                })?;
 
            ParserTypeVariant::Input(poly_arg)
 
        } else if self.has_keyword(b"out") {
 
            // TODO: @hack, temporarily allow inferred port values
 
            self.consume_keyword(b"out")?;
 
            let poly_args = self.consume_polymorphic_args(h, allow_inference)?;
 
            let poly_arg = reduce_port_poly_args(h, &pos, poly_args)
 
                .map_err(|infer_error| {
 
                    let msg = if infer_error {
 
                        "Type inference is not allowed here"
 
@@ -569,128 +642,58 @@ impl Lexer<'_> {
 
    }
 

	
 
    /// Attempts to consume polymorphic arguments without returning them. If it
 
    /// doesn't encounter well-formed polymorphic arguments, then the input
 
    /// position is left at a "random" position.
 
    fn maybe_consume_poly_args_spilled_without_pos_recovery(&mut self) -> bool {
 
        if let Some(b'<') = self.source.next() {
 
            self.source.consume();
 
            if self.consume_whitespace(false).is_err() { return false; }
 
            loop {
 
                if !self.maybe_consume_type_spilled_without_pos_recovery() { return false; }
 
                if self.consume_whitespace(false).is_err() { return false; }
 
                let has_comma = self.source.next() == Some(b',');
 
                if has_comma {
 
                    self.source.consume();
 
                    if self.consume_whitespace(false).is_err() { return false; }
 
                }
 
                if let Some(b'>') = self.source.next() {
 
                    self.source.consume();
 
                    break;
 
                } else if !has_comma {
 
                    return false;
 
                }
 
            }
 
        }
 

	
 
        return true;
 
        self.consume_comma_separated_spilled_without_pos_recovery(
 
            b'<', b'>', |lexer| {
 
                lexer.maybe_consume_type_spilled_without_pos_recovery()
 
            })
 
    }
 

	
 
    /// Consumes polymorphic arguments and its delimiters if specified. The
 
    /// input position may be at whitespace. If polyargs are present then the
 
    /// whitespace and the args are consumed and the input position will be
 
    /// placed after the polyarg list. If polyargs are not present then the
 
    /// input position will remain unmodified and an empty vector will be
 
    /// Consumes polymorphic arguments and its delimiters if specified. If
 
    /// polyargs are present then the args are consumed and the input position
 
    /// will be placed after the polyarg list. If polyargs are not present then
 
    /// the input position will remain unmodified and an empty vector will be
 
    /// returned.
 
    ///
 
    /// Polymorphic arguments represent the specification of the parametric
 
    /// types of a polymorphic type: they specify the value of the polymorphic
 
    /// type's polymorphic variables.
 
    fn consume_polymorphic_args(&mut self, h: &mut Heap, allow_inference: bool) -> Result<Vec<ParserTypeId>, ParseError2> {
 
        let backup_pos = self.source.pos();
 
        self.consume_whitespace(false)?;
 
        if let Some(b'<') = self.source.next() {
 
            // Has polymorphic args, at least one type must be specified
 
            self.source.consume();
 
            self.consume_whitespace(false)?;
 
            let mut poly_args = Vec::new();
 

	
 
            loop {
 
                // TODO: @cleanup, remove the no_more_types var
 
                poly_args.push(self.consume_type2(h, allow_inference)?);
 
                self.consume_whitespace(false)?;
 

	
 
                let has_comma = self.source.next() == Some(b',');
 
                if has_comma {
 
                    // We might not actually be getting more types when the
 
                    // comma is at the end of the line, and we get a closing
 
                    // angular bracket on the next line.
 
                    self.source.consume();
 
                    self.consume_whitespace(false)?;
 
                }
 

	
 
                if let Some(b'>') = self.source.next() {
 
                    self.source.consume();
 
                    break;
 
                } else if !has_comma {
 
                    return Err(ParseError2::new_error(
 
                        &self.source, self.source.pos(),
 
                        "Expected the end of the polymorphic argument list"
 
                    ))
 
                }
 
        match self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic argument list",
 
            |lexer, heap| lexer.consume_type2(heap, allow_inference)
 
        )? {
 
            Some(poly_args) => Ok(poly_args),
 
            None => {
 
                self.source.seek(backup_pos);
 
                Ok(vec![])
 
            }
 

	
 
            Ok(poly_args)
 
        } else {
 
            // No polymorphic args
 
            self.source.seek(backup_pos);
 
            Ok(vec!())
 
        }
 
    }
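    // Illustration (type names are assumptions, not from this changeset): for a
    // fragment such as `Pair<byte, auto>` the polymorphic arguments are the two
    // parser types `byte` and `auto` (the latter only when `allow_inference` is
    // true); for a bare `Pair` the lexer seeks back and returns an empty vector.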
 

	
 
    /// Consumes polymorphic variables. These are identifiers that are used
 
    /// within polymorphic types. The input position may be at whitespace. If
 
    /// polymorphic variables are present then the whitespace, wrapping
 
    /// delimiters and the polymorphic variables are consumed. Otherwise the
 
    /// input position will stay where it is. If no polymorphic variables are
 
    /// present then an empty vector will be returned.
 
    fn consume_polymorphic_vars(&mut self) -> Result<Vec<Identifier>, ParseError2> {
 
    fn consume_polymorphic_vars(&mut self, h: &mut Heap) -> Result<Vec<Identifier>, ParseError2> {
 
        let backup_pos = self.source.pos();
 
        if let Some(b'<') = self.source.next() {
 
            // Found the opening delimiter, we want at least one polyvar
 
            self.source.consume();
 
            self.consume_whitespace(false)?;
 
            let mut poly_vars = Vec::new();
 

	
 
            loop {
 
                poly_vars.push(self.consume_identifier()?);
 
                self.consume_whitespace(false)?;
 

	
 
                let has_comma = self.source.next() == Some(b',');
 
                if has_comma {
 
                    // We may get another variable
 
                    self.source.consume();
 
                    self.consume_whitespace(false)?;
 
                }
 

	
 
                if let Some(b'>') = self.source.next() {
 
                    self.source.consume();
 
                    break;
 
                } else if !has_comma {
 
                    return Err(ParseError2::new_error(
 
                        &self.source, self.source.pos(),
 
                        "Expected the end of the polymorphic variable list"
 
                    ))
 
                }
 
        match self.consume_comma_separated(
 
            h, b'<', b'>', "Expected the end of the polymorphic variable list",
 
            |lexer, heap| lexer.consume_identifier()
 
        )? {
 
            Some(poly_vars) => Ok(poly_vars),
 
            None => {
 
                self.source.seek(backup_pos);
 
                Ok(vec!())
 
            }
 

	
 
            Ok(poly_vars)
 
        } else {
 
            // No polymorphic args
 
            self.source.seek(backup_pos);
 
            Ok(vec!())
 
        }
 
    }
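    // Illustration (assumed syntax): in a definition header like `struct Pair<T1, T2>`
    // the polymorphic variables are the identifiers `T1` and `T2`, in contrast to the
    // polymorphic arguments above, which are the concrete types supplied at a use site.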
 

	
 
    // Parameters
 

	
 
    fn consume_parameter(&mut self, h: &mut Heap) -> Result<ParameterId, ParseError2> {
 
@@ -699,33 +702,25 @@ impl Lexer<'_> {
 
        let position = self.source.pos();
 
        let identifier = self.consume_identifier()?;
 
        let id =
 
            h.alloc_parameter(|this| Parameter { this, position, parser_type, identifier });
 
        Ok(id)
 
    }
 
    fn consume_parameters(
 
        &mut self,
 
        h: &mut Heap,
 
        params: &mut Vec<ParameterId>,
 
    ) -> Result<(), ParseError2> {
 
        self.consume_string(b"(")?;
 
        self.consume_whitespace(false)?;
 
        if !self.has_string(b")") {
 
            while self.source.next().is_some() {
 
                params.push(self.consume_parameter(h)?);
 
                self.consume_whitespace(false)?;
 
                if self.has_string(b")") {
 
                    break;
 
                }
 
                self.consume_string(b",")?;
 
                self.consume_whitespace(false)?;
 
    fn consume_parameters(&mut self, h: &mut Heap) -> Result<Vec<ParameterId>, ParseError2> {
 
        match self.consume_comma_separated(
 
            h, b'(', b')', "Expected the end of the parameter list",
 
            |lexer, heap| lexer.consume_parameter(heap)
 
        )? {
 
            Some(params) => Ok(params),
 
            None => {
 
                Err(ParseError2::new_error(
 
                    &self.source, self.source.pos(),
 
                    "Expected a parameter list"
 
                ))
 
            }
 
        }
 
        self.consume_string(b")")?;
 

	
 
        Ok(())
 
    }
 

	
 
    // ====================
 
    // Expressions
 
    // ====================
 

	
 
@@ -1334,18 +1329,17 @@ impl Lexer<'_> {
 
        if self.has_string(b"(") {
 
            return self.consume_paren_expression(h);
 
        }
 
        if self.has_string(b"{") {
 
            return Ok(self.consume_array_expression(h)?.upcast());
 
        }
 
        if self.has_constant()
 
            || self.has_keyword(b"null")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
        {
 
            return Ok(self.consume_constant_expression(h)?.upcast());
 
        if self.has_builtin_literal() {
 
            return Ok(self.consume_builtin_literal_expression(h)?.upcast());
 
        }
 
        if self.has_struct_literal() {
 
            return Ok(self.consume_struct_literal_expression(h)?.upcast());
 
        }
 
        if self.has_call_expression() {
 
            return Ok(self.consume_call_expression(h)?.upcast());
 
        }
 
        Ok(self.consume_variable_expression(h)?.upcast())
 
    }
 
@@ -1371,30 +1365,33 @@ impl Lexer<'_> {
 
            position,
 
            elements,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 
    fn has_constant(&self) -> bool {
 
    fn has_builtin_literal(&self) -> bool {
 
        is_constant(self.source.next())
 
            || self.has_keyword(b"null")
 
            || self.has_keyword(b"true")
 
            || self.has_keyword(b"false")
 
    }
 
    fn consume_constant_expression(
 
    fn consume_builtin_literal_expression(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ConstantExpressionId, ParseError2> {
 
    ) -> Result<LiteralExpressionId, ParseError2> {
 
        let position = self.source.pos();
 
        let value;
 
        if self.has_keyword(b"null") {
 
            self.consume_keyword(b"null")?;
 
            value = Constant::Null;
 
            value = Literal::Null;
 
        } else if self.has_keyword(b"true") {
 
            self.consume_keyword(b"true")?;
 
            value = Constant::True;
 
            value = Literal::True;
 
        } else if self.has_keyword(b"false") {
 
            self.consume_keyword(b"false")?;
 
            value = Constant::False;
 
            value = Literal::False;
 
        } else if self.source.next() == Some(b'\'') {
 
            self.source.consume();
 
            let mut data = Vec::new();
 
            let mut next = self.source.next();
 
            while next != Some(b'\'') && (is_vchar(next) || next == Some(b' ')) {
 
                data.push(next.unwrap());
 
@@ -1402,28 +1399,87 @@ impl Lexer<'_> {
 
                next = self.source.next();
 
            }
 
            if next != Some(b'\'') || data.is_empty() {
 
                return Err(self.error_at_pos("Expected character constant"));
 
            }
 
            self.source.consume();
 
            value = Constant::Character(data);
 
            value = Literal::Character(data);
 
        } else {
 
            if !self.has_integer() {
 
                return Err(self.error_at_pos("Expected integer constant"));
 
            }
 

	
 
            value = Constant::Integer(self.consume_integer()?);
 
            value = Literal::Integer(self.consume_integer()?);
 
        }
 
        Ok(h.alloc_constant_expression(|this| ConstantExpression {
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression {
 
            this,
 
            position,
 
            value,
 
            parent: ExpressionParent::None,
 
            concrete_type: ConcreteType::default(),
 
        }))
 
    }
 

	
 
    fn has_struct_literal(&mut self) -> bool {
 
        // A struct literal is written as:
 
        //      namespace::StructName<maybe_one_of_these, auto>{ field: expr }
 
        // We will parse up until the opening brace to see if we're dealing with
 
        // a struct literal.
 
        let backup_pos = self.source.pos();
 
        let result = self.consume_namespaced_identifier_spilled().is_ok() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.maybe_consume_poly_args_spilled_without_pos_recovery() &&
 
            self.consume_whitespace(false).is_ok() &&
 
            self.source.next() == Some(b'{');
 

	
 
        self.source.seek(backup_pos);
 
        return result;
 
    }
 

	
 
    fn consume_struct_literal_expression(&mut self, h: &mut Heap) -> Result<LiteralExpressionId, ParseError2> {
 
        // Consume identifier and polymorphic arguments
 
        let position = self.source.pos();
 
        let identifier = self.consume_namespaced_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_args = self.consume_polymorphic_args(h, true)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume fields
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let identifier = lexer.consume_identifier()?;
 
                lexer.consume_whitespace(false)?;
 
                lexer.consume_string(b":")?;
 
                lexer.consume_whitespace(false)?;
 
                let value = lexer.consume_expression(heap)?;
 

	
 
                Ok(LiteralStructField{ identifier, value, field_idx: 0 })
 
            }
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError2::new_error(
 
                self.source, self.source.pos(),
 
                "A struct literal must be followed by its field values"
 
            ))
 
        };
 

	
 
        Ok(h.alloc_literal_expression(|this| LiteralExpression{
 
            this,
 
            position,
 
            value: Literal::Struct(LiteralStruct{
 
                identifier,
 
                poly_args,
 
                fields,
 
                definition: None,
 
            }),
 
            parent: ExpressionParent::None,
 
            concrete_type: Default::default()
 
        }))
 
    }
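    // Illustration (identifiers are made up, not from this changeset): a source
    // fragment such as `Point<auto>{ x: 1, y: 2 }` would produce a
    // `Literal::Struct` whose `poly_args` holds the single inferred type and
    // whose two `LiteralStructField`s keep `field_idx: 0` and `definition: None`
    // until the linker phase resolves them against the struct definition.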
 

	
 
    fn has_call_expression(&mut self) -> bool {
 
        // We need to prevent ambiguity with various operators (because we may
 
        // be specifying polymorphic variables) and variables.
 
        if self.has_builtin_keyword() {
 
            return true;
 
        }
 
@@ -1671,28 +1727,32 @@ impl Lexer<'_> {
 
        }
 
    }
 
    fn consume_channel_statement(
 
        &mut self,
 
        h: &mut Heap,
 
    ) -> Result<ChannelStatementId, ParseError2> {
 
        // Consume channel statement and polymorphic argument if specified
 
        // Consume channel statement and polymorphic argument if specified.
 
        // Needs a tiny bit of special parsing to ensure the right amount of
 
        // whitespace is present.
 
        let position = self.source.pos();
 
        self.consume_keyword(b"channel")?;
 

	
 
        let expect_whitespace = self.source.next() != Some(b'<');
 
        self.consume_whitespace(expect_whitespace)?;
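        // Added example (surface syntax assumed): `channel<msg> a -> b;` has no
        // whitespace between the keyword and '<', so mandatory whitespace is only
        // enforced when no polymorphic argument list follows, as in `channel a -> b;`.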
 
        let poly_args = self.consume_polymorphic_args(h, true)?;
 
        let poly_arg_id = match poly_args.len() {
 
            0 => h.alloc_parser_type(|this| ParserType{
 
                this, pos: position.clone(), variant: ParserTypeVariant::Inferred,
 
            }),
 
            1 => poly_args[0],
 
            _ => return Err(ParseError2::new_error(
 
                &self.source, self.source.pos(),
 
                "port construction using 'channel' accepts up to 1 polymorphic argument"
 
            ))
 
        };
 
        self.consume_whitespace(true)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume the output port
 
        let out_parser_type = h.alloc_parser_type(|this| ParserType{
 
            this, pos: position.clone(), variant: ParserTypeVariant::Output(poly_arg_id)
 
        });
 
        let out_identifier = self.consume_identifier()?;
 
@@ -1700,13 +1760,12 @@ impl Lexer<'_> {
 
        // Consume the "->" syntax
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b"->")?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume the input port
 
        // TODO: Unsure about this, both ports refer to the same ParserType, is this ok?
 
        let in_parser_type = h.alloc_parser_type(|this| ParserType{
 
            this, pos: position.clone(), variant: ParserTypeVariant::Input(poly_arg_id)
 
        });
 
        let in_identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        self.consume_string(b";")?;
 
@@ -1998,54 +2057,33 @@ impl Lexer<'_> {
 
        // Parse "struct" keyword, optional polyvars and its identifier
 
        let struct_pos = self.source.pos();
 
        self.consume_keyword(b"struct")?;
 
        self.consume_whitespace(true)?;
 
        let struct_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars()?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Parse struct fields
 
        self.consume_string(b"{")?;
 
        let mut next = self.source.next();
 
        let mut fields = Vec::new();
 
        while next.is_some() {
 
            let char = next.unwrap();
 
            if char == b'}' {
 
                break;
 
        let fields = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected the end of the list of struct fields",
 
            |lexer, heap| {
 
                let position = lexer.source.pos();
 
                let parser_type = lexer.consume_type2(heap, false)?;
 
                lexer.consume_whitespace(true)?;
 
                let field = lexer.consume_identifier()?;
 

	
 
                Ok(StructFieldDefinition{ position, field, parser_type })
 
            }
 

	
 
            // Consume field definition
 
            self.consume_whitespace(false)?;
 
            let field_position = self.source.pos();
 
            let field_parser_type = self.consume_type2(h, false)?;
 
            self.consume_whitespace(true)?;
 
            let field_ident = self.consume_identifier()?;
 
            self.consume_whitespace(false)?;
 

	
 
            fields.push(StructFieldDefinition{
 
                position: field_position,
 
                field: field_ident,
 
                parser_type: field_parser_type,
 
            });
 

	
 
            // If we have a comma, then we may or may not have another field
 
            // definition. Otherwise we expect the struct to be fully defined
 
            // and expect a closing brace
 
            next = self.source.next();
 
            if let Some(b',') = next {
 
                self.source.consume();
 
                self.consume_whitespace(false)?;
 
                next = self.source.next();
 
            } else {
 
                break;
 
            }
 
        }
 

	
 
        // End of struct definition, so we expect a closing brace
 
        self.consume_string(b"}")?;
 
        )? {
 
            Some(fields) => fields,
 
            None => return Err(ParseError2::new_error(
 
                self.source, struct_pos,
 
                "An struct definition must be followed by its fields"
 
            )),
 
        };
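        // Illustration (assumed syntax, field form inferred from the closure above):
        // `struct Point{ byte x, byte y }` yields two field definitions, each
        // recording its position, parser type and identifier; a trailing comma
        // after the last field is accepted by `consume_comma_separated`.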
 

	
 
        // Valid struct definition
 
        Ok(h.alloc_struct_definition(|this| StructDefinition{
 
            this,
 
            position: struct_pos,
 
            identifier: struct_ident,
 
@@ -2057,83 +2095,64 @@ impl Lexer<'_> {
 
        // Parse "enum" keyword, optional polyvars and its identifier
 
        let enum_pos = self.source.pos();
 
        self.consume_keyword(b"enum")?;
 
        self.consume_whitespace(true)?;
 
        let enum_ident = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars()?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Parse enum variants
 
        self.consume_string(b"{")?;
 
        let mut next = self.source.next();
 
        let mut variants = Vec::new();
 
        while next.is_some() {
 
            let char = next.unwrap();
 
            if char == b'}' {
 
                break;
 
            }
 

	
 
            // Consume variant identifier
 
            self.consume_whitespace(false)?;
 
            let variant_position = self.source.pos();
 
            let variant_ident = self.consume_identifier()?;
 
            self.consume_whitespace(false)?;
 

	
 
            // Consume variant (tag) value: may be nothing, in which case it is
 
            // assigned automatically, may be a constant integer, or an embedded
 
            // type as value, resulting in a tagged union
 
            next = self.source.next();
 
            let variant_value = if let Some(b',') = next {
 
                EnumVariantValue::None
 
            } else if let Some(b'=') = next {
 
                self.source.consume();
 
                self.consume_whitespace(false)?;
 
                if !self.has_integer() {
 
                    return Err(self.error_at_pos("expected integer"));
 
                }
 
                let variant_int = self.consume_integer()?;
 
                self.consume_whitespace(false)?;
 
                EnumVariantValue::Integer(variant_int)
 
            } else if let Some(b'(') = next {
 
                self.source.consume();
 
                self.consume_whitespace(false)?;
 
                let variant_type = self.consume_type2(h, false)?;
 
                self.consume_whitespace(false)?;
 
                self.consume_string(b")")?;
 
                self.consume_whitespace(false)?;
 
                EnumVariantValue::Type(variant_type)
 
            } else {
 
                return Err(self.error_at_pos("expected ',', '=', or '('"));
 
            };
 

	
 
            variants.push(EnumVariantDefinition{
 
                position: variant_position,
 
                identifier: variant_ident,
 
                value: variant_value
 
            });
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        let variants = match self.consume_comma_separated(
 
            h, b'{', b'}', "Expected end of enum variant list",
 
            |lexer, heap| {
 
                // Variant identifier
 
                let position = lexer.source.pos();
 
                let identifier = lexer.consume_identifier()?;
 
                lexer.consume_whitespace(false)?;
 

	
 
                // Optional variant value/type
 
                let next = lexer.source.next();
 
                let value = match next {
 
                    Some(b',') => {
 
                        // Do not consume, let `consume_comma_separated` handle
 
                        // the next item
 
                        EnumVariantValue::None
 
                    },
 
                    Some(b'=') => {
 
                        // Integer value
 
                        lexer.source.consume();
 
                        lexer.consume_whitespace(false)?;
 
                        if !lexer.has_integer() {
 
                            return Err(lexer.error_at_pos("expected integer"))
 
                        }
 
                        let value = lexer.consume_integer()?;
 
                        EnumVariantValue::Integer(value)
 
                    },
 
                    Some(b'(') => {
 
                        // Embedded type
 
                        lexer.source.consume();
 
                        lexer.consume_whitespace(false)?;
 
                        let embedded_type = lexer.consume_type2(heap, false)?;
 
                        lexer.consume_whitespace(false)?;
 
                        lexer.consume_string(b")")?;
 
                        EnumVariantValue::Type(embedded_type)
 
                    },
 
                    _ => {
 
                        return Err(lexer.error_at_pos("Expected ',', '=', or '('"));
 
                    }
 
                };
 

	
 
            // If we have a comma, then we may or may not have another variant,
 
            // otherwise we expect the enum is fully defined
 
            next = self.source.next();
 
            if let Some(b',') = next {
 
                self.source.consume();
 
                self.consume_whitespace(false)?;
 
                next = self.source.next();
 
            } else {
 
                break;
 
                Ok(EnumVariantDefinition{ position, identifier, value })
 
            }
 
        }
 

	
 
        self.consume_string(b"}")?;
 

	
 
        // An enum without variants is somewhat valid, but completely useless
 
        // within the language
 
        if variants.is_empty() {
 
            return Err(ParseError2::new_error(self.source, enum_pos, "enum definition without variants"));
 
        }
 
        )? {
 
            Some(variants) => variants,
 
            None => return Err(ParseError2::new_error(
 
                self.source, enum_pos,
 
                "An enum definition must be followed by its variants"
 
            )),
 
        };
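        // Illustration (assumed syntax): the closure above accepts bare variants,
        // `= <integer>` tags and `(<type>)` payloads alike, so something like
        // `enum Shape{ Circle, Square = 4, Custom(byte) }` parses each variant
        // into the corresponding `EnumVariantValue`.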
 

	
 
        Ok(h.alloc_enum_definition(|this| EnumDefinition{
 
            this,
 
            position: enum_pos,
 
            identifier: enum_ident,
 
            poly_vars,
 
@@ -2152,18 +2171,17 @@ impl Lexer<'_> {
 
        // Parse keyword, optional polyvars and the identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"composite")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars()?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let mut parameters = Vec::new();
 
        self.consume_parameters(h, &mut parameters)?;
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Parse body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_component(|this| Component { 
 
            this,
 
@@ -2179,18 +2197,17 @@ impl Lexer<'_> {
 
        // Consume keyword, optional polyvars and identifier
 
        let position = self.source.pos();
 
        self.consume_keyword(b"primitive")?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars()?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let mut parameters = Vec::new();
 
        self.consume_parameters(h, &mut parameters)?;
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_component(|this| Component { 
 
            this,
 
@@ -2206,18 +2223,17 @@ impl Lexer<'_> {
 
        // Consume return type, optional polyvars and identifier
 
        let position = self.source.pos();
 
        let return_type = self.consume_type2(h, false)?;
 
        self.consume_whitespace(true)?;
 
        let identifier = self.consume_identifier()?;
 
        self.consume_whitespace(false)?;
 
        let poly_vars = self.consume_polymorphic_vars()?;
 
        let poly_vars = self.consume_polymorphic_vars(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume parameters
 
        let mut parameters = Vec::new();
 
        self.consume_parameters(h, &mut parameters)?;
 
        let parameters = self.consume_parameters(h)?;
 
        self.consume_whitespace(false)?;
 

	
 
        // Consume body
 
        let body = self.consume_block_statement(h)?;
 
        Ok(h.alloc_function(|this| Function {
 
            this,
 
@@ -2318,85 +2334,58 @@ impl Lexer<'_> {
 
                module_id: None,
 
            }))
 
        } else if self.has_string(b"::") {
 
            self.consume_string(b"::")?;
 
            self.consume_whitespace(false)?;
 

	
 
            if let Some(b'{') = self.source.next() {
 
                // Import specific symbols, optionally with an alias
 
                self.source.consume();
 
                self.consume_whitespace(false)?;
 

	
 
                let mut symbols = Vec::new();
 
                let mut next = self.source.next();
 

	
 
                while next.is_some() {
 
                    let char = next.unwrap();
 
                    if char == b'}' {
 
                        break;
 
                    }
 

	
 
                    let symbol_position = self.source.pos();
 
                    let symbol_name = self.consume_ident()?;
 
                    self.consume_whitespace(false)?;
 
                    if self.has_string(b"as") {
 
                        // Symbol has an alias
 
                        self.consume_string(b"as")?;
 
                        self.consume_whitespace(true)?;
 
                        let symbol_alias = self.consume_ident()?;
 

	
 
                        symbols.push(AliasedSymbol{
 
                            position: symbol_position,
 
                            name: symbol_name,
 
                            alias: symbol_alias,
 
                            definition_id: None,
 
                        });
 
                    } else {
 
                        // Symbol does not have an alias
 
                        symbols.push(AliasedSymbol{
 
                            position: symbol_position,
 
                            name: symbol_name.clone(),
 
                            alias: symbol_name,
 
                            definition_id: None,
 
                        });
 
                    }
 

	
 
                    // A comma indicates that we may have another symbol coming
 
                    // up (not necessary), but if not present then we expect the
 
                    // end of the symbol list
 
                    self.consume_whitespace(false)?;
 

	
 
                    next = self.source.next();
 
                    if let Some(b',') = next {
 
                        self.source.consume();
 
                        self.consume_whitespace(false)?;
 
                        next = self.source.next();
 
                    } else {
 
                        break;
 
                    }
 
                }
 

	
 
                if let Some(b'}') = next {
 
                    // We are fine, push the imported symbols
 
                    self.source.consume();
 
                    if symbols.is_empty() {
 
                        return Err(ParseError2::new_error(self.source, position, "empty symbol import list"));
 
            let next = self.source.next();
 
            if Some(b'{') == next {
 
                let symbols = match self.consume_comma_separated(
 
                    h, b'{', b'}', "Expected end of import list",
 
                    |lexer, heap| {
 
                        // Symbol name
 
                        let position = lexer.source.pos();
 
                        let name = lexer.consume_ident()?;
 
                        lexer.consume_whitespace(false)?;
 

	
 
                        // Symbol alias
 
                        if lexer.has_string(b"as") {
 
                            // With alias
 
                            lexer.consume_string(b"as")?;
 
                            lexer.consume_whitespace(true)?;
 
                            let alias = lexer.consume_ident()?;
 

	
 
                            Ok(AliasedSymbol{
 
                                position,
 
                                name,
 
                                alias,
 
                                definition_id: None
 
                            })
 
                        } else {
 
                            // Without alias
 
                            Ok(AliasedSymbol{
 
                                position,
 
                                name: name.clone(),
 
                                alias: name,
 
                                definition_id: None
 
                            })
 
                        }
 
                    }
 
                )? {
 
                    Some(symbols) => symbols,
 
                    None => unreachable!(), // because we checked for opening '{'
 
                };
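                // Illustration (assumed syntax, module path elided): after the `::`
                // the list form `{ foo, bar as baz }` imports selected symbols with
                // optional aliases, while `*` imports every symbol from the module
                // without aliasing.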
 

	
 
                    h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                        this,
 
                        position,
 
                        module_name: value,
 
                        module_id: None,
 
                        symbols,
 
                    }))
 
                } else {
 
                    return Err(self.error_at_pos("Expected '}'"));
 
                }
 
            } else if let Some(b'*') = self.source.next() {
 
                // Import all symbols without alias
 
                h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    position,
 
                    module_name: value,
 
                    module_id: None,
 
                    symbols,
 
                }))
 
            } else if Some(b'*') == next {
 
                self.source.consume();
 
                h.alloc_import(|this| Import::Symbols(ImportSymbols{
 
                    this,
 
                    position,
 
                    module_name: value,
 
                    module_id: None,
src/protocol/mod.rs
 
mod arena;
 
// mod ast;
 
mod eval;
 
pub(crate) mod inputsource;
 
// mod lexer;
 
mod parser;
 
#[cfg(test)] mod tests;
 

	
 
// TODO: Remove when not benchmarking
 
pub(crate) mod ast;
 
pub(crate) mod ast_printer;
 
pub(crate) mod lexer;
 

	
src/protocol/parser/depth_visitor.rs
 
@@ -172,13 +172,13 @@ pub(crate) trait Visitor: Sized {
 
    fn visit_call_expression(&mut self, h: &mut Heap, expr: CallExpressionId) -> VisitorResult {
 
        recursive_call_expression(self, h, expr)
 
    }
 
    fn visit_constant_expression(
 
        &mut self,
 
        _h: &mut Heap,
 
        _expr: ConstantExpressionId,
 
        _expr: LiteralExpressionId,
 
    ) -> VisitorResult {
 
        Ok(())
 
    }
 
    fn visit_variable_expression(
 
        &mut self,
 
        _h: &mut Heap,
 
@@ -431,13 +431,13 @@ fn recursive_expression<T: Visitor>(
 
        Expression::Binary(expr) => this.visit_binary_expression(h, expr.this),
 
        Expression::Unary(expr) => this.visit_unary_expression(h, expr.this),
 
        Expression::Indexing(expr) => this.visit_indexing_expression(h, expr.this),
 
        Expression::Slicing(expr) => this.visit_slicing_expression(h, expr.this),
 
        Expression::Select(expr) => this.visit_select_expression(h, expr.this),
 
        Expression::Array(expr) => this.visit_array_expression(h, expr.this),
 
        Expression::Constant(expr) => this.visit_constant_expression(h, expr.this),
 
        Expression::Literal(expr) => this.visit_constant_expression(h, expr.this),
 
        Expression::Call(expr) => this.visit_call_expression(h, expr.this),
 
        Expression::Variable(expr) => this.visit_variable_expression(h, expr.this),
 
    }
 
}
 

	
 
fn recursive_assignment_expression<T: Visitor>(
 
@@ -1302,13 +1302,13 @@ impl Visitor for AssignableExpressions {
 
            recursive_call_expression(self, h, expr)
 
        }
 
    }
 
    fn visit_constant_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: ConstantExpressionId,
 
        expr: LiteralExpressionId,
 
    ) -> VisitorResult {
 
        if self.assignable {
 
            self.error(h[expr].position)
 
        } else {
 
            Ok(())
 
        }
 
@@ -1421,13 +1421,13 @@ impl Visitor for IndexableExpressions {
 
        self.indexable = old;
 
        Ok(())
 
    }
 
    fn visit_constant_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: ConstantExpressionId,
 
        expr: LiteralExpressionId,
 
    ) -> VisitorResult {
 
        if self.indexable {
 
            self.error(h[expr].position)
 
        } else {
 
            Ok(())
 
        }
 
@@ -1529,13 +1529,13 @@ impl Visitor for SelectableExpressions {
 
        self.selectable = old;
 
        Ok(())
 
    }
 
    fn visit_constant_expression(
 
        &mut self,
 
        h: &mut Heap,
 
        expr: ConstantExpressionId,
 
        expr: LiteralExpressionId,
 
    ) -> VisitorResult {
 
        if self.selectable {
 
            self.error(h[expr].position)
 
        } else {
 
            Ok(())
 
        }
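
The hunks above only change the id type handed to the trait's constant/literal hook: the hook keeps its name and its default Ok(()) implementation, so implementors merely see a LiteralExpressionId where a ConstantExpressionId used to be. A rough illustration (not part of this changeset, and assuming the remaining Visitor methods keep their default implementations):

    // Hypothetical visitor that counts literal expressions after the rename.
    struct LiteralCounter { count: usize }

    impl Visitor for LiteralCounter {
        fn visit_constant_expression(
            &mut self,
            _h: &mut Heap,
            _expr: LiteralExpressionId, // previously ConstantExpressionId
        ) -> VisitorResult {
            self.count += 1;
            Ok(())
        }
    }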
src/protocol/parser/type_resolver.rs
Show inline comments
 
@@ -1194,13 +1194,13 @@ impl Visitor2 for TypeResolvingVisitor {
 
            self.visit_expr(ctx, element_id)?;
 
        }
 

	
 
        self.progress_array_expr(ctx, id)
 
    }
 

	
 
    fn visit_constant_expr(&mut self, ctx: &mut Ctx, id: ConstantExpressionId) -> VisitorResult {
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitorResult {
 
        let upcast_id = id.upcast();
 
        self.insert_initial_expr_inference_type(ctx, upcast_id)?;
 
        self.progress_constant_expr(ctx, id)
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitorResult {
 
@@ -1383,13 +1383,13 @@ impl TypeResolvingVisitor {
 
                self.progress_select_expr(ctx, id)
 
            },
 
            Expression::Array(expr) => {
 
                let id = expr.this;
 
                self.progress_array_expr(ctx, id)
 
            },
 
            Expression::Constant(expr) => {
 
            Expression::Literal(expr) => {
 
                let id = expr.this;
 
                self.progress_constant_expr(ctx, id)
 
            },
 
            Expression::Call(expr) => {
 
                let id = expr.this;
 
                self.progress_call_expr(ctx, id)
 
@@ -1744,20 +1744,20 @@ impl TypeResolvingVisitor {
 

	
 
        if expr_progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
    }
 

	
 
    fn progress_constant_expr(&mut self, ctx: &mut Ctx, id: ConstantExpressionId) -> Result<(), ParseError2> {
 
    fn progress_constant_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> Result<(), ParseError2> {
 
        let upcast_id = id.upcast();
 
        let expr = &ctx.heap[id];
 
        let template = match &expr.value {
 
            Constant::Null => &MESSAGE_TEMPLATE[..],
 
            Constant::Integer(_) => &INTEGERLIKE_TEMPLATE[..],
 
            Constant::True | Constant::False => &BOOL_TEMPLATE[..],
 
            Constant::Character(_) => todo!("character literals")
 
            Literal::Null => &MESSAGE_TEMPLATE[..],
 
            Literal::Integer(_) => &INTEGERLIKE_TEMPLATE[..],
 
            Literal::True | Literal::False => &BOOL_TEMPLATE[..],
 
            Literal::Character(_) => todo!("character literals")
 
        };
 

	
 
        let progress = self.apply_forced_constraint(ctx, upcast_id, template)?;
 
        if progress { self.queue_expr_parent(ctx, upcast_id); }
 

	
 
        Ok(())
 
@@ -2431,13 +2431,13 @@ impl TypeResolvingVisitor {
 

	
 
        while !to_consider.is_empty() {
 
            let parser_type_id = to_consider.pop_front().unwrap();
 
            let parser_type = &ctx.heap[parser_type_id];
 
            match &parser_type.variant {
 
                PTV::Message => {
 
                    /// TODO: @types Remove the Message -> Byte hack at some point...
 
                    // TODO: @types Remove the Message -> Byte hack at some point...
 
                    infer_type.push(ITP::Message);
 
                    infer_type.push(ITP::Byte);
 
                },
 
                PTV::Bool => { infer_type.push(ITP::Bool); },
 
                PTV::Byte => { infer_type.push(ITP::Byte); },
 
                PTV::Short => { infer_type.push(ITP::Short); },
src/protocol/parser/type_table.rs
Show inline comments
 
@@ -3,28 +3,28 @@ TypeTable
 

	
 
Contains the type table: a datastructure that, when compilation succeeds,
 
contains a concrete type definition for each AST type definition. In general
 
terms the type table will go through the following phases during the compilation
 
process:
 

	
 
    1. The base type definitions are resolved after the parser phase has
 
        finished. This implies that the AST is fully constructed, but not yet
 
        annotated.
 
    2. With the base type definitions resolved, the validation/linker phase will
 
        use the type table (together with the symbol table) to disambiguate
 
        terms (e.g. does an expression refer to a variable, an enum, a constant,
 
        etc.)
 
    3. During the type checking/inference phase the type table is used to ensure
 
        that the AST contains valid use of types in expressions and statements.
 
        At the same time type inference will find concrete instantiations of
 
        polymorphic types; these will be stored in the type table as monomorphed
 
        instantiations of a generic type.
 
    4. After type checking and inference (and possibly when constructing byte
 
        code) the type table will construct a type graph and solidify each
 
        non-polymorphic type and monomorphed instantiations of polymorphic types
 
        into concrete types.
 
1. The base type definitions are resolved after the parser phase has
 
    finished. This implies that the AST is fully constructed, but not yet
 
    annotated.
 
2. With the base type definitions resolved, the validation/linker phase will
 
    use the type table (together with the symbol table) to disambiguate
 
    terms (e.g. does an expression refer to a variable, an enum, a constant,
 
    etc.)
 
3. During the type checking/inference phase the type table is used to ensure
 
    that the AST contains valid use of types in expressions and statements.
 
    At the same time type inference will find concrete instantiations of
 
    polymorphic types; these will be stored in the type table as monomorphed
 
    instantiations of a generic type.
 
4. After type checking and inference (and possibly when constructing byte
 
    code) the type table will construct a type graph and solidify each
 
    non-polymorphic type and monomorphed instantiations of polymorphic types
 
    into concrete types.
 

	
 
So a base type is defined by its (optionally polymorphic) representation in the
 
AST. A concrete type has concrete types for each of the polymorphic arguments. A
 
struct, enum or union may have polymorphic arguments but not actually be a
 
polymorphic type. This happens when the polymorphic arguments are not used in
 
the type definition itself. Similarly for functions/components: but here we just
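
The module comment above (truncated here by the diff context) walks through four phases. As a mental model only, the lifetime of a type table entry can be pictured roughly as follows; the state names are hypothetical and the real type table does not necessarily store such a state explicitly:

    // Hypothetical sketch of the phases described in the comment above.
    enum TypeTableEntryState {
        // Phase 1: base definition resolved from the parsed, unannotated AST.
        Base,
        // Phases 2-3: consulted for disambiguation; type inference records
        // monomorphed instantiations of polymorphic base types.
        Monomorphed,
        // Phase 4: solidified into a concrete type, ready for byte code.
        Concrete,
    }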
src/protocol/parser/visitor.rs
Show inline comments
 
@@ -204,15 +204,15 @@ pub(crate) trait Visitor2 {
 
                self.visit_select_expr(ctx, this)
 
            }
 
            Expression::Array(expr) => {
 
                let this = expr.this;
 
                self.visit_array_expr(ctx, this)
 
            }
 
            Expression::Constant(expr) => {
 
            Expression::Literal(expr) => {
 
                let this = expr.this;
 
                self.visit_constant_expr(ctx, this)
 
                self.visit_literal_expr(ctx, this)
 
            }
 
            Expression::Call(expr) => {
 
                let this = expr.this;
 
                self.visit_call_expr(ctx, this)
 
            }
 
            Expression::Variable(expr) => {
 
@@ -227,13 +227,13 @@ pub(crate) trait Visitor2 {
 
    fn visit_binary_expr(&mut self, _ctx: &mut Ctx, _id: BinaryExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_unary_expr(&mut self, _ctx: &mut Ctx, _id: UnaryExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_indexing_expr(&mut self, _ctx: &mut Ctx, _id: IndexingExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_slicing_expr(&mut self, _ctx: &mut Ctx, _id: SlicingExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_select_expr(&mut self, _ctx: &mut Ctx, _id: SelectExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_array_expr(&mut self, _ctx: &mut Ctx, _id: ArrayExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_constant_expr(&mut self, _ctx: &mut Ctx, _id: ConstantExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_literal_expr(&mut self, _ctx: &mut Ctx, _id: LiteralExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_call_expr(&mut self, _ctx: &mut Ctx, _id: CallExpressionId) -> VisitorResult { Ok(()) }
 
    fn visit_variable_expr(&mut self, _ctx: &mut Ctx, _id: VariableExpressionId) -> VisitorResult { Ok(()) }
 

	
 
    // Types
 
    fn visit_parser_type(&mut self, _ctx: &mut Ctx, _id: ParserTypeId) -> VisitorResult { Ok(()) }
 
}
 
\ No newline at end of file
src/protocol/parser/visitor_linker.rs
Show inline comments
 
@@ -692,17 +692,37 @@ impl Visitor2 for ValidityAndLinkerVisitor {
 
        self.expression_buffer.truncate(old_num_exprs);
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_constant_expr(&mut self, ctx: &mut Ctx, id: ConstantExpressionId) -> VisitorResult {
 
    fn visit_literal_expr(&mut self, ctx: &mut Ctx, id: LiteralExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
 

	
 
        let constant_expr = &mut ctx.heap[id];
 
        constant_expr.parent = self.expr_parent;
 
        let old_expr_parent = self.expr_parent;
 
        constant_expr.parent = old_expr_parent;
 

	
 
        match &mut constant_expr.value {
 
            Literal::Null | Literal::True | Literal::False |
 
            Literal::Character(_) | Literal::Integer(_) => {
 
                // Just the parent has to be set, done above
 
            },
 
            Literal::Struct(literal) => {
 
                // TODO: retrieve and set the literal's definition

                // Need to traverse the field expressions in the struct
 
                let old_num_exprs = self.expression_buffer.len();
 
                self.expression_buffer.extend(literal.fields.iter().map(|v| v.value));
 
                let new_num_exprs = self.expression_buffer.len();
 

	
 
                self.expression_buffer.truncate(old_num_exprs);
 
            }
 
        }
 

	
 
        self.expr_parent = old_expr_parent;
 

	
 
        Ok(())
 
    }
 

	
 
    fn visit_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> VisitorResult {
 
        debug_assert!(!self.performing_breadth_pass);
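
The Literal::Struct arm above is still work in progress: the field value expressions are pushed onto expression_buffer and truncated again, but never visited, and the definition lookup is a TODO. One possible completion, mirroring how the other expression visitors in this file drain the buffer (hypothetical, based only on the surrounding code; the exact expr_parent bookkeeping is an assumption):

                // Hypothetical continuation between extending and truncating
                // the buffer: recurse into each field's value expression.
                for idx in old_num_exprs..new_num_exprs {
                    let field_expr_id = self.expression_buffer[idx];
                    // expr_parent would need to point at this literal before
                    // recursing, analogous to the other expression visitors.
                    self.visit_expr(ctx, field_expr_id)?;
                }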
src/protocol/tests/lexer.rs
Show inline comments
 
new file 100644
 
/// lexer.rs
 
///
 
/// Simple tests for the lexer. These only exercise lexing of the input source
 
/// and inspect the resulting AST, without relying on the validation/typing pass.
 

	
 
use super::*;
 

	
 
#[test]
 
fn test_disallowed_inference() {
 
    Tester::new_single_source_expect_err(
 
        "argument auto inference",
 
            "int func(auto arg) { return 0; }"
 
    ).error(|e| { e
 
        .assert_msg_has(0, "inference is not allowed")
 
        .assert_occurs_at(0, "auto arg");
 
    });
 

	
 
    Tester::new_single_source_expect_err(
 
        "return type auto inference",
 
        "auto func(int arg) { return 0; }"
 
    ).error(|e| { e
 
        .assert_msg_has(0, "inference is not allowed")
 
        .assert_occurs_at(0, "auto func");
 
    });
 

	
 
    Tester::new_single_source_expect_err(
 
        "implicit polymorph argument auto inference",
 
        "int func(in port) { return port; }"
 
    ).error(|e| { e
 
        .assert_msg_has(0, "inference is not allowed")
 
        .assert_occurs_at(0, "in port");
 
    });
 

	
 
    Tester::new_single_source_expect_err(
 
        "explicit polymorph argument auto inference",
 
        "int func(in<auto> port) { return port; }"
 
    ).error(|e| { e
 
        .assert_msg_has(0, "inference is not allowed")
 
        .assert_occurs_at(0, "auto> port");
 
    });
 

	
 
    Tester::new_single_source_expect_err(
 
        "implicit polymorph return type auto inference",
 
        "in func(in<msg> a, in<msg> b) { return a; }"
 
    ).error(|e| { e
 
        .assert_msg_has(0, "inference is not allowed")
 
        .assert_occurs_at(0, "in func");
 
    });
 

	
 
    Tester::new_single_source_expect_err(
 
        "explicit polymorph return type auto inference",
 
        "in<auto> func(in<msg> a) { return a; }"
 
    ).error(|e| { e
 
        .assert_msg_has(0, "inference is not allowed")
 
        .assert_occurs_at(0, "auto> func");
 
    });
 
}
 

	
 
#[test]
 
fn test_simple_struct_definition() {
 
    Tester::new_single_source_expect_ok(
 
        "empty struct",
 
        "struct Foo{}"
 
    ).for_struct("Foo", |t| { t.assert_num_fields(0); });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "single field, no comma",
 
        "struct Foo{ int field }"
 
    ).for_struct("Foo", |t| { t
 
        .assert_num_fields(1)
 
        .for_field("field", |f| {
 
            f.assert_parser_type("int");
 
        });
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "single field, with comma",
 
        "struct Foo{ int field, }"
 
    ).for_struct("Foo", |t| { t
 
        .assert_num_fields(1)
 
        .for_field("field", |f| { f
 
            .assert_parser_type("int");
 
        });
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "multiple fields, no comma",
 
        "struct Foo{ byte a, short b, int c }"
 
    ).for_struct("Foo", |t| { t
 
        .assert_num_fields(3)
 
        .for_field("a", |f| { f.assert_parser_type("byte"); })
 
        .for_field("b", |f| { f.assert_parser_type("short"); })
 
        .for_field("c", |f| { f.assert_parser_type("int"); });
 
    });
 

	
 
    Tester::new_single_source_expect_ok(
 
        "multiple fields, with comma",
 
        "struct Foo{
 
            byte a,
 
            short b,
 
            int c,
 
        }"
 
    ).for_struct("Foo", |t| { t
 
        .assert_num_fields(3)
 
        .for_field("a", |f| { f.assert_parser_type("byte"); })
 
        .for_field("b", |f| { f.assert_parser_type("short"); })
 
        .for_field("c", |f| { f.assert_parser_type("int"); });
 
    });
 
}
 
\ No newline at end of file
src/protocol/tests/mod.rs
Show inline comments
 
new file 100644
 
mod utils;
 
mod lexer;
 

	
 
pub(crate) use utils::{Tester};
 
\ No newline at end of file
src/protocol/tests/utils.rs
Show inline comments
 
new file 100644
 
use crate::protocol::ast::*;
 
use crate::protocol::inputsource::*;
 
use crate::protocol::parser::*;
 

	
 
//------------------------------------------------------------------------------
 
// Interface for parsing and compiling
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct Tester {
 
    test_name: String,
 
    sources: Vec<String>
 
}
 

	
 
impl Tester {
 
    /// Constructs a new tester; multiple sources can be added before compiling.
 
    pub(crate) fn new<S: ToString>(test_name: S) -> Self {
 
        Self{
 
            test_name: test_name.to_string(),
 
            sources: Vec::new()
 
        }
 
    }
 

	
 
    /// Utility for quick tests that use a single source file and expect the
 
    /// compilation to succeed.
 
    pub(crate) fn new_single_source_expect_ok<T: ToString, S: ToString>(test_name: T, source: S) -> AstOkTester {
 
        Self::new(test_name)
 
            .with_source(source)
 
            .compile()
 
            .expect_ok()
 
    }
 

	
 
    /// Utility for quick tests that use a single source file and expect the
 
    /// compilation to fail.
 
    pub(crate) fn new_single_source_expect_err<T: ToString, S: ToString>(test_name: T, source: S) -> AstErrTester {
 
        Self::new(test_name)
 
            .with_source(source)
 
            .compile()
 
            .expect_err()
 
    }
 

	
 
    pub(crate) fn with_source<S: ToString>(mut self, source: S) -> Self {
 
        self.sources.push(source.to_string());
 
        self
 
    }
 

	
 
    pub(crate) fn compile(self) -> AstTesterResult {
 
        let mut parser = Parser::new();
 
        for (source_idx, source) in self.sources.into_iter().enumerate() {
 
            let mut cursor = std::io::Cursor::new(source);
 
            let input_source = InputSource::new("", &mut cursor)
 
                .expect(&format!("parsing source {}", source_idx + 1));
 

	
 
            if let Err(err) = parser.feed(input_source) {
 
                return AstTesterResult::Err(AstErrTester::new(self.test_name, err))
 
            }
 
        }
 

	
 
        parser.compile();
 
        if let Err(err) = parser.parse() {
 
            return AstTesterResult::Err(AstErrTester::new(self.test_name, err))
 
        }
 

	
 
        AstTesterResult::Ok(AstOkTester::new(self.test_name, parser))
 
    }
 
}
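
The two new_single_source_* constructors cover the single-module cases used by the lexer tests; for multiple modules the builder can be driven directly. A usage sketch inside a test function (the placeholder sources are not taken from this changeset, so the outcome is matched rather than asserted):

    // Hypothetical multi-module usage of the Tester builder.
    let result = Tester::new("two modules")
        .with_source("struct Foo{ int field }")
        .with_source("struct Bar{ byte other }")
        .compile();
    match result {
        AstTesterResult::Ok(ok) => {
            ok.for_struct("Foo", |s| { s.assert_num_fields(1); });
        },
        AstTesterResult::Err(err) => {
            err.error(|e| { e.assert_num(1); });
        },
    }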
 

	
 
pub(crate) enum AstTesterResult {
 
    Ok(AstOkTester),
 
    Err(AstErrTester)
 
}
 

	
 
impl AstTesterResult {
 
    pub(crate) fn expect_ok(self) -> AstOkTester {
 
        match self {
 
            AstTesterResult::Ok(v) => v,
 
            AstTesterResult::Err(err) => {
 
                let wrapped = ErrorTester{ test_name: &err.test_name, error: &err.error };
 
                assert!(
 
                    false,
 
                    "[{}] Expected compilation to succeed, but it failed with {}",
 
                    err.test_name, wrapped.assert_postfix()
 
                );
 
                unreachable!();
 
            }
 
        }
 
    }
 

	
 
    pub(crate) fn expect_err(self) -> AstErrTester {
 
        match self {
 
            AstTesterResult::Ok(ok) => {
 
                assert!(false, "[{}] Expected compilation to fail, but it succeeded", ok.test_name);
 
                unreachable!();
 
            },
 
            AstTesterResult::Err(err) => err,
 
        }
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for successful compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct AstOkTester {
 
    test_name: String,
 
    modules: Vec<LexedModule>,
 
    heap: Heap,
 
}
 

	
 
impl AstOkTester {
 
    fn new(test_name: String, parser: Parser) -> Self {
 
        Self {
 
            test_name,
 
            modules: parser.modules,
 
            heap: parser.heap
 
        }
 
    }
 

	
 
    pub(crate) fn for_struct<F: Fn(StructTester)>(self, name: &str, f: F) -> Self {
 
        let mut found = false;
 
        for definition in self.heap.definitions.iter() {
 
            if let Definition::Struct(definition) = definition {
 
                if String::from_utf8_lossy(&definition.identifier.value) != name {
 
                    continue;
 
                }
 

	
 
                // Found struct with the same name
 
                let tester = StructTester::new(&self.test_name, definition, &self.heap);
 
                f(tester);
 
                found = true;
 
                break
 
            }
 
        }
 

	
 
        if found { return self }
 

	
 
        assert!(
 
            false, "[{}] Failed to find definition for struct '{}'",
 
            self.test_name, name
 
        );
 
        unreachable!()
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Utilities for successful compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct StructTester<'a> {
 
    test_name: &'a str,
 
    def: &'a StructDefinition,
 
    heap: &'a Heap,
 
}
 

	
 
impl<'a> StructTester<'a> {
 
    fn new(test_name: &'a str, def: &'a StructDefinition, heap: &'a Heap) -> Self {
 
        Self{ test_name, def, heap }
 
    }
 

	
 
    pub(crate) fn assert_num_fields(self, num: usize) -> Self {
 
        debug_assert_eq!(
 
            num, self.def.fields.len(),
 
            "[{}] Expected {} struct fields, but found {} for {}",
 
            self.test_name, num, self.def.fields.len(), self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    pub(crate) fn for_field<F: Fn(StructFieldTester)>(self, name: &str, f: F) -> Self {
 
        // Find field with specified name
 
        for field in &self.def.fields {
 
            if String::from_utf8_lossy(&field.field.value) == name {
 
                let tester = StructFieldTester::new(self.test_name, field, self.heap);
 
                f(tester);
 
                return self;
 
            }
 
        }
 

	
 
        assert!(
 
            false, "[{}] Could not find struct field '{}' for {}",
 
            self.test_name, name, self.assert_postfix()
 
        );
 
        unreachable!();
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("Struct{ name: ");
 
        v.push_str(&String::from_utf8_lossy(&self.def.identifier.value));
 
        v.push_str(", fields: [");
 
        for (field_idx, field) in self.def.fields.iter().enumerate() {
 
            if field_idx != 0 { v.push_str(", "); }
 
            v.push_str(&String::from_utf8_lossy(&field.field.value));
 
        }
 
        v.push_str("] }");
 
        v
 
    }
 
}
 

	
 
pub(crate) struct StructFieldTester<'a> {
 
    test_name: &'a str,
 
    def: &'a StructFieldDefinition,
 
    heap: &'a Heap,
 
}
 

	
 
impl<'a> StructFieldTester<'a> {
 
    fn new(test_name: &'a str, def: &'a StructFieldDefinition, heap: &'a Heap) -> Self {
 
        Self{ test_name, def, heap }
 
    }
 

	
 
    pub(crate) fn assert_parser_type(self, expected: &str) -> Self {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.heap, self.def.parser_type);
 
        debug_assert_eq!(
 
            expected, &serialized_type,
 
            "[{}] Expected type '{}', but got '{}' for {}",
 
            self.test_name, expected, &serialized_type, self.assert_postfix()
 
        );
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut serialized_type = String::new();
 
        serialize_parser_type(&mut serialized_type, &self.heap, self.def.parser_type);
 
        format!(
 
            "StructField{{ name: {}, parser_type: {} }}",
 
            String::from_utf8_lossy(&self.def.field.value), serialized_type
 
        )
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Interface for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct AstErrTester {
 
    test_name: String,
 
    error: ParseError2,
 
}
 

	
 
impl AstErrTester {
 
    fn new(test_name: String, error: ParseError2) -> Self {
 
        Self{ test_name, error }
 
    }
 

	
 
    pub(crate) fn error<F: Fn(ErrorTester)>(&self, f: F) {
 
        // Maybe multiple errors will be supported in the future
 
        let tester = ErrorTester{ test_name: &self.test_name, error: &self.error };
 
        f(tester)
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Utilities for failed compilation
 
//------------------------------------------------------------------------------
 

	
 
pub(crate) struct ErrorTester<'a> {
 
    test_name: &'a str,
 
    error: &'a ParseError2,
 
}
 

	
 
impl<'a> ErrorTester<'a> {
 
    pub(crate) fn assert_num(self, num: usize) -> Self {
 
        assert_eq!(
 
            num, self.error.statements.len(),
 
            "[{}] expected error to consist of '{}' parts, but encountered '{}' for {}",
 
            self.test_name, num, self.error.statements.len(), self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_ctx_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].context.contains(msg),
 
            "[{}] expected error statement {}'s context to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    pub(crate) fn assert_msg_has(self, idx: usize, msg: &str) -> Self {
 
        assert!(
 
            self.error.statements[idx].message.contains(msg),
 
            "[{}] expected error statement {}'s message to contain '{}' for {}",
 
            self.test_name, idx, msg, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    /// Seeks the pattern in the error statement's context message, then checks
 
    /// that the reported error column matches the (1-based) position at which
 
    /// the pattern starts.
 
    pub(crate) fn assert_occurs_at(self, idx: usize, pattern: &str) -> Self {
 
        let pos = self.error.statements[idx].context.find(pattern);
 
        assert!(
 
            pos.is_some(),
 
            "[{}] incorrect occurs_at: '{}' could not be found in the context for {}",
 
            self.test_name, pattern, self.assert_postfix()
 
        );
 
        let pos = pos.unwrap();
 
        let col = self.error.statements[idx].position.col();
 
        assert_eq!(
 
            pos + 1, col,
 
            "[{}] Expected error to occur at column {}, but found it at {} for {}",
 
            self.test_name, pos + 1, col, self.assert_postfix()
 
        );
 

	
 
        self
 
    }
 

	
 
    fn assert_postfix(&self) -> String {
 
        let mut v = String::new();
 
        v.push_str("error: [");
 
        for (idx, stmt) in self.error.statements.iter().enumerate() {
 
            if idx != 0 {
 
                v.push_str(", ");
 
            }
 

	
 
            v.push_str(&format!("{{ context: {}, message: {} }}", &stmt.context, stmt.message));
 
        }
 
        v.push(']');
 
        v
 
    }
 
}
 

	
 
//------------------------------------------------------------------------------
 
// Generic utilities
 
//------------------------------------------------------------------------------
 

	
 
fn serialize_parser_type(buffer: &mut String, heap: &Heap, id: ParserTypeId) {
 
    use ParserTypeVariant as PTV;
 

	
 
    let p = &heap[id];
 
    match &p.variant {
 
        PTV::Message => buffer.push_str("msg"),
 
        PTV::Bool => buffer.push_str("bool"),
 
        PTV::Byte => buffer.push_str("byte"),
 
        PTV::Short => buffer.push_str("short"),
 
        PTV::Int => buffer.push_str("int"),
 
        PTV::Long => buffer.push_str("long"),
 
        PTV::String => buffer.push_str("string"),
 
        PTV::IntegerLiteral => buffer.push_str("intlit"),
 
        PTV::Inferred => buffer.push_str("auto"),
 
        PTV::Array(sub_id) => {
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push_str("[]");
 
        },
 
        PTV::Input(sub_id) => {
 
            buffer.push_str("in<");
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push('>');
 
        },
 
        PTV::Output(sub_id) => {
 
            buffer.push_str("out<");
 
            serialize_parser_type(buffer, heap, *sub_id);
 
            buffer.push('>');
 
        },
 
        PTV::Symbolic(symbolic) => {
 
            buffer.push_str(&String::from_utf8_lossy(&symbolic.identifier.value));
 
            if symbolic.poly_args.len() > 0 {
 
                buffer.push('<');
 
                for (poly_idx, poly_arg) in symbolic.poly_args.iter().enumerate() {
 
                    if poly_idx != 0 { buffer.push(','); }
 
                    serialize_parser_type(buffer, heap, *poly_arg);
 
                }
 
                buffer.push('>');
 
            }
 
        }
 
    }
 
}
 
\ No newline at end of file
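
For reference, serialize_parser_type produces the compact spellings that assert_parser_type compares against: primitives by keyword, arrays as a [] suffix, ports as in<...> and out<...>, and symbolic types with comma-separated polymorphic arguments. A hypothetical check (the heap and field bindings here are placeholders, not code from this changeset):

    // An input port carrying messages, wrapped in an array, serializes as "in<msg>[]".
    let mut buf = String::new();
    serialize_parser_type(&mut buf, &heap, field.parser_type);
    assert_eq!(buf, "in<msg>[]");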
0 comments (0 inline, 0 general)