diff --git a/src/protocol/ast.rs b/src/protocol/ast.rs index 50c394d05e6ca7b4b17c7e1e4e99b3fd14c04571..37c20ecde84a80e926893d9731898a4b442a482a 100644 --- a/src/protocol/ast.rs +++ b/src/protocol/ast.rs @@ -704,6 +704,7 @@ impl Display for Identifier { } } +/// TODO: @types Remove the Message -> Byte hack at some point... #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub enum ParserTypeVariant { // Basic builtin @@ -776,8 +777,12 @@ pub enum SymbolicParserTypeVariant { /// ConcreteType is the representation of a type after resolving symbolic types /// and performing type inference -#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)] +#[derive(Debug, Clone, Copy, Eq, PartialEq, serde::Serialize, serde::Deserialize)] pub enum ConcreteTypePart { + // Markers for the use of polymorphic types within a procedure's body that + // refer to polymorphic variables on the procedure's definition. Different + // from markers in the `InferenceType`, these will not contain nested types. 
+ Marker(usize), // Special types (cannot be explicitly constructed by the programmer) Void, // Builtin types without nested types @@ -797,7 +802,7 @@ pub enum ConcreteTypePart { Instance(DefinitionId, usize), } -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +#[derive(Debug, Clone, Eq, PartialEq, serde::Serialize, serde::Deserialize)] pub struct ConcreteType { pub(crate) parts: Vec } @@ -808,6 +813,16 @@ impl Default for ConcreteType { } } +impl ConcreteType { + pub(crate) fn has_marker(&self) -> bool { + self.parts + .iter() + .any(|p| { + if let ConcreteTypePart::Marker(_) = p { true } else { false } + }) + } +} + // TODO: Remove at some point #[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)] pub enum PrimitiveType { diff --git a/src/protocol/ast_printer.rs b/src/protocol/ast_printer.rs index 03710204ad1a43957138e3b87f5b701811d95a97..7a89d9478e39e034e812ff8acfc41721b48127cb 100644 --- a/src/protocol/ast_printer.rs +++ b/src/protocol/ast_printer.rs @@ -82,7 +82,7 @@ impl<'a> KV<'a> { self } - fn with_d_key(mut self, key: &D) -> Self { + fn with_d_key(self, key: &D) -> Self { self.temp_key.push_str(&key.to_string()); self } @@ -92,12 +92,12 @@ impl<'a> KV<'a> { self } - fn with_disp_val(mut self, val: &D) -> Self { + fn with_disp_val(self, val: &D) -> Self { self.temp_val.push_str(&format!("{}", val)); self } - fn with_debug_val(mut self, val: &D) -> Self { + fn with_debug_val(self, val: &D) -> Self { self.temp_val.push_str(&format!("{:?}", val)); self } @@ -107,7 +107,7 @@ impl<'a> KV<'a> { self } - fn with_opt_disp_val(mut self, val: Option<&D>) -> Self { + fn with_opt_disp_val(self, val: Option<&D>) -> Self { match val { Some(v) => { self.temp_val.push_str(&format!("Some({})", v)); }, None => { self.temp_val.push_str("None"); } @@ -164,6 +164,7 @@ impl<'a> Drop for KV<'a> { } pub(crate) struct ASTWriter { + cur_definition: Option, buffer: String, temp1: String, temp2: String, @@ -172,6 +173,7 @@ pub(crate) struct 
ASTWriter { impl ASTWriter { pub(crate) fn new() -> Self { Self{ + cur_definition: None, buffer: String::with_capacity(4096), temp1: String::with_capacity(256), temp2: String::with_capacity(256), @@ -267,6 +269,7 @@ impl ASTWriter { //-------------------------------------------------------------------------- fn write_definition(&mut self, heap: &Heap, def_id: DefinitionId, indent: usize) { + self.cur_definition = Some(def_id); let indent2 = indent + 1; let indent3 = indent2 + 1; let indent4 = indent3 + 1; @@ -497,6 +500,7 @@ impl ASTWriter { let expr = &heap[expr_id]; let indent2 = indent + 1; let indent3 = indent2 + 1; + let def_id = self.cur_definition.unwrap(); match expr { Expression::Assignment(expr) => { @@ -510,7 +514,7 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); self.kv(indent2).with_s_key("ConcreteType") - .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); + .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type)); }, Expression::Conditional(expr) => { self.kv(indent).with_id(PREFIX_CONDITIONAL_EXPR_ID, expr.this.0.index) @@ -524,7 +528,7 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); self.kv(indent2).with_s_key("ConcreteType") - .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); + .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type)); }, Expression::Binary(expr) => { self.kv(indent).with_id(PREFIX_BINARY_EXPR_ID, expr.this.0.index) @@ -537,7 +541,7 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); self.kv(indent2).with_s_key("ConcreteType") - .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); + .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type)); }, Expression::Unary(expr) => { 
self.kv(indent).with_id(PREFIX_UNARY_EXPR_ID, expr.this.0.index) @@ -548,7 +552,7 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); self.kv(indent2).with_s_key("ConcreteType") - .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); + .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type)); }, Expression::Indexing(expr) => { self.kv(indent).with_id(PREFIX_INDEXING_EXPR_ID, expr.this.0.index) @@ -560,7 +564,7 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); self.kv(indent2).with_s_key("ConcreteType") - .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); + .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type)); }, Expression::Slicing(expr) => { self.kv(indent).with_id(PREFIX_SLICING_EXPR_ID, expr.this.0.index) @@ -574,7 +578,7 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); self.kv(indent2).with_s_key("ConcreteType") - .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); + .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type)); }, Expression::Select(expr) => { self.kv(indent).with_id(PREFIX_SELECT_EXPR_ID, expr.this.0.index) @@ -593,7 +597,7 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); self.kv(indent2).with_s_key("ConcreteType") - .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); + .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type)); }, Expression::Array(expr) => { self.kv(indent).with_id(PREFIX_ARRAY_EXPR_ID, expr.this.0.index) @@ -606,7 +610,7 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); 
self.kv(indent2).with_s_key("ConcreteType") - .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); + .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type)); }, Expression::Constant(expr) => { self.kv(indent).with_id(PREFIX_CONST_EXPR_ID, expr.this.0.index) @@ -624,7 +628,7 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); self.kv(indent2).with_s_key("ConcreteType") - .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); + .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type)); }, Expression::Call(expr) => { self.kv(indent).with_id(PREFIX_CALL_EXPR_ID, expr.this.0.index) @@ -655,7 +659,7 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); self.kv(indent2).with_s_key("ConcreteType") - .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); + .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type)); }, Expression::Variable(expr) => { self.kv(indent).with_id(PREFIX_VARIABLE_EXPR_ID, expr.this.0.index) @@ -666,7 +670,7 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); self.kv(indent2).with_s_key("ConcreteType") - .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); + .with_custom_val(|v| write_concrete_type(v, heap, def_id, &expr.concrete_type)); } } } @@ -749,16 +753,26 @@ fn write_parser_type(target: &mut String, heap: &Heap, t: &ParserType) { } } -fn write_concrete_type(target: &mut String, heap: &Heap, t: &ConcreteType) { +fn write_concrete_type(target: &mut String, heap: &Heap, def_id: DefinitionId, t: &ConcreteType) { use ConcreteTypePart as CTP; - fn write_concrete_part(target: &mut String, heap: &Heap, t: &ConcreteType, mut idx: usize) -> usize { + fn write_concrete_part(target: &mut String, heap: &Heap, 
def_id: DefinitionId, t: &ConcreteType, mut idx: usize) -> usize { if idx >= t.parts.len() { - target.push_str("Programmer error: invalid concrete type tree"); return idx; } match &t.parts[idx] { + CTP::Marker(marker) => { + // Marker points to polymorphic variable index + let definition = &heap[def_id]; + let poly_var_ident = match definition { + Definition::Struct(_) | Definition::Enum(_) => unreachable!(), + Definition::Function(definition) => &definition.poly_vars[*marker].value, + Definition::Component(definition) => &definition.poly_vars[*marker].value, + }; + target.push_str(&String::from_utf8_lossy(&poly_var_ident)); + idx = write_concrete_part(target, heap, def_id, t, idx + 1); + }, CTP::Void => target.push_str("void"), CTP::Message => target.push_str("msg"), CTP::Bool => target.push_str("bool"), @@ -768,21 +782,21 @@ fn write_concrete_type(target: &mut String, heap: &Heap, t: &ConcreteType) { CTP::Long => target.push_str("long"), CTP::String => target.push_str("string"), CTP::Array => { - idx = write_concrete_part(target, heap, t, idx + 1); + idx = write_concrete_part(target, heap, def_id, t, idx + 1); target.push_str("[]"); }, CTP::Slice => { - idx = write_concrete_part(target, heap, t, idx + 1); + idx = write_concrete_part(target, heap, def_id, t, idx + 1); target.push_str("[..]"); } CTP::Input => { target.push_str("in<"); - idx = write_concrete_part(target, heap, t, idx + 1); + idx = write_concrete_part(target, heap, def_id, t, idx + 1); target.push('>'); }, CTP::Output => { target.push_str("out<"); - idx = write_concrete_part(target, heap, t, idx + 1); + idx = write_concrete_part(target, heap, def_id, t, idx + 1); target.push('>') }, CTP::Instance(definition_id, num_embedded) => { @@ -793,7 +807,7 @@ fn write_concrete_type(target: &mut String, heap: &Heap, t: &ConcreteType) { if idx_embedded != 0 { target.push_str(", "); } - idx = write_concrete_part(target, heap, t, idx + 1); + idx = write_concrete_part(target, heap, def_id, t, idx + 1); } 
target.push('>'); } @@ -802,7 +816,7 @@ fn write_concrete_type(target: &mut String, heap: &Heap, t: &ConcreteType) { idx + 1 } - write_concrete_part(target, heap, t, 0); + write_concrete_part(target, heap, def_id, t, 0); } fn write_expression_parent(target: &mut String, parent: &ExpressionParent) { diff --git a/src/protocol/eval.rs b/src/protocol/eval.rs index 2e8a55feadb9fbcb3926b425b031b1404f450af3..92174af650bcdd887a90ac5fda6dfa6802d73603 100644 --- a/src/protocol/eval.rs +++ b/src/protocol/eval.rs @@ -908,7 +908,11 @@ impl ValueImpl for InputValue { Type::INPUT } fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool { - return if let ParserTypeVariant::Input(_) = t.variant { true } else { false } + use ParserTypeVariant::*; + match &t.variant { + Input(_) | Inferred | Symbolic(_) => true, + _ => false, + } } } @@ -926,7 +930,11 @@ impl ValueImpl for OutputValue { Type::OUTPUT } fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool { - return if let ParserTypeVariant::Output(_) = t.variant { true } else { false } + use ParserTypeVariant::*; + match &t.variant { + Output(_) | Inferred | Symbolic(_) => true, + _ => false, + } } } @@ -954,7 +962,11 @@ impl ValueImpl for MessageValue { Type::MESSAGE } fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool { - return if let ParserTypeVariant::Message = t.variant { true } else { false }; + use ParserTypeVariant::*; + match &t.variant { + Message | Inferred | Symbolic(_) => true, + _ => false, + } } } @@ -974,7 +986,7 @@ impl ValueImpl for BooleanValue { fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool { use ParserTypeVariant::*; match t.variant { - Bool | Byte | Short | Int | Long => true, + Symbolic(_) | Inferred | Bool | Byte | Short | Int | Long => true, _ => false } } @@ -996,7 +1008,7 @@ impl ValueImpl for ByteValue { fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool { use ParserTypeVariant::*; match t.variant { - Byte | Short | Int | Long => true, + Symbolic(_) | 
Inferred | Byte | Short | Int | Long => true, _ => false } } @@ -1018,7 +1030,7 @@ impl ValueImpl for ShortValue { fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool { use ParserTypeVariant::*; match t.variant { - Short | Int | Long => true, + Symbolic(_) | Inferred | Short | Int | Long => true, _ => false } } @@ -1040,7 +1052,7 @@ impl ValueImpl for IntValue { fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool { use ParserTypeVariant::*; match t.variant { - Int | Long => true, + Symbolic(_) | Inferred | Int | Long => true, _ => false } } @@ -1060,7 +1072,11 @@ impl ValueImpl for LongValue { Type::LONG } fn is_type_compatible_hack(_h: &Heap, t: &ParserType) -> bool { - return if let ParserTypeVariant::Long = t.variant { true } else { false } + use ParserTypeVariant::*; + match &t.variant { + Long | Inferred | Symbolic(_) => true, + _ => false, + } } } @@ -1515,13 +1531,10 @@ impl Store { assert_eq!(2, expr.arguments.len()); let port_value = self.eval(h, ctx, expr.arguments[0])?; let msg_value = self.eval(h, ctx, expr.arguments[1])?; - println!("DEBUG: Handiling put({:?}, {:?})", port_value, msg_value); if ctx.did_put(port_value.clone()) { - println!("DEBUG: Already put..."); // Return bogus, replacing this at some point anyway Ok(Value::Message(MessageValue(None))) } else { - println!("DEBUG: Did not yet put..."); Err(EvalContinuation::Put(port_value, msg_value)) } } diff --git a/src/protocol/lexer.rs b/src/protocol/lexer.rs index 4153a1a50a4698435b37f7215eae2581909d3d82..da622a3eabd0ea9c2b2d5f5e6f27aa02016ed6e9 100644 --- a/src/protocol/lexer.rs +++ b/src/protocol/lexer.rs @@ -2455,354 +2455,3 @@ impl Lexer<'_> { })) } } - -#[cfg(test)] -mod tests { - use crate::protocol::ast::*; - use crate::protocol::lexer::*; - use crate::protocol::inputsource::*; - - #[derive(Debug, Eq, PartialEq)] - enum ParserTypeClass { - Message, Bool, Byte, Short, Int, Long, String, Array, Nope - } - impl ParserTypeClass { - fn from(v: &ParserType) -> ParserTypeClass 
{ - use ParserTypeVariant as PTV; - use ParserTypeClass as PTC; - match &v.variant { - PTV::Message => PTC::Message, - PTV::Bool => PTC::Bool, - PTV::Byte => PTC::Byte, - PTV::Short => PTC::Short, - PTV::Int => PTC::Int, - PTV::Long => PTC::Long, - PTV::String => PTC::String, - PTV::Array(_) => PTC::Array, - _ => PTC::Nope, - } - } - } - - #[test] - fn test_pragmas() { - let mut h = Heap::new(); - let mut input = InputSource::from_string(" - #version 0o7777 - #module something.dot.separated - ").expect("new InputSource"); - let mut lex = Lexer::new(&mut input); - let lexed = lex.consume_protocol_description(&mut h) - .expect("lex input source"); - let root = &h[lexed]; - assert_eq!(root.pragmas.len(), 2); - let pv = &h[root.pragmas[0]]; - let pm = &h[root.pragmas[1]]; - - if let Pragma::Version(v) = pv { - assert_eq!(v.version, 0o7777) - } else { - assert!(false, "first pragma not version"); - } - if let Pragma::Module(m) = pm { - assert_eq!(m.value, b"something.dot.separated"); - } else { - assert!(false, "second pragma not module"); - } - } - - #[test] - fn test_import() { - let mut h = Heap::new(); - let mut input = InputSource::from_string(" - // Module imports, with optional and explicit aliasing - import single_module; - import std.reo; - import something.other as alias; - // Symbol imports - import some_module::*; - import some_module::{Foo as Bar, Qux, Dix as Flu}; - import std.reo::{ - Foo as Bar, // because thing - Qux as Mox, // more explanations - Dix, /* yesh, import me */ - }; - ").unwrap(); - let mut lex = Lexer::new(&mut input); - let lexed = lex.consume_protocol_description(&mut h).unwrap(); - let root = &h[lexed]; - assert_eq!(root.imports.len(), 6); - let no_alias_single = h[root.imports[0]].as_module(); - let no_alias_multi = h[root.imports[1]].as_module(); - let with_alias = h[root.imports[2]].as_module(); - - assert_eq!(no_alias_single.module_name, b"single_module"); - assert_eq!(no_alias_single.alias, b"single_module"); - 
assert_eq!(no_alias_multi.module_name, b"std.reo"); - assert_eq!(no_alias_multi.alias, b"reo"); - assert_eq!(with_alias.module_name, b"something.other"); - assert_eq!(with_alias.alias, b"alias"); - - let all_symbols = h[root.imports[3]].as_symbols(); - let single_line_symbols = h[root.imports[4]].as_symbols(); - let multi_line_symbols = h[root.imports[5]].as_symbols(); - - assert_eq!(all_symbols.module_name, b"some_module"); - assert!(all_symbols.symbols.is_empty()); - assert_eq!(single_line_symbols.module_name, b"some_module"); - assert_eq!(single_line_symbols.symbols.len(), 3); - assert_eq!(single_line_symbols.symbols[0].name, b"Foo"); - assert_eq!(single_line_symbols.symbols[0].alias, b"Bar"); - assert_eq!(single_line_symbols.symbols[1].name, b"Qux"); - assert_eq!(single_line_symbols.symbols[1].alias, b"Qux"); - assert_eq!(single_line_symbols.symbols[2].name, b"Dix"); - assert_eq!(single_line_symbols.symbols[2].alias, b"Flu"); - assert_eq!(multi_line_symbols.module_name, b"std.reo"); - assert_eq!(multi_line_symbols.symbols.len(), 3); - assert_eq!(multi_line_symbols.symbols[0].name, b"Foo"); - assert_eq!(multi_line_symbols.symbols[0].alias, b"Bar"); - assert_eq!(multi_line_symbols.symbols[1].name, b"Qux"); - assert_eq!(multi_line_symbols.symbols[1].alias, b"Mox"); - assert_eq!(multi_line_symbols.symbols[2].name, b"Dix"); - assert_eq!(multi_line_symbols.symbols[2].alias, b"Dix"); - } - - #[test] - fn test_struct_definition() { - let mut h = Heap::new(); - let mut input = InputSource::from_string(" - struct Foo { - byte one, - short two, - Bar three, - } - struct Bar{int[] one, int[] two, Qux[] three} - ").unwrap(); - let mut lex = Lexer::new(&mut input); - let lexed = lex.consume_protocol_description(&mut h); - if let Err(err) = &lexed { - println!("{}", err); - } - let lexed = lexed.unwrap(); - let root = &h[lexed]; - - assert_eq!(root.definitions.len(), 2); - - // let symbolic_type = |v: &PrimitiveType| -> Vec { - // if let PrimitiveType::Symbolic(v) = v { - // 
v.identifier.value.clone() - // } else { - // assert!(false); - // unreachable!(); - // } - // }; - - let foo_def = h[root.definitions[0]].as_struct(); - assert_eq!(foo_def.identifier.value, b"Foo"); - assert_eq!(foo_def.fields.len(), 3); - assert_eq!(foo_def.fields[0].field.value, b"one"); - assert_eq!(ParserTypeClass::from(&h[foo_def.fields[0].parser_type]), ParserTypeClass::Byte); - assert_eq!(foo_def.fields[1].field.value, b"two"); - assert_eq!(ParserTypeClass::from(&h[foo_def.fields[1].parser_type]), ParserTypeClass::Short); - assert_eq!(foo_def.fields[2].field.value, b"three"); - // assert_eq!( - // symbolic_type(&h[foo_def.fields[2].the_type].the_type.primitive), - // Vec::from("Bar".as_bytes()) - // ); - - let bar_def = h[root.definitions[1]].as_struct(); - assert_eq!(bar_def.identifier.value, b"Bar"); - assert_eq!(bar_def.fields.len(), 3); - assert_eq!(bar_def.fields[0].field.value, b"one"); - assert_eq!(ParserTypeClass::from(&h[bar_def.fields[0].parser_type]), ParserTypeClass::Array); - assert_eq!(bar_def.fields[1].field.value, b"two"); - assert_eq!(ParserTypeClass::from(&h[bar_def.fields[1].parser_type]), ParserTypeClass::Array); - assert_eq!(bar_def.fields[2].field.value, b"three"); - assert_eq!(ParserTypeClass::from(&h[bar_def.fields[2].parser_type]), ParserTypeClass::Array); - // assert_eq!( - // symbolic_type(&h[bar_def.fields[2].parser_type].the_type.primitive), - // Vec::from("Qux".as_bytes()) - // ); - } - - #[test] - fn test_enum_definition() { - let mut h = Heap::new(); - let mut input = InputSource::from_string(" - enum Foo { - A = 0, - B = 5, - C, - D = 0xFF, - } - enum Bar { Ayoo, Byoo, Cyoo,} - enum Qux { A(byte[]), B(Bar[]), C(byte) - } - ").unwrap(); - let mut lex = Lexer::new(&mut input); - let lexed = lex.consume_protocol_description(&mut h).unwrap(); - let root = &h[lexed]; - - assert_eq!(root.definitions.len(), 3); - - let foo_def = h[root.definitions[0]].as_enum(); - assert_eq!(foo_def.identifier.value, b"Foo"); - 
assert_eq!(foo_def.variants.len(), 4); - assert_eq!(foo_def.variants[0].identifier.value, b"A"); - assert_eq!(foo_def.variants[0].value, EnumVariantValue::Integer(0)); - assert_eq!(foo_def.variants[1].identifier.value, b"B"); - assert_eq!(foo_def.variants[1].value, EnumVariantValue::Integer(5)); - assert_eq!(foo_def.variants[2].identifier.value, b"C"); - assert_eq!(foo_def.variants[2].value, EnumVariantValue::None); - assert_eq!(foo_def.variants[3].identifier.value, b"D"); - assert_eq!(foo_def.variants[3].value, EnumVariantValue::Integer(0xFF)); - - let bar_def = h[root.definitions[1]].as_enum(); - assert_eq!(bar_def.identifier.value, b"Bar"); - assert_eq!(bar_def.variants.len(), 3); - assert_eq!(bar_def.variants[0].identifier.value, b"Ayoo"); - assert_eq!(bar_def.variants[0].value, EnumVariantValue::None); - assert_eq!(bar_def.variants[1].identifier.value, b"Byoo"); - assert_eq!(bar_def.variants[1].value, EnumVariantValue::None); - assert_eq!(bar_def.variants[2].identifier.value, b"Cyoo"); - assert_eq!(bar_def.variants[2].value, EnumVariantValue::None); - - let qux_def = h[root.definitions[2]].as_enum(); - let enum_type = |value: &EnumVariantValue| -> &ParserType { - if let EnumVariantValue::Type(t) = value { - &h[*t] - } else { - assert!(false); - unreachable!(); - } - }; - assert_eq!(qux_def.identifier.value, b"Qux"); - assert_eq!(qux_def.variants.len(), 3); - assert_eq!(qux_def.variants[0].identifier.value, b"A"); - assert_eq!(ParserTypeClass::from(enum_type(&qux_def.variants[0].value)), ParserTypeClass::Array); - assert_eq!(qux_def.variants[1].identifier.value, b"B"); - assert_eq!(ParserTypeClass::from(enum_type(&qux_def.variants[1].value)), ParserTypeClass::Array); - // if let PrimitiveType::Symbolic(t) = &enum_type(&qux_def.variants[1].value).the_type.primitive { - // assert_eq!(t.identifier.value, Vec::from("Bar".as_bytes())); - // } else { assert!(false) } - - assert_eq!(qux_def.variants[2].identifier.value, b"C"); - 
assert_eq!(ParserTypeClass::from(enum_type(&qux_def.variants[2].value)), ParserTypeClass::Byte); - } - -// #[test] -// fn test_lowercase() { -// assert_eq!(lowercase(b'a'), b'a'); -// assert_eq!(lowercase(b'A'), b'a'); -// assert_eq!(lowercase(b'z'), b'z'); -// assert_eq!(lowercase(b'Z'), b'z'); -// } - -// #[test] -// fn test_basic_expression() { -// let mut h = Heap::new(); -// let mut is = InputSource::from_string("a+b;").unwrap(); -// let mut lex = Lexer::new(&mut is); -// match lex.consume_expression(&mut h) { -// Ok(expr) => { -// println!("{:?}", expr); -// if let Binary(bin) = &h[expr] { -// if let Variable(left) = &h[bin.left] { -// if let Variable(right) = &h[bin.right] { -// assert_eq!("a", format!("{}", h[left.identifier])); -// assert_eq!("b", format!("{}", h[right.identifier])); -// assert_eq!(Some(b';'), is.next()); -// return; -// } -// } -// } -// assert!(false); -// } -// Err(err) => { -// err.print(&is); -// assert!(false); -// } -// } -// } - -// #[test] -// fn test_paren_expression() { -// let mut h = Heap::new(); -// let mut is = InputSource::from_string("(true)").unwrap(); -// let mut lex = Lexer::new(&mut is); -// match lex.consume_paren_expression(&mut h) { -// Ok(expr) => { -// println!("{:#?}", expr); -// if let Constant(con) = &h[expr] { -// if let ast::Constant::True = con.value { -// return; -// } -// } -// assert!(false); -// } -// Err(err) => { -// err.print(&is); -// assert!(false); -// } -// } -// } - -// #[test] -// fn test_expression() { -// let mut h = Heap::new(); -// let mut is = InputSource::from_string("(x(1+5,get(y))-w[5])+z++\n").unwrap(); -// let mut lex = Lexer::new(&mut is); -// match lex.consume_expression(&mut h) { -// Ok(expr) => { -// println!("{:#?}", expr); -// } -// Err(err) => { -// err.print(&is); -// assert!(false); -// } -// } -// } - -// #[test] -// fn test_basic_statement() { -// let mut h = Heap::new(); -// let mut is = InputSource::from_string("while (true) { skip; }").unwrap(); -// let mut lex = 
Lexer::new(&mut is); -// match lex.consume_statement(&mut h) { -// Ok(stmt) => { -// println!("{:#?}", stmt); -// if let Statement::While(w) = &h[stmt] { -// if let Expression::Constant(_) = h[w.test] { -// if let Statement::Block(_) = h[w.body] { -// return; -// } -// } -// } -// assert!(false); -// } -// Err(err) => { -// err.print(&is); -// assert!(false); -// } -// } -// } - -// #[test] -// fn test_statement() { -// let mut h = Heap::new(); -// let mut is = InputSource::from_string( -// "label: while (true) { if (x++ > y[0]) break label; else continue; }\n", -// ) -// .unwrap(); -// let mut lex = Lexer::new(&mut is); -// match lex.consume_statement(&mut h) { -// Ok(stmt) => { -// println!("{:#?}", stmt); -// } -// Err(err) => { -// err.print(&is); -// assert!(false); -// } -// } -// } -} diff --git a/src/protocol/parser/mod.rs b/src/protocol/parser/mod.rs index 687930e678e56158ba29073e3c98cec22687b35e..90e97a8ae61e65ce0e5ac6aacca14348c8b3bc62 100644 --- a/src/protocol/parser/mod.rs +++ b/src/protocol/parser/mod.rs @@ -225,9 +225,9 @@ impl Parser { return Err(ParseError2::new_error(&self.modules[0].source, position, &message)) } - let mut writer = ASTWriter::new(); - let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap(); - writer.write_ast(&mut file, &self.heap); + // let mut writer = ASTWriter::new(); + // let mut file = std::fs::File::create(std::path::Path::new("ast.txt")).unwrap(); + // writer.write_ast(&mut file, &self.heap); Ok(root_id) } diff --git a/src/protocol/parser/type_resolver.rs b/src/protocol/parser/type_resolver.rs index 349043750ccdfadd761ee0dbb1704c0426b5b92e..172314208a847db5063049aa1f14bc3d831f0670 100644 --- a/src/protocol/parser/type_resolver.rs +++ b/src/protocol/parser/type_resolver.rs @@ -1,9 +1,35 @@ /// type_resolver.rs /// -/// Performs type inference and type checking +/// Performs type inference and type checking. Type inference is implemented by +/// applying constraints on (sub)trees of types. 
During this process the +/// resolver takes the `ParserType` structs (the representation of the types +/// written by the programmer), converts them to `InferenceType` structs (the +/// temporary data structure used during type inference) and attempts to arrive +/// at `ConcreteType` structs (the representation of a fully checked and +/// validated type). +/// +/// The resolver will visit every statement and expression relevant to the +/// procedure and insert and determine its initial type based on context (e.g. a +/// return statement's expression must match the function's return type, an +/// if statement's test expression must evaluate to a boolean). When all are +/// visited we attempt to make progress in evaluating the types. Whenever a type +/// is progressed we queue the related expressions for further type progression. +/// Once no more expressions are in the queue the algorithm is finished. At this +/// point either all types are inferred (or can be trivially implicitly +/// determined), or we have incomplete types. In the latter casee we return an +/// error. +/// +/// Inference may be applied on non-polymorphic procedures and on polymorphic +/// procedures. When dealing with a non-polymorphic procedure we apply the type +/// resolver and annotate the AST with the `ConcreteType`s. When dealing with +/// polymorphic procedures we will only annotate the AST once, preserving +/// references to polymorphic variables. Any later pass will perform just the +/// type checking. /// /// TODO: Needs an optimization pass /// TODO: Needs a cleanup pass +/// TODO: Disallow `Void` types in various expressions (and other future types) +/// TODO: Maybe remove msg type? macro_rules! 
enabled_debug_print { (false, $name:literal, $format:literal) => {}; @@ -41,7 +67,7 @@ use super::visitor::{ use std::collections::hash_map::Entry; use crate::protocol::parser::type_resolver::InferenceTypePart::IntegerLike; -const MESSAGE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Message ]; +const MESSAGE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::Message, InferenceTypePart::Byte ]; const BOOL_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::Bool ]; const NUMBERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::NumberLike ]; const INTEGERLIKE_TEMPLATE: [InferenceTypePart; 1] = [ InferenceTypePart::IntegerLike ]; @@ -50,11 +76,16 @@ const ARRAYLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::ArrayLik const PORTLIKE_TEMPLATE: [InferenceTypePart; 2] = [ InferenceTypePart::PortLike, InferenceTypePart::Unknown ]; /// TODO: @performance Turn into PartialOrd+Ord to simplify checks +/// TODO: @types Remove the Message -> Byte hack at some point... #[derive(Debug, Clone, Eq, PartialEq)] pub(crate) enum InferenceTypePart { - // A marker with an identifier which we can use to seek subsections of the - // inferred type - Marker(usize), + // A marker with an identifier which we can use to retrieve the type subtree + // that follows the marker. This is used to perform type inference on + // polymorphs: an expression may determine the polymorphs type, after we + // need to apply that information to all other places where the polymorph is + // used. 
+ MarkerDefinition(usize), // marker for polymorph types on a procedure's definition + MarkerBody(usize), // marker for polymorph types within a procedure body // Completely unknown type, needs to be inferred Unknown, // Partially known type, may be inferred to to be the appropriate related @@ -67,7 +98,6 @@ pub(crate) enum InferenceTypePart { // Special types that cannot be instantiated by the user Void, // For builtin functions that do not return anything // Concrete types without subtypes - Message, Bool, Byte, Short, @@ -75,6 +105,7 @@ pub(crate) enum InferenceTypePart { Long, String, // One subtype + Message, Array, Slice, Input, @@ -85,7 +116,12 @@ pub(crate) enum InferenceTypePart { impl InferenceTypePart { fn is_marker(&self) -> bool { - if let InferenceTypePart::Marker(_) = self { true } else { false } + use InferenceTypePart as ITP; + + match self { + ITP::MarkerDefinition(_) | ITP::MarkerBody(_) => true, + _ => false, + } } /// Checks if the type is concrete, markers are interpreted as concrete @@ -116,10 +152,10 @@ impl InferenceTypePart { } } - fn is_concrete_array_or_slice(&self) -> bool { + fn is_concrete_msg_array_or_slice(&self) -> bool { use InferenceTypePart as ITP; match self { - ITP::Array | ITP::Slice => true, + ITP::Array | ITP::Slice | ITP::Message => true, _ => false, } } @@ -140,7 +176,7 @@ impl InferenceTypePart { (*self == ITP::IntegerLike && arg.is_concrete_integer()) || (*self == ITP::NumberLike && (arg.is_concrete_number() || *arg == ITP::IntegerLike)) || - (*self == ITP::ArrayLike && arg.is_concrete_array_or_slice()) || + (*self == ITP::ArrayLike && arg.is_concrete_msg_array_or_slice()) || (*self == ITP::PortLike && arg.is_concrete_port()) } @@ -151,12 +187,13 @@ impl InferenceTypePart { use InferenceTypePart as ITP; match &self { ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | - ITP::Void | ITP::Message | ITP::Bool | + ITP::Void | ITP::Bool | ITP::Byte | ITP::Short | ITP::Int | ITP::Long | ITP::String => { -1 }, - ITP::Marker(_) 
| ITP::ArrayLike | ITP::Array | ITP::Slice | + ITP::MarkerDefinition(_) | ITP::MarkerBody(_) | + ITP::ArrayLike | ITP::Message | ITP::Array | ITP::Slice | ITP::PortLike | ITP::Input | ITP::Output => { // One subtype, so do not modify depth 0 @@ -174,6 +211,9 @@ impl From for InferenceTypePart { use InferenceTypePart as ITP; match v { + CTP::Marker(_) => { + unreachable!("encountered marker while converting concrete type to inferred type"); + } CTP::Void => ITP::Void, CTP::Message => ITP::Message, CTP::Bool => ITP::Bool, @@ -193,23 +233,25 @@ impl From for InferenceTypePart { #[derive(Debug)] struct InferenceType { - has_marker: bool, + has_body_marker: bool, is_done: bool, parts: Vec, } impl InferenceType { - fn new(has_marker: bool, is_done: bool, parts: Vec) -> Self { + fn new(has_body_marker: bool, is_done: bool, parts: Vec) -> Self { if cfg!(debug_assertions) { debug_assert!(!parts.is_empty()); - if !has_marker { - debug_assert!(parts.iter().all(|v| !v.is_marker())); + if !has_body_marker { + debug_assert!(parts.iter().all(|v| { + if let InferenceTypePart::MarkerBody(_) = v { false } else { true } + })); } if is_done { debug_assert!(parts.iter().all(|v| v.is_concrete())); } } - Self{ has_marker, is_done, parts } + Self{ has_body_marker: has_body_marker, is_done, parts } } fn replace_subtree(&mut self, start_idx: usize, with: &[InferenceTypePart]) { @@ -224,64 +266,20 @@ impl InferenceType { self.is_done = self.parts.iter().all(|v| v.is_concrete()); } - /// Checks if type is, or may be inferred as, a number - // TODO: @float - fn might_be_number(&self) -> bool { - use InferenceTypePart as ITP; - - // TODO: @marker? 
- if self.parts.len() != 1 { return false; } - match self.parts[0] { - ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | - ITP::Byte | ITP::Short | ITP::Int | ITP::Long => - true, - _ => - false, - } - } - - /// Checks if type is, or may be inferred as, an integer - fn might_be_integer(&self) -> bool { - use InferenceTypePart as ITP; - - // TODO: @marker? - if self.parts.len() != 1 { return false; } - match self.parts[0] { - ITP::Unknown | ITP::IntegerLike | - ITP::Byte | ITP::Short | ITP::Int | ITP::Long => - true, - _ => - false, - } - } - - /// Checks if type is, or may be inferred as, a boolean - fn might_be_boolean(&self) -> bool { - use InferenceTypePart as ITP; - - // TODO: @marker? - if self.parts.len() != 1 { return false; } - match self.parts[0] { - ITP::Unknown | ITP::Bool => true, - _ => false - } - } - - /// Returns an iterator over all markers and the partial type tree that + /// Returns an iterator over all body markers and the partial type tree that /// follows those markers. - fn marker_iter(&self) -> InferenceTypeMarkerIter { + fn body_marker_iter(&self) -> InferenceTypeMarkerIter { InferenceTypeMarkerIter::new(&self.parts) } - /// Attempts to find a marker with a specific value appearing at or after - /// the specified index. If found then the partial type tree's bounding - /// indices that follow that marker are returned. - fn find_subtree_idx_for_marker(&self, marker: usize, mut idx: usize) -> Option<(usize, usize)> { - // Seek ahead to find a marker - let marker = InferenceTypePart::Marker(marker); + /// Attempts to find a specific type part appearing at or after the + /// specified index. If found then the partial type tree's bounding indices + /// that follow that marker are returned. 
+ fn find_subtree_idx_for_part(&self, part: InferenceTypePart, mut idx: usize) -> Option<(usize, usize)> { + debug_assert!(part.depth_change() >= 0, "cannot find subtree for leaf part"); while idx < self.parts.len() { - if marker == self.parts[idx] { - // Found the marker + if part == self.parts[idx] { + // Found the specified part let start_idx = idx + 1; let end_idx = Self::find_subtree_end_idx(&self.parts, start_idx); return Some((start_idx, end_idx)) @@ -334,6 +332,14 @@ impl InferenceType { let to_infer_part = &to_infer.parts[*to_infer_idx]; let template_part = &template_parts[*template_idx]; + // TODO: Maybe do this differently? + let mut template_definition_marker = None; + if *template_idx > 0 { + if let ITP::MarkerDefinition(marker) = &template_parts[*template_idx - 1] { + template_definition_marker = Some(*marker) + } + } + // Check for programmer mistakes debug_assert_ne!(to_infer_part, template_part); debug_assert!(!to_infer_part.is_marker(), "marker encountered in 'infer part'"); @@ -343,7 +349,14 @@ impl InferenceType { if to_infer_part.may_be_inferred_from(template_part) { let depth_change = to_infer_part.depth_change(); debug_assert_eq!(depth_change, template_part.depth_change()); + + if let Some(marker) = template_definition_marker { + to_infer.parts.insert(*to_infer_idx, ITP::MarkerDefinition(marker)); + *to_infer_idx += 1; + } + to_infer.parts[*to_infer_idx] = template_part.clone(); + *to_infer_idx += 1; *template_idx += 1; return Some(depth_change); @@ -354,12 +367,19 @@ impl InferenceType { // template part is different, so cannot be unknown, hence copy the // entire subtree let template_end_idx = Self::find_subtree_end_idx(template_parts, *template_idx); - to_infer.parts[*to_infer_idx] = template_part.clone(); - *to_infer_idx += 1; - for insert_idx in (*template_idx + 1)..template_end_idx { - to_infer.parts.insert(*to_infer_idx, template_parts[insert_idx].clone()); + let erase_offset = if let Some(marker) = template_definition_marker { + 
to_infer.parts[*to_infer_idx] = ITP::MarkerDefinition(marker); *to_infer_idx += 1; - } + 0 + } else { + 1 + }; + + to_infer.parts.splice( + *to_infer_idx..*to_infer_idx + erase_offset, + template_parts[*template_idx..template_end_idx].iter().cloned() + ); + *to_infer_idx += (template_end_idx - *template_idx); *template_idx = template_end_idx; // Note: by definition the LHS was Unknown and the RHS traversed a @@ -371,8 +391,8 @@ impl InferenceType { } /// Call that checks if the `to_check` part is compatible with the `infer` - /// part. This essentially implements `infer_part_for_single_type` but skips - /// over the matching parts. + /// part. This is essentially a copy of `infer_part_for_single_type`, but + /// without actually copying the type parts. fn check_part_for_single_type( to_check_parts: &[InferenceTypePart], to_check_idx: &mut usize, template_parts: &[InferenceTypePart], template_idx: &mut usize @@ -415,7 +435,7 @@ impl InferenceType { ) -> DualInferenceResult { use InferenceTypePart as ITP; - debug_assert!(!std::ptr::eq(type_a, type_b), "same inference types"); + debug_assert!(!std::ptr::eq(type_a, type_b), "encountered pointers to the same inference type"); let type_a = &mut *type_a; let type_b = &mut *type_b; @@ -438,8 +458,8 @@ impl InferenceType { idx_b += 1; continue; } - if let ITP::Marker(_) = part_a { idx_a += 1; continue; } - if let ITP::Marker(_) = part_b { idx_b += 1; continue; } + if part_a.is_marker() { idx_a += 1; continue; } + if part_b.is_marker() { idx_b += 1; continue; } // Types are not equal and are both not markers if let Some(depth_change) = Self::infer_part_for_single_type(type_a, &mut idx_a, &type_b.parts, &mut idx_b) { @@ -576,15 +596,30 @@ impl InferenceType { use InferenceTypePart as ITP; use ConcreteTypePart as CTP; + // Make sure inference type is specified but concrete type is not yet specified + debug_assert!(!self.parts.is_empty()); debug_assert!(concrete_type.parts.is_empty()); 
concrete_type.parts.reserve(self.parts.len()); - for part in &self.parts { + let mut idx = 0; + while idx < self.parts.len() { + let part = &self.parts[idx]; let converted_part = match part { - ITP::Marker(_) => { continue; }, + ITP::MarkerDefinition(marker) => { + // Outer markers are converted to regular markers, we + // completely remove the type subtree that follows it + idx = InferenceType::find_subtree_end_idx(&self.parts, idx + 1); + concrete_type.parts.push(CTP::Marker(*marker)); + continue; + }, + ITP::MarkerBody(_) => { + // Inner markers are removed when writing to the concrete + // type. + idx += 1; + continue; + }, ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | ITP::ArrayLike | ITP::PortLike => { - debug_assert!(false, "Attempted to convert inference type part {:?} into concrete type", part); - unreachable!(); + unreachable!("Attempted to convert inference type part {:?} into concrete type", part); }, ITP::Void => CTP::Void, ITP::Message => CTP::Message, @@ -600,7 +635,9 @@ impl InferenceType { ITP::Output => CTP::Output, ITP::Instance(id, num) => CTP::Instance(*id, *num), }; + concrete_type.parts.push(converted_part); + idx += 1; } } @@ -612,7 +649,7 @@ impl InferenceType { use InferenceTypePart as ITP; match &parts[idx] { - ITP::Marker(_) => { + ITP::MarkerDefinition(_) | ITP::MarkerBody(_) => { idx = Self::write_display_name(buffer, heap, parts, idx + 1) }, ITP::Unknown => buffer.push_str("?"), @@ -628,13 +665,17 @@ impl InferenceType { buffer.push('>'); } ITP::Void => buffer.push_str("void"), - ITP::Message => buffer.push_str("msg"), ITP::Bool => buffer.push_str("bool"), ITP::Byte => buffer.push_str("byte"), ITP::Short => buffer.push_str("short"), ITP::Int => buffer.push_str("int"), ITP::Long => buffer.push_str("long"), ITP::String => buffer.push_str("str"), + ITP::Message => { + buffer.push_str("msg<"); + idx = Self::write_display_name(buffer, heap, parts, idx + 1); + buffer.push('>'); + }, ITP::Array => { idx = 
Self::write_display_name(buffer, heap, parts, idx + 1); buffer.push_str("[]"); @@ -704,7 +745,7 @@ impl<'a> Iterator for InferenceTypeMarkerIter<'a> { fn next(&mut self) -> Option { // Iterate until we find a marker while self.idx < self.parts.len() { - if let InferenceTypePart::Marker(marker) = self.parts[self.idx] { + if let InferenceTypePart::MarkerBody(marker) = self.parts[self.idx] { // Found a marker, find the subtree end let start_idx = self.idx + 1; let end_idx = InferenceType::find_subtree_end_idx(self.parts, start_idx); @@ -803,8 +844,22 @@ struct ExtraData { } struct VarData { + /// Type of the variable var_type: InferenceType, + /// VariableExpressions that use the variable used_at: Vec, + /// For channel statements we link to the other variable such that when one + /// channel's interior type is resolved, we can also resolve the other one. + linked_var: Option, +} + +impl VarData { + fn new_channel(var_type: InferenceType, other_port: VariableId) -> Self { + Self{ var_type, used_at: Vec::new(), linked_var: Some(other_port) } + } + fn new_local(var_type: InferenceType) -> Self { + Self{ var_type, used_at: Vec::new(), linked_var: None } + } } impl TypeResolvingVisitor { @@ -888,15 +943,15 @@ impl Visitor2 for TypeResolvingVisitor { let comp_def = &ctx.heap[id]; debug_assert_eq!(comp_def.poly_vars.len(), self.poly_vars.len(), "component polyvars do not match imposed polyvars"); - debug_log!("{}", "-".repeat(80)); + debug_log!("{}", "-".repeat(50)); debug_log!("Visiting component '{}': {}", &String::from_utf8_lossy(&comp_def.identifier.value), id.0.index); - debug_log!("{}", "-".repeat(80)); + debug_log!("{}", "-".repeat(50)); for param_id in comp_def.parameters.clone() { let param = &ctx.heap[param_id]; let var_type = self.determine_inference_type_from_parser_type(ctx, param.parser_type, true); debug_assert!(var_type.is_done, "expected component arguments to be concrete types"); - self.var_types.insert(param_id.upcast(), VarData{ var_type, used_at: 
Vec::new() }); + self.var_types.insert(param_id.upcast(), VarData::new_local(var_type)); } let body_stmt_id = ctx.heap[id].body; @@ -909,15 +964,15 @@ impl Visitor2 for TypeResolvingVisitor { let func_def = &ctx.heap[id]; debug_assert_eq!(func_def.poly_vars.len(), self.poly_vars.len(), "function polyvars do not match imposed polyvars"); - debug_log!("{}", "-".repeat(80)); + debug_log!("{}", "-".repeat(50)); debug_log!("Visiting function '{}': {}", &String::from_utf8_lossy(&func_def.identifier.value), id.0.index); - debug_log!("{}", "-".repeat(80)); + debug_log!("{}", "-".repeat(50)); for param_id in func_def.parameters.clone() { let param = &ctx.heap[param_id]; let var_type = self.determine_inference_type_from_parser_type(ctx, param.parser_type, true); debug_assert!(var_type.is_done, "expected function arguments to be concrete types"); - self.var_types.insert(param_id.upcast(), VarData{ var_type, used_at: Vec::new() }); + self.var_types.insert(param_id.upcast(), VarData::new_local(var_type)); } let body_stmt_id = ctx.heap[id].body; @@ -942,7 +997,7 @@ impl Visitor2 for TypeResolvingVisitor { let local = &ctx.heap[memory_stmt.variable]; let var_type = self.determine_inference_type_from_parser_type(ctx, local.parser_type, true); - self.var_types.insert(memory_stmt.variable.upcast(), VarData{ var_type, used_at: Vec::new() }); + self.var_types.insert(memory_stmt.variable.upcast(), VarData::new_local(var_type)); Ok(()) } @@ -952,11 +1007,11 @@ impl Visitor2 for TypeResolvingVisitor { let from_local = &ctx.heap[channel_stmt.from]; let from_var_type = self.determine_inference_type_from_parser_type(ctx, from_local.parser_type, true); - self.var_types.insert(from_local.this.upcast(), VarData{ var_type: from_var_type, used_at: Vec::new() }); + self.var_types.insert(from_local.this.upcast(), VarData::new_channel(from_var_type, channel_stmt.to.upcast())); let to_local = &ctx.heap[channel_stmt.to]; let to_var_type = self.determine_inference_type_from_parser_type(ctx, 
to_local.parser_type, true); - self.var_types.insert(to_local.this.upcast(), VarData{ var_type: to_var_type, used_at: Vec::new() }); + self.var_types.insert(to_local.this.upcast(), VarData::new_channel(to_var_type, channel_stmt.from.upcast())); Ok(()) } @@ -1217,18 +1272,23 @@ impl TypeResolvingVisitor { self.progress_expr(ctx, next_expr_id)?; } - // Should have inferred everything - for (expr_id, expr_type) in self.expr_types.iter() { + // Should have inferred everything. Check for this and optionally + // auto-infer the remaining types + for (expr_id, expr_type) in self.expr_types.iter_mut() { if !expr_type.is_done { - // TODO: Auto-inference of integerlike types - let expr = &ctx.heap[*expr_id]; - return Err(ParseError2::new_error( - &ctx.module.source, expr.position(), - &format!( - "Could not fully infer the type of this expression (got '{}')", - expr_type.display_name(&ctx.heap) - ) - )) + // Auto-infer numberlike/integerlike types to a regular int + if expr_type.parts.len() == 1 && expr_type.parts[0] == InferenceTypePart::IntegerLike { + expr_type.parts[0] = InferenceTypePart::Int; + } else { + let expr = &ctx.heap[*expr_id]; + return Err(ParseError2::new_error( + &ctx.module.source, expr.position(), + &format!( + "Could not fully infer the type of this expression (got '{}')", + expr_type.display_name(&ctx.heap) + ) + )) + } } let concrete_type = ctx.heap[*expr_id].get_type_mut(); @@ -1240,7 +1300,7 @@ impl TypeResolvingVisitor { for (call_expr_id, extra_data) in self.extra_data.iter() { if extra_data.poly_vars.is_empty() { continue; } - // We have a polymorph + // Retrieve polymorph variable specification let mut monomorph_types = Vec::with_capacity(extra_data.poly_vars.len()); for (poly_idx, poly_type) in extra_data.poly_vars.iter().enumerate() { if !poly_type.is_done { @@ -1268,32 +1328,27 @@ impl TypeResolvingVisitor { todo!("implement different kinds of polymorph expressions"); }; + // Add to type table if not yet typechecked if let 
Method::Symbolic(symbolic) = &call_expr.method { let definition_id = symbolic.definition.unwrap(); - let root_id = ctx.types - .get_base_definition(&definition_id) - .unwrap() - .ast_root; - - queue.push(ResolveQueueElement{ - root_id, - definition_id, - monomorph_types, - }) + if !ctx.types.has_monomorph(&definition_id, &monomorph_types) { + let root_id = ctx.types + .get_base_definition(&definition_id) + .unwrap() + .ast_root; + + // Pre-emptively add the monomorph to the type table, but + // we still need to perform typechecking on it + ctx.types.add_monomorph(&definition_id, monomorph_types.clone()); + queue.push(ResolveQueueElement { + root_id, + definition_id, + monomorph_types, + }) + } } } - // Finally, if the currently resolved definition is a monomoprh, then we - // add it to the type table - if !self.poly_vars.is_empty() { - let definition_id = match &self.definition_type { - DefinitionType::None => unreachable!(), - DefinitionType::Function(id) => id.upcast(), - DefinitionType::Component(id) => id.upcast(), - }; - ctx.types.instantiate_monomorph(&definition_id, &self.poly_vars) - } - Ok(()) } @@ -1696,9 +1751,9 @@ impl TypeResolvingVisitor { let upcast_id = id.upcast(); let expr = &ctx.heap[id]; let template = match &expr.value { - Constant::Null => &MESSAGE_TEMPLATE, - Constant::Integer(_) => &INTEGERLIKE_TEMPLATE, - Constant::True | Constant::False => &BOOL_TEMPLATE, + Constant::Null => &MESSAGE_TEMPLATE[..], + Constant::Integer(_) => &INTEGERLIKE_TEMPLATE[..], + Constant::True | Constant::False => &BOOL_TEMPLATE[..], Constant::Character(_) => todo!("character literals") }; @@ -1736,8 +1791,8 @@ impl TypeResolvingVisitor { for (arg_idx, arg_id) in expr.arguments.clone().into_iter().enumerate() { let signature_type = &mut extra.embedded[arg_idx]; let argument_type: *mut _ = self.expr_types.get_mut(&arg_id).unwrap(); - let (progress_sig, progress_arg) = Self::apply_equal2_constraint_types( - ctx, upcast_id, signature_type, 0, argument_type, 0 + let 
(progress_sig, progress_arg) = Self::apply_equal2_signature_constraint( + ctx, upcast_id, Some(arg_id), signature_type, 0, argument_type, 0 )?; debug_log!(" - Arg {} type | sig: {}, arg: {}", arg_idx, signature_type.display_name(&ctx.heap), unsafe{&*argument_type}.display_name(&ctx.heap)); @@ -1745,8 +1800,8 @@ impl TypeResolvingVisitor { if progress_sig { // Progressed signature, so also apply inference to the // polymorph types using the markers - debug_assert!(signature_type.has_marker, "progress on signature argument type without markers"); - for (poly_idx, poly_section) in signature_type.marker_iter() { + debug_assert!(signature_type.has_body_marker, "progress on signature argument type without markers"); + for (poly_idx, poly_section) in signature_type.body_marker_iter() { let polymorph_type = &mut extra.poly_vars[poly_idx]; match Self::apply_forced_constraint_types( polymorph_type, 0, poly_section, 0 @@ -1768,16 +1823,16 @@ impl TypeResolvingVisitor { // Do the same for the return type let signature_type = &mut extra.returned; let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap(); - let (progress_sig, progress_expr) = Self::apply_equal2_constraint_types( - ctx, upcast_id, signature_type, 0, expr_type, 0 + let (progress_sig, progress_expr) = Self::apply_equal2_signature_constraint( + ctx, upcast_id, None, signature_type, 0, expr_type, 0 )?; debug_log!(" - Ret type | sig: {}, arg: {}", signature_type.display_name(&ctx.heap), unsafe{&*expr_type}.display_name(&ctx.heap)); if progress_sig { // As above: apply inference to polyargs as well - debug_assert!(signature_type.has_marker, "progress on signature return type without markers"); - for (poly_idx, poly_section) in signature_type.marker_iter() { + debug_assert!(signature_type.has_body_marker, "progress on signature return type without markers"); + for (poly_idx, poly_section) in signature_type.body_marker_iter() { let polymorph_type = &mut extra.poly_vars[poly_idx]; match 
Self::apply_forced_constraint_types( polymorph_type, 0, poly_section, 0 @@ -1815,7 +1870,9 @@ impl TypeResolvingVisitor { for (arg_idx, sig_type) in extra.embedded.iter_mut().enumerate() { let mut seek_idx = 0; let mut modified_sig = false; - while let Some((start_idx, end_idx)) = sig_type.find_subtree_idx_for_marker(poly_idx, seek_idx) { + while let Some((start_idx, end_idx)) = sig_type.find_subtree_idx_for_part( + InferenceTypePart::MarkerBody(poly_idx), seek_idx + ) { let modified_at_marker = Self::apply_forced_constraint_types( sig_type, start_idx, &poly_type.parts, 0 ).unwrap(); @@ -1832,8 +1889,8 @@ impl TypeResolvingVisitor { // argument as well let arg_expr_id = expr.arguments[arg_idx]; let arg_type: *mut _ = self.expr_types.get_mut(&arg_expr_id).unwrap(); - let (progress_arg, _) = Self::apply_equal2_constraint_types( - ctx, arg_expr_id, arg_type, 0, sig_type, 0 + let (_, progress_arg) = Self::apply_equal2_signature_constraint( + ctx, arg_expr_id, Some(arg_expr_id), sig_type, 0, arg_type, 0 ).expect("no inference error at argument type"); if progress_arg { self.expr_queued.insert(arg_expr_id); } debug_log!(" - Poly {} | Arg {} type | sig: {}, arg: {}", poly_idx, arg_idx, sig_type.display_name(&ctx.heap), unsafe{&*arg_type}.display_name(&ctx.heap)); @@ -1843,7 +1900,9 @@ impl TypeResolvingVisitor { let sig_type = &mut extra.returned; let mut seek_idx = 0; let mut modified_sig = false; - while let Some((start_idx, end_idx)) = sig_type.find_subtree_idx_for_marker(poly_idx, seek_idx) { + while let Some((start_idx, end_idx)) = sig_type.find_subtree_idx_for_part( + InferenceTypePart::MarkerBody(poly_idx), seek_idx + ) { let modified_at_marker = Self::apply_forced_constraint_types( sig_type, start_idx, &poly_type.parts, 0 ).unwrap(); @@ -1853,8 +1912,8 @@ impl TypeResolvingVisitor { if modified_sig { let ret_type = self.expr_types.get_mut(&upcast_id).unwrap(); - let (progress_ret, _) = Self::apply_equal2_constraint_types( - ctx, upcast_id, ret_type, 0, sig_type, 0 
+ let (_, progress_ret) = Self::apply_equal2_signature_constraint( + ctx, upcast_id, None, sig_type, 0, ret_type, 0 ).expect("no inference error at return type"); if progress_ret { if let Some(parent_id) = ctx.heap[upcast_id].parent_expr_id() { @@ -1911,11 +1970,57 @@ impl TypeResolvingVisitor { let progress_expr = infer_res.modified_rhs(); if progress_var { + // Let other variable expressions using this type progress as well for other_expr in var_data.used_at.iter() { if *other_expr != upcast_id { self.expr_queued.insert(*other_expr); } } + + // Let a linked port know that our type has updated + if let Some(linked_id) = var_data.linked_var { + // Only perform one-way inference to prevent updating our type, this + // would lead to an inconsistency + let var_type: *mut _ = &mut var_data.var_type; + let mut link_data = self.var_types.get_mut(&linked_id).unwrap(); + + debug_assert!( + unsafe{&*var_type}.parts[0] == InferenceTypePart::Input || + unsafe{&*var_type}.parts[0] == InferenceTypePart::Output + ); + debug_assert!( + link_data.var_type.parts[0] == InferenceTypePart::Input || + link_data.var_type.parts[0] == InferenceTypePart::Output + ); + match InferenceType::infer_subtree_for_single_type(&mut link_data.var_type, 1, &unsafe{&*var_type}.parts, 1) { + SingleInferenceResult::Modified => { + for other_expr in &link_data.used_at { + self.expr_queued.insert(*other_expr); + } + }, + SingleInferenceResult::Unmodified => {}, + SingleInferenceResult::Incompatible => { + let var_data = self.var_types.get(&var_id).unwrap(); + let link_data = self.var_types.get(&linked_id).unwrap(); + let var_decl = &ctx.heap[var_id]; + let link_decl = &ctx.heap[linked_id]; + + return Err(ParseError2::new_error( + &ctx.module.source, var_decl.position(), + &format!( + "Conflicting types for this variable, assigned the type '{}'", + var_data.var_type.display_name(&ctx.heap) + ) + ).with_postfixed_info( + &ctx.module.source, link_decl.position(), + &format!( + "Because it is incompatible 
with this variable, assigned the type '{}'", + link_data.var_type.display_name(&ctx.heap) + ) + )); + } + } + } } if progress_expr { self.queue_expr_parent(ctx, upcast_id); } @@ -1993,23 +2098,40 @@ impl TypeResolvingVisitor { Ok((infer_res.modified_lhs(), infer_res.modified_rhs())) } - fn apply_equal2_constraint_types( - ctx: &Ctx, expr_id: ExpressionId, - type1: *mut InferenceType, type1_start_idx: usize, - type2: *mut InferenceType, type2_start_idx: usize + fn apply_equal2_signature_constraint( + ctx: &Ctx, outer_expr_id: ExpressionId, expr_id: Option, + signature_type: *mut InferenceType, signature_start_idx: usize, + expression_type: *mut InferenceType, expression_start_idx: usize ) -> Result<(bool, bool), ParseError2> { - debug_assert_ptrs_distinct!(type1, type2); + debug_assert_ptrs_distinct!(signature_type, expression_type); let infer_res = unsafe { InferenceType::infer_subtrees_for_both_types( - type1, type1_start_idx, - type2, type2_start_idx + signature_type, signature_start_idx, + expression_type, expression_start_idx ) }; if infer_res == DualInferenceResult::Incompatible { + // TODO: Check if I still need to use this + let outer_position = ctx.heap[outer_expr_id].position(); + let (position_name, position) = match expr_id { + Some(expr_id) => ("argument's", ctx.heap[expr_id].position()), + None => ("return type's", outer_position) + }; + let (signature_display_type, expression_display_type) = unsafe { ( + (&*signature_type).display_name(&ctx.heap), + (&*expression_type).display_name(&ctx.heap) + ) }; + return Err(ParseError2::new_error( - &ctx.module.source, ctx.heap[expr_id].position(), - "TODO: Write me, apply_equal2_constraint_types" + &ctx.module.source, outer_position, + "Failed to fully resolve the types of this expression" + ).with_postfixed_info( + &ctx.module.source, position, + &format!( + "Because the {} signature has been resolved to '{}', but the expression has been resolved to '{}'", + position_name, signature_display_type, 
expression_display_type + ) )); } @@ -2049,7 +2171,7 @@ impl TypeResolvingVisitor { // to be expanded, then we must also assign this to expr_type. let mut progress_expr = expr_res.modified_lhs(); let mut progress_arg1 = expr_res.modified_rhs(); - let mut progress_arg2 = args_res.modified_rhs(); + let progress_arg2 = args_res.modified_rhs(); if args_res.modified_lhs() { unsafe { @@ -2213,28 +2335,31 @@ impl TypeResolvingVisitor { let (embedded_types, return_type) = match &call.method { Method::Create => { // Not polymorphic - unreachable!("insert initial polymorph data for builtin 'create()' call") + ( + vec![InferenceType::new(false, true, vec![ITP::Int])], + InferenceType::new(false, true, vec![ITP::Message, ITP::Byte]) + ) }, Method::Fires => { // bool fires(PortLike arg) ( - vec![InferenceType::new(true, false, vec![ITP::PortLike, ITP::Marker(0), ITP::Unknown])], + vec![InferenceType::new(true, false, vec![ITP::PortLike, ITP::MarkerBody(0), ITP::Unknown])], InferenceType::new(false, true, vec![ITP::Bool]) ) }, Method::Get => { // T get(input arg) ( - vec![InferenceType::new(true, false, vec![ITP::Input, ITP::Marker(0), ITP::Unknown])], - InferenceType::new(true, false, vec![ITP::Marker(0), ITP::Unknown]) + vec![InferenceType::new(true, false, vec![ITP::Input, ITP::MarkerBody(0), ITP::Unknown])], + InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown]) ) }, Method::Put => { // void Put(output port, T msg) ( vec![ - InferenceType::new(true, false, vec![ITP::Output, ITP::Marker(0), ITP::Unknown]), - InferenceType::new(true, false, vec![ITP::Marker(0), ITP::Unknown]) + InferenceType::new(true, false, vec![ITP::Output, ITP::MarkerBody(0), ITP::Unknown]), + InferenceType::new(true, false, vec![ITP::MarkerBody(0), ITP::Unknown]) ], InferenceType::new(false, true, vec![ITP::Void]) ) @@ -2308,7 +2433,11 @@ impl TypeResolvingVisitor { let parser_type_id = to_consider.pop_front().unwrap(); let parser_type = &ctx.heap[parser_type_id]; match 
&parser_type.variant { - PTV::Message => { infer_type.push(ITP::Message); }, + PTV::Message => { + // TODO: @types Remove the Message -> Byte hack at some point... + infer_type.push(ITP::Message); + infer_type.push(ITP::Byte); + }, PTV::Bool => { infer_type.push(ITP::Bool); }, PTV::Byte => { infer_type.push(ITP::Byte); }, PTV::Short => { infer_type.push(ITP::Short); }, @@ -2336,20 +2465,23 @@ impl TypeResolvingVisitor { debug_assert!(symbolic.variant.is_some(), "symbolic variant not yet determined"); match symbolic.variant.as_ref().unwrap() { SymbolicParserTypeVariant::PolyArg(_, arg_idx) => { - // Retrieve concrete type of argument and add it to - // the inference type. let arg_idx = *arg_idx; debug_assert!(symbolic.poly_args.is_empty()); // TODO: @hkt if parser_type_in_body { + // Polymorphic argument refers to definition's + // polymorphic variables debug_assert!(arg_idx < self.poly_vars.len()); + debug_assert!(!self.poly_vars[arg_idx].has_marker()); + infer_type.push(ITP::MarkerDefinition(arg_idx)); for concrete_part in &self.poly_vars[arg_idx].parts { infer_type.push(ITP::from(*concrete_part)); } } else { + // Polymorphic argument has to be inferred has_markers = true; has_inferred = true; - infer_type.push(ITP::Marker(arg_idx)); + infer_type.push(ITP::MarkerBody(arg_idx)); infer_type.push(ITP::Unknown); } }, @@ -2469,12 +2601,12 @@ impl TypeResolvingVisitor { // Helper function to check for polymorph mismatch between two inference // types. 
fn has_poly_mismatch<'a>(type_a: &'a InferenceType, type_b: &'a InferenceType) -> Option<(usize, &'a [InferenceTypePart], &'a [InferenceTypePart])> { - if !type_a.has_marker || !type_b.has_marker { + if !type_a.has_body_marker || !type_b.has_body_marker { return None } - for (marker_a, section_a) in type_a.marker_iter() { - for (marker_b, section_b) in type_b.marker_iter() { + for (marker_a, section_a) in type_a.body_marker_iter() { + for (marker_b, section_b) in type_b.body_marker_iter() { if marker_a != marker_b { // Not the same polymorphic variable continue; @@ -2621,7 +2753,7 @@ mod tests { let pairs = [ (ITP::NumberLike, ITP::Byte), (ITP::IntegerLike, ITP::Int), - (ITP::Unknown, ITP::Message), + (ITP::Unknown, ITP::Long), (ITP::Unknown, ITP::String) ]; for (lhs, rhs) in pairs.iter() { @@ -2636,10 +2768,10 @@ mod tests { // Using infer-single let mut lhs_type = IT::new(false, false, vec![lhs.clone()]); - let mut rhs_type = IT::new(false, true, vec![rhs.clone()]); - let result = unsafe{ IT::infer_subtree_for_single_type( + let rhs_type = IT::new(false, true, vec![rhs.clone()]); + let result = IT::infer_subtree_for_single_type( &mut lhs_type, 0, &rhs_type.parts, 0 - ) }; + ); assert_eq!(SingleInferenceResult::Modified, result); assert_eq!(lhs_type.parts, rhs_type.parts); } @@ -2668,10 +2800,10 @@ mod tests { assert_eq!(lhs_type.parts, rhs_type.parts); let mut lhs_type = IT::new(false, false, lhs.clone()); - let mut rhs_type = IT::new(false, false, rhs.clone()); - let result = unsafe{ IT::infer_subtree_for_single_type( + let rhs_type = IT::new(false, false, rhs.clone()); + let result = IT::infer_subtree_for_single_type( &mut lhs_type, 0, &rhs_type.parts, 0 - ) }; + ); assert_eq!(SingleInferenceResult::Modified, result); assert_eq!(lhs_type.parts, rhs_type.parts) } diff --git a/src/protocol/parser/type_table.rs b/src/protocol/parser/type_table.rs index da4a18eb37c03393491c17ddd2ba09e0893a20e2..f041b25972dff96f84ab901ca3888107883ba7ff 100644 --- 
a/src/protocol/parser/type_table.rs +++ b/src/protocol/parser/type_table.rs @@ -116,6 +116,22 @@ pub struct DefinedType { pub(crate) monomorphs: Vec>, } +impl DefinedType { + fn add_monomorph(&mut self, types: Vec) { + debug_assert!(!self.has_monomorph(&types), "monomorph already exists"); + self.monomorphs.push(types); + } + + fn has_monomorph(&self, types: &Vec) -> bool { + debug_assert_eq!(self.poly_args.len(), types.len(), "mismatch in number of polymorphic types"); + for monomorph in &self.monomorphs { + if monomorph == types { return true; } + } + + return false; + } +} + pub enum DefinedTypeVariant { Enum(EnumType), Union(UnionType), @@ -188,8 +204,8 @@ pub struct FunctionType { } pub struct ComponentType { - variant: ComponentVariant, - arguments: Vec + pub variant: ComponentVariant, + pub arguments: Vec } pub struct FunctionArgument { @@ -323,19 +339,25 @@ impl TypeTable { } /// Instantiates a monomorph for a given base definition. - pub(crate) fn instantiate_monomorph(&mut self, definition_id: &DefinitionId, monomorph: &Vec) { + pub(crate) fn add_monomorph(&mut self, definition_id: &DefinitionId, types: Vec) { debug_assert!( self.lookup.contains_key(definition_id), "attempting to instantiate monomorph of definition unknown to type table" ); + let definition = self.lookup.get_mut(definition_id).unwrap(); - debug_assert_eq!( - monomorph.len(), definition.poly_args.len(), - "attempting to instantiate monomorph with {} types, but definition requires {}", - monomorph.len(), definition.poly_args.len() + definition.add_monomorph(types); + } + + /// Checks if a given definition already has a specific monomorph + pub(crate) fn has_monomorph(&mut self, definition_id: &DefinitionId, types: &Vec) -> bool { + debug_assert!( + self.lookup.contains_key(definition_id), + "attempting to check monomorph existence of definition unknown to type table" ); - definition.monomorphs.push(monomorph.clone()) + let definition = self.lookup.get(definition_id).unwrap(); + 
definition.has_monomorph(types) } /// This function will resolve just the basic definition of the type, it diff --git a/src/protocol/parser/visitor_linker.rs b/src/protocol/parser/visitor_linker.rs index bcfeab5ab112973f56804bab31e89cc0b2933318..14953f6c0ca02beea4640faa69017a37f468f2a5 100644 --- a/src/protocol/parser/visitor_linker.rs +++ b/src/protocol/parser/visitor_linker.rs @@ -492,67 +492,25 @@ impl Visitor2 for ValidityAndLinkerVisitor { ); } - // No fancy recursive parsing, must be followed by a call expression - let definition_id = { - let call_expr = &ctx.heap[call_expr_id]; - if let Method::Symbolic(symbolic) = &call_expr.method { - let found_symbol = self.find_symbol_of_type( - ctx.module.root_id, &ctx.symbols, &ctx.types, - &symbolic.identifier, TypeClass::Component - ); - - match found_symbol { - FindOfTypeResult::Found(definition_id) => definition_id, - FindOfTypeResult::TypeMismatch(got_type_class) => { - return Err(ParseError2::new_error( - &ctx.module.source, symbolic.identifier.position, - &format!("New must instantiate a component, this identifier points to a {}", got_type_class) - )) - }, - FindOfTypeResult::NotFound => { - return Err(ParseError2::new_error( - &ctx.module.source, symbolic.identifier.position, - "Could not find a defined component with this name" - )) - } - } - } else { - return Err( - ParseError2::new_error(&ctx.module.source, call_expr.position, "Must instantiate a component") - ); - } - }; - - // Modify new statement's symbolic call to point to the appropriate - // definition. - let call_expr = &mut ctx.heap[call_expr_id]; - match &mut call_expr.method { - Method::Symbolic(method) => method.definition = Some(definition_id), - _ => unreachable!() + // We make sure that we point to a symbolic method. Checking that it + // points to a component is done in the depth pass. 
+ let call_expr = &ctx.heap[call_expr_id]; + if let Method::Symbolic(_) = &call_expr.method { + // We're fine + } else { + return Err( + ParseError2::new_error(&ctx.module.source, call_expr.position, "Must instantiate a component") + ); } } else { - // Performing depth pass. The function definition should have been - // resolved in the breadth pass, now we recurse to evaluate the - // arguments - // TODO: @cleanup Maybe just call `visit_call_expr`? + // Just call `visit_call_expr`. We do some extra work we don't have + // to, but this prevents silly mistakes. let call_expr_id = ctx.heap[id].expression; - let call_expr = &mut ctx.heap[call_expr_id]; - call_expr.parent = ExpressionParent::New(id); - - let old_num_exprs = self.expression_buffer.len(); - self.expression_buffer.extend(&call_expr.arguments); - let new_num_exprs = self.expression_buffer.len(); - - let old_expr_parent = self.expr_parent; - for arg_expr_idx in old_num_exprs..new_num_exprs { - let arg_expr_id = self.expression_buffer[arg_expr_idx]; - self.expr_parent = ExpressionParent::Expression(call_expr_id.upcast(), arg_expr_idx as u32); - self.visit_expr(ctx, arg_expr_id)?; - } - - self.expression_buffer.truncate(old_num_exprs); - self.expr_parent = old_expr_parent; + debug_assert_eq!(self.expr_parent, ExpressionParent::None); + self.expr_parent = ExpressionParent::New(id); + self.visit_call_expr(ctx, call_expr_id)?; + self.expr_parent = ExpressionParent::None; } Ok(()) @@ -808,22 +766,33 @@ impl Visitor2 for ValidityAndLinkerVisitor { } Method::Symbolic(symbolic) => { // Find symbolic method + let (verb, expected_type) = if let ExpressionParent::New(_) = self.expr_parent { + // Expect to find a component + ("instantiated", TypeClass::Component) + } else { + // Expect to find a function + ("called", TypeClass::Function) + }; + let found_symbol = self.find_symbol_of_type( ctx.module.root_id, &ctx.symbols, &ctx.types, - &symbolic.identifier, TypeClass::Function + &symbolic.identifier, expected_type ); let 
definition_id = match found_symbol { FindOfTypeResult::Found(definition_id) => definition_id, FindOfTypeResult::TypeMismatch(got_type_class) => { return Err(ParseError2::new_error( &ctx.module.source, symbolic.identifier.position, - &format!("Only functions can be called, this identifier points to a {}", got_type_class) + &format!( + "Only {}s can be {}, this identifier points to a {}", + expected_type, verb, got_type_class + ) )) }, FindOfTypeResult::NotFound => { return Err(ParseError2::new_error( &ctx.module.source, symbolic.identifier.position, - &format!("Could not find a function with this name") + &format!("Could not find a {} with this name", expected_type) )) } }; @@ -833,6 +802,9 @@ impl Visitor2 for ValidityAndLinkerVisitor { DefinedTypeVariant::Function(definition) => { num_definition_args = definition.arguments.len(); }, + DefinedTypeVariant::Component(definition) => { + num_definition_args = definition.arguments.len(); + } _ => unreachable!(), } } @@ -1122,7 +1094,7 @@ impl ValidityAndLinkerVisitor { })); } - if let PTV::Symbolic(symbolic) = &mut ctx.heap[id].variant { + if let PTV::Symbolic(symbolic) = &mut ctx.heap[parser_type_id].variant { for _ in 0..num_inferred_to_allocate { symbolic.poly_args.push(self.parser_type_buffer.pop().unwrap()); } @@ -1482,11 +1454,13 @@ impl ValidityAndLinkerVisitor { } Method::Symbolic(symbolic) => { let definition = &ctx.heap[symbolic.definition.unwrap()]; - if let Definition::Function(definition) = definition { - definition.poly_vars.len() - } else { - debug_assert!(false, "expected function while visiting call poly args"); - unreachable!(); + match definition { + Definition::Function(definition) => definition.poly_vars.len(), + Definition::Component(definition) => definition.poly_vars.len(), + _ => { + debug_assert!(false, "expected function or component definition while visiting call poly args"); + unreachable!(); + } } } }; diff --git a/src/runtime/tests.rs b/src/runtime/tests.rs index 
052b3e04d4d9ef84c1aa8f87460e85a65c30df91..d95b032827b69cdbfe2b1e33fc7a2df77f1ec0b9 100644 --- a/src/runtime/tests.rs +++ b/src/runtime/tests.rs @@ -37,7 +37,7 @@ fn file_logged_configured_connector( Connector::new(file_logger, pd, connector_id) } static MINIMAL_PDL: &'static [u8] = b" -primitive together(in ia, in ib, out oa, out ob){ +primitive together(in ia, in ib, out oa, out ob){ while(true) synchronous { if(fires(ia)) { put(oa, get(ia)); @@ -857,7 +857,7 @@ fn ac_not_b() { let mut c = file_logged_connector(0, test_log_path); let p0 = c.new_net_port(Putter, sock_addrs[0], Active).unwrap(); let p1 = c.new_net_port(Putter, sock_addrs[1], Active).unwrap(); - c.connect(SEC1).unwrap(); + c.connect(SEC5).unwrap(); // put both A and B c.put(p0, TEST_MSG.clone()).unwrap(); @@ -867,7 +867,7 @@ fn ac_not_b() { s.spawn(|_| { // "bob" let pdl = b" - primitive ac_not_b(in a, in b, out c){ + primitive ac_not_b(in a, in b, out c){ // forward A to C but keep B silent synchronous{ put(c, get(a)); } }"; @@ -876,7 +876,9 @@ fn ac_not_b() { let p0 = c.new_net_port(Getter, sock_addrs[0], Passive).unwrap(); let p1 = c.new_net_port(Getter, sock_addrs[1], Passive).unwrap(); let [a, b] = c.new_port_pair(); + c.add_component(b"ac_not_b", &[p0, p1, a]).unwrap(); + c.connect(SEC1).unwrap(); c.get(b).unwrap(); @@ -930,7 +932,7 @@ fn many_rounds_mem() { #[test] fn pdl_reo_lossy() { let pdl = b" - primitive lossy(in a, out b) { + primitive lossy(in a, out b) { while(true) synchronous { msg m = null; if(fires(a)) { @@ -948,7 +950,7 @@ fn pdl_reo_lossy() { #[test] fn pdl_reo_fifo1() { let pdl = b" - primitive fifo1(in a, out b) { + primitive fifo1(in a, out b) { msg m = null; while(true) synchronous { if(m == null) { @@ -967,7 +969,7 @@ fn pdl_reo_fifo1() { fn pdl_reo_fifo1full() { let test_log_path = Path::new("./logs/pdl_reo_fifo1full"); let pdl = b" - primitive fifo1full(in a, out b) { + primitive fifo1full(in a, out b) { msg m = create(0); while(true) synchronous { if(m == null) { @@ 
-994,7 +996,7 @@ fn pdl_reo_fifo1full() { fn pdl_msg_consensus() { let test_log_path = Path::new("./logs/pdl_msg_consensus"); let pdl = b" - primitive msgconsensus(in a, in b) { + primitive msgconsensus(in a, in b) { while(true) synchronous { msg x = get(a); msg y = get(b); @@ -1021,7 +1023,7 @@ fn pdl_msg_consensus() { fn sequencer3_prim() { let test_log_path = Path::new("./logs/sequencer3_prim"); let pdl = b" - primitive sequencer3(out a, out b, out c) { + primitive sequencer3(out a, out b, out c) { int i = 0; while(true) synchronous { out to = a; @@ -1068,7 +1070,7 @@ fn sequencer3_prim() { fn sequencer3_comp() { let test_log_path = Path::new("./logs/sequencer3_comp"); let pdl = b" - primitive fifo1_init(msg m, in a, out b) { + primitive fifo1_init(T m, in a, out b) { while(true) synchronous { if(m != null && fires(b)) { put(b, m); @@ -1078,13 +1080,13 @@ fn sequencer3_comp() { } } } - composite fifo1_full(in a, out b) { + composite fifo1_full(in a, out b) { new fifo1_init(create(0), a, b); } - composite fifo1(in a, out b) { + composite fifo1(in a, out b) { new fifo1_init(null, a, b); } - composite sequencer3(out a, out b, out c) { + composite sequencer3(out a, out b, out c) { channel d -> e; channel f -> g; channel h -> i; @@ -1149,7 +1151,7 @@ const XROUTER_ITEMS: &[XRouterItem] = { fn xrouter_prim() { let test_log_path = Path::new("./logs/xrouter_prim"); let pdl = b" - primitive xrouter(in a, out b, out c) { + primitive xrouter(in a, out b, out c) { while(true) synchronous { if(fires(a)) { if(fires(b)) put(b, get(a)); @@ -1189,15 +1191,15 @@ fn xrouter_prim() { fn xrouter_comp() { let test_log_path = Path::new("./logs/xrouter_comp"); let pdl = b" - primitive lossy(in a, out b) { + primitive lossy(in a, out b) { while(true) synchronous { if(fires(a)) { - msg m = get(a); + auto m = get(a); if(fires(b)) put(b, m); } } } - primitive sync_drain(in a, in b) { + primitive sync_drain(in a, in b) { while(true) synchronous { if(fires(a)) { get(a); @@ -1205,7 +1207,7 @@ 
fn xrouter_comp() { } } } - composite xrouter(in a, out b, out c) { + composite xrouter(in a, out b, out c) { channel d -> e; channel f -> g; channel h -> i; @@ -1258,7 +1260,7 @@ fn xrouter_comp() { fn count_stream() { let test_log_path = Path::new("./logs/count_stream"); let pdl = b" - primitive count_stream(out o) { + primitive count_stream(out o) { msg m = create(1); m[0] = 0; while(true) synchronous { @@ -1286,11 +1288,12 @@ fn count_stream() { fn for_msg_byte() { let test_log_path = Path::new("./logs/for_msg_byte"); let pdl = b" - primitive for_msg_byte(out o) { + primitive for_msg_byte(out o) { byte i = 0; + int idx = 0; while(i<8) { msg m = create(1); - m[0] = i; + m[idx] = i; synchronous put(o, m); i++; } @@ -1316,7 +1319,7 @@ fn for_msg_byte() { fn eq_causality() { let test_log_path = Path::new("./logs/eq_causality"); let pdl = b" - primitive eq(in a, in b, out c) { + primitive eq(in a, in b, out c) { msg ma = null; msg mb = null; while(true) synchronous {