From 6ec2e0261a037b79d1537530a84244df0592610d 2021-03-22 18:24:54 From: MH Date: 2021-03-22 18:24:54 Subject: [PATCH] debugging type inference --- diff --git a/src/protocol/arena.rs b/src/protocol/arena.rs index 21d3af16ee6ec5a2cdc65fe2bafd7f0019acba0e..c2e9ed75708ea1a3c05c1fdd8940e3ab952a9b7d 100644 --- a/src/protocol/arena.rs +++ b/src/protocol/arena.rs @@ -7,6 +7,13 @@ pub struct Id { pub(crate) index: u32, _phantom: PhantomData, } + +impl Id { + pub(crate) fn new(index: u32) -> Self { + Self{ index, _phantom: Default::default() } + } +} + #[derive(Debug, serde::Serialize, serde::Deserialize)] pub(crate) struct Arena { store: Vec, @@ -42,19 +49,13 @@ impl Arena { } pub fn alloc_with_id(&mut self, f: impl FnOnce(Id) -> T) -> Id { use std::convert::TryFrom; - let id = Id { - index: u32::try_from(self.store.len()).expect("Out of capacity!"), - _phantom: Default::default(), - }; + let id = Id::new(u32::try_from(self.store.len()).expect("Out of capacity!")); self.store.push(f(id)); id } pub fn iter(&self) -> impl Iterator { self.store.iter() } - pub fn iter_mut(&mut self) -> impl Iterator { - self.store.iter_mut() - } pub fn len(&self) -> usize { self.store.len() } diff --git a/src/protocol/ast.rs b/src/protocol/ast.rs index 05b1e5b41e43b621eb400e0024c9fb4b98126645..92a2d16e17d8eb47928441263ec11b6b5b3b93d5 100644 --- a/src/protocol/ast.rs +++ b/src/protocol/ast.rs @@ -704,9 +704,6 @@ impl Display for Identifier { } } -/// TODO: @cleanup Maybe handle this differently, preallocate in heap? The -/// reason I'm handling it like this now is so we don't allocate types in -/// the `Arena` structure if they're the common types defined here. #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub enum ParserTypeVariant { // Basic builtin @@ -777,6 +774,40 @@ pub enum SymbolicParserTypeVariant { PolyArg(DefinitionId, usize), // index of polyarg in the definition } +/// ConcreteType is the representation of a type after resolving symbolic types +/// and performing type inference +#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)] +pub enum ConcreteTypePart { + // Special types (cannot be explicitly constructed by the programmer) + Void, + // Builtin types without nested types + Message, + Bool, + Byte, + Short, + Int, + Long, + String, + // Builtin types with one nested type + Array, + Slice, + Input, + Output, + // User defined type with any number of nested types + Instance(DefinitionId, usize), +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] +pub struct ConcreteType { + pub(crate) parts: Vec +} + +impl Default for ConcreteType { + fn default() -> Self { + Self{ parts: Vec::new() } + } +} + #[derive(Debug, Clone, PartialEq, Eq, serde::Serialize, serde::Deserialize)] pub enum PrimitiveType { Input, @@ -2005,6 +2036,7 @@ impl Expression { Expression::Variable(expr) => &expr.parent, } } + // TODO: @cleanup pub fn parent_expr_id(&self) -> Option { if let ExpressionParent::Expression(id, _) = self.parent() { Some(*id) @@ -2012,6 +2044,7 @@ impl Expression { None } } + // TODO: @cleanup pub fn set_parent(&mut self, parent: ExpressionParent) { match self { Expression::Assignment(expr) => expr.parent = parent, @@ -2027,6 +2060,22 @@ impl Expression { Expression::Variable(expr) => expr.parent = parent, } } + // TODO: @cleanup + pub fn get_type_mut(&mut self) -> &mut ConcreteType { + match self { + Expression::Assignment(expr) => &mut expr.concrete_type, + Expression::Conditional(expr) => &mut expr.concrete_type, + Expression::Binary(expr) => &mut 
expr.concrete_type, + Expression::Unary(expr) => &mut expr.concrete_type, + Expression::Indexing(expr) => &mut expr.concrete_type, + Expression::Slicing(expr) => &mut expr.concrete_type, + Expression::Select(expr) => &mut expr.concrete_type, + Expression::Array(expr) => &mut expr.concrete_type, + Expression::Constant(expr) => &mut expr.concrete_type, + Expression::Call(expr) => &mut expr.concrete_type, + Expression::Variable(expr) => &mut expr.concrete_type, + } + } } impl SyntaxElement for Expression { @@ -2072,6 +2121,8 @@ pub struct AssignmentExpression { pub right: ExpressionId, // Phase 2: linker pub parent: ExpressionParent, + // Phase 3: type checking + pub concrete_type: ConcreteType, } impl SyntaxElement for AssignmentExpression { @@ -2090,6 +2141,8 @@ pub struct ConditionalExpression { pub false_expression: ExpressionId, // Phase 2: linker pub parent: ExpressionParent, + // Phase 3: type checking + pub concrete_type: ConcreteType, } impl SyntaxElement for ConditionalExpression { @@ -2131,6 +2184,8 @@ pub struct BinaryExpression { pub right: ExpressionId, // Phase 2: linker pub parent: ExpressionParent, + // Phase 3: type checking + pub concrete_type: ConcreteType, } impl SyntaxElement for BinaryExpression { @@ -2160,6 +2215,8 @@ pub struct UnaryExpression { pub expression: ExpressionId, // Phase 2: linker pub parent: ExpressionParent, + // Phase 3: type checking + pub concrete_type: ConcreteType, } impl SyntaxElement for UnaryExpression { @@ -2177,6 +2234,8 @@ pub struct IndexingExpression { pub index: ExpressionId, // Phase 2: linker pub parent: ExpressionParent, + // Phase 3: type checking + pub concrete_type: ConcreteType, } impl SyntaxElement for IndexingExpression { @@ -2195,6 +2254,8 @@ pub struct SlicingExpression { pub to_index: ExpressionId, // Phase 2: linker pub parent: ExpressionParent, + // Phase 3: type checking + pub concrete_type: ConcreteType, } impl SyntaxElement for SlicingExpression { @@ -2212,6 +2273,8 @@ pub struct SelectExpression { pub field: Field, // Phase 2: linker pub parent: ExpressionParent, + // Phase 3: type checking + pub concrete_type: ConcreteType, } impl SyntaxElement for SelectExpression { @@ -2228,6 +2291,8 @@ pub struct ArrayExpression { pub elements: Vec, // Phase 2: linker pub parent: ExpressionParent, + // Phase 3: type checking + pub concrete_type: ConcreteType, } impl SyntaxElement for ArrayExpression { @@ -2246,6 +2311,8 @@ pub struct CallExpression { pub poly_args: Vec, // Phase 2: linker pub parent: ExpressionParent, + // Phase 3: type checking + pub concrete_type: ConcreteType, } impl SyntaxElement for CallExpression { @@ -2262,6 +2329,8 @@ pub struct ConstantExpression { pub value: Constant, // Phase 2: linker pub parent: ExpressionParent, + // Phase 3: type checking + pub concrete_type: ConcreteType, } impl SyntaxElement for ConstantExpression { @@ -2279,6 +2348,8 @@ pub struct VariableExpression { // Phase 2: linker pub declaration: Option, pub parent: ExpressionParent, + // Phase 3: type checking + pub concrete_type: ConcreteType, } impl SyntaxElement for VariableExpression { diff --git a/src/protocol/ast_printer.rs b/src/protocol/ast_printer.rs index fa75fcb31a7dad2546a25ddac755c6a8b7a08c54..8dbfc6f0900fef9dfc31ac07df4855cf5e1b0991 100644 --- a/src/protocol/ast_printer.rs +++ b/src/protocol/ast_printer.rs @@ -83,7 +83,7 @@ impl<'a> KV<'a> { } fn with_d_key(mut self, key: &D) -> Self { - write!(&mut self.temp_key, "{}", key); + self.temp_key.push_str(&key.to_string()); self } @@ -93,24 +93,24 @@ impl<'a> KV<'a> { } fn 
with_disp_val(mut self, val: &D) -> Self { - write!(&mut self.temp_val, "{}", val); + self.temp_val.push_str(&format!("{}", val)); self } fn with_debug_val(mut self, val: &D) -> Self { - write!(&mut self.temp_val, "{:?}", val); + self.temp_val.push_str(&format!("{:?}", val)); self } fn with_ascii_val(self, val: &[u8]) -> Self { - self.temp_val.write_str(&*String::from_utf8_lossy(val)); + self.temp_val.push_str(&*String::from_utf8_lossy(val)); self } fn with_opt_disp_val(mut self, val: Option<&D>) -> Self { match val { - Some(v) => { write!(&mut self.temp_val, "Some({})", v); }, - None => { self.temp_val.write_str("None"); } + Some(v) => { self.temp_val.push_str(&format!("Some({})", v)); }, + None => { self.temp_val.push_str("None"); } } self } @@ -118,12 +118,12 @@ impl<'a> KV<'a> { fn with_opt_ascii_val(self, val: Option<&[u8]>) -> Self { match val { Some(v) => { - self.temp_val.write_str("Some("); - self.temp_val.write_str(&*String::from_utf8_lossy(v)); - self.temp_val.write_char(')'); + self.temp_val.push_str("Some("); + self.temp_val.push_str(&*String::from_utf8_lossy(v)); + self.temp_val.push(')'); }, None => { - self.temp_val.write_str("None"); + self.temp_val.push_str("None"); } } self @@ -139,9 +139,9 @@ impl<'a> Drop for KV<'a> { fn drop(&mut self) { // Prefix and indent if let Some((prefix, id)) = &self.prefix { - write!(&mut self.buffer, "{}[{:04}] ", prefix, id); + self.buffer.push_str(&format!("{}[{:04}]", prefix, id)); } else { - write!(&mut self.buffer, " "); + self.buffer.push_str(" "); } for _ in 0..self.indent * INDENT { @@ -149,10 +149,10 @@ impl<'a> Drop for KV<'a> { } // Leading dash - self.buffer.write_str("- "); + self.buffer.push_str("- "); // Key and value - self.buffer.write_str(self.temp_key); + self.buffer.push_str(self.temp_key); if self.temp_val.is_empty() { self.buffer.push(':'); } else { @@ -284,7 +284,7 @@ impl ASTWriter { self.kv(indent4).with_s_key("Name").with_ascii_val(&poly_var_id.value); } - self.kv(indent2).with_s_key("ReturnType").with_custom_val(|s| write_type(s, heap, &heap[def.return_type])); + self.kv(indent2).with_s_key("ReturnParserType").with_custom_val(|s| write_parser_type(s, heap, &heap[def.return_type])); self.kv(indent2).with_s_key("Parameters"); for param_id in &def.parameters { @@ -325,7 +325,7 @@ impl ASTWriter { self.kv(indent).with_id(PREFIX_PARAMETER_ID, param_id.0.index) .with_s_key("Parameter"); self.kv(indent2).with_s_key("Name").with_ascii_val(¶m.identifier.value); - self.kv(indent2).with_s_key("Type").with_custom_val(|w| write_type(w, heap, &heap[param.parser_type])); + self.kv(indent2).with_s_key("ParserType").with_custom_val(|w| write_parser_type(w, heap, &heap[param.parser_type])); } fn write_stmt(&mut self, heap: &Heap, stmt_id: StatementId, indent: usize) { @@ -511,6 +511,8 @@ impl ASTWriter { self.write_expr(heap, expr.right, indent3); self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); + self.kv(indent2).with_s_key("ConcreteType") + .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); }, Expression::Conditional(expr) => { self.kv(indent).with_id(PREFIX_CONDITIONAL_EXPR_ID, expr.this.0.index) @@ -523,6 +525,8 @@ impl ASTWriter { self.write_expr(heap, expr.false_expression, indent3); self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); + self.kv(indent2).with_s_key("ConcreteType") + .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); }, Expression::Binary(expr) => { 
self.kv(indent).with_id(PREFIX_BINARY_EXPR_ID, expr.this.0.index) @@ -534,6 +538,8 @@ impl ASTWriter { self.write_expr(heap, expr.right, indent3); self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); + self.kv(indent2).with_s_key("ConcreteType") + .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); }, Expression::Unary(expr) => { self.kv(indent).with_id(PREFIX_UNARY_EXPR_ID, expr.this.0.index) @@ -543,6 +549,8 @@ impl ASTWriter { self.write_expr(heap, expr.expression, indent3); self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); + self.kv(indent2).with_s_key("ConcreteType") + .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); }, Expression::Indexing(expr) => { self.kv(indent).with_id(PREFIX_INDEXING_EXPR_ID, expr.this.0.index) @@ -553,6 +561,8 @@ impl ASTWriter { self.write_expr(heap, expr.index, indent3); self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); + self.kv(indent2).with_s_key("ConcreteType") + .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); }, Expression::Slicing(expr) => { self.kv(indent).with_id(PREFIX_SLICING_EXPR_ID, expr.this.0.index) @@ -565,6 +575,8 @@ impl ASTWriter { self.write_expr(heap, expr.to_index, indent3); self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); + self.kv(indent2).with_s_key("ConcreteType") + .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); }, Expression::Select(expr) => { self.kv(indent).with_id(PREFIX_SELECT_EXPR_ID, expr.this.0.index) @@ -582,6 +594,8 @@ impl ASTWriter { } self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); + self.kv(indent2).with_s_key("ConcreteType") + .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); }, Expression::Array(expr) => { self.kv(indent).with_id(PREFIX_ARRAY_EXPR_ID, expr.this.0.index) @@ -593,6 +607,8 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); + self.kv(indent2).with_s_key("ConcreteType") + .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); }, Expression::Constant(expr) => { self.kv(indent).with_id(PREFIX_CONST_EXPR_ID, expr.this.0.index) @@ -609,6 +625,8 @@ impl ASTWriter { self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); + self.kv(indent2).with_s_key("ConcreteType") + .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); }, Expression::Call(expr) => { self.kv(indent).with_id(PREFIX_CALL_EXPR_ID, expr.this.0.index) @@ -638,6 +656,8 @@ impl ASTWriter { // Parent self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); + self.kv(indent2).with_s_key("ConcreteType") + .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); }, Expression::Variable(expr) => { self.kv(indent).with_id(PREFIX_VARIABLE_EXPR_ID, expr.this.0.index) @@ -647,6 +667,8 @@ impl ASTWriter { .with_opt_disp_val(expr.declaration.as_ref().map(|v| &v.index)); self.kv(indent2).with_s_key("Parent") .with_custom_val(|v| write_expression_parent(v, &expr.parent)); + self.kv(indent2).with_s_key("ConcreteType") + .with_custom_val(|v| write_concrete_type(v, heap, &expr.concrete_type)); } } } @@ -659,8 +681,8 @@ impl ASTWriter { .with_s_key("Local"); 
self.kv(indent2).with_s_key("Name").with_ascii_val(&local.identifier.value); - self.kv(indent2).with_s_key("Type") - .with_custom_val(|w| write_type(w, heap, &heap[local.parser_type])); + self.kv(indent2).with_s_key("ParserType") + .with_custom_val(|w| write_parser_type(w, heap, &heap[local.parser_type])); } //-------------------------------------------------------------------------- @@ -680,39 +702,39 @@ impl ASTWriter { fn write_option(target: &mut String, value: Option) { target.clear(); match &value { - Some(v) => write!(target, "Some({})", v), - None => target.write_str("None") + Some(v) => target.push_str(&format!("Some({})", v)), + None => target.push_str("None") }; } -fn write_type(target: &mut String, heap: &Heap, t: &ParserType) { +fn write_parser_type(target: &mut String, heap: &Heap, t: &ParserType) { use ParserTypeVariant as PTV; let mut embedded = Vec::new(); match &t.variant { - PTV::Input(id) => { target.write_str("in"); embedded.push(*id); } - PTV::Output(id) => { target.write_str("out"); embedded.push(*id) } - PTV::Array(id) => { target.write_str("array"); embedded.push(*id) } - PTV::Message => { target.write_str("msg"); } - PTV::Bool => { target.write_str("bool"); } - PTV::Byte => { target.write_str("byte"); } - PTV::Short => { target.write_str("short"); } - PTV::Int => { target.write_str("int"); } - PTV::Long => { target.write_str("long"); } - PTV::String => { target.write_str("str"); } - PTV::IntegerLiteral => { target.write_str("int_lit"); } - PTV::Inferred => { target.write_str("auto"); } + PTV::Input(id) => { target.push_str("in"); embedded.push(*id); } + PTV::Output(id) => { target.push_str("out"); embedded.push(*id) } + PTV::Array(id) => { target.push_str("array"); embedded.push(*id) } + PTV::Message => { target.push_str("msg"); } + PTV::Bool => { target.push_str("bool"); } + PTV::Byte => { target.push_str("byte"); } + PTV::Short => { target.push_str("short"); } + PTV::Int => { target.push_str("int"); } + PTV::Long => { target.push_str("long"); } + PTV::String => { target.push_str("str"); } + PTV::IntegerLiteral => { target.push_str("int_lit"); } + PTV::Inferred => { target.push_str("auto"); } PTV::Symbolic(symbolic) => { - target.write_str(&String::from_utf8_lossy(&symbolic.identifier.value)); + target.push_str(&String::from_utf8_lossy(&symbolic.identifier.value)); match symbolic.variant { Some(SymbolicParserTypeVariant::PolyArg(def_id, idx)) => { - target.write_str(&format!("{{def: {}, idx: {}}}", def_id.index, idx)); + target.push_str(&format!("{{def: {}, idx: {}}}", def_id.index, idx)); }, Some(SymbolicParserTypeVariant::Definition(def_id)) => { - target.write_str(&format!("{{def: {}}}", def_id.index)); + target.push_str(&format!("{{def: {}}}", def_id.index)); }, None => { - target.write_str("{None}"); + target.push_str("{None}"); } } embedded.extend(&symbolic.poly_args); @@ -720,15 +742,71 @@ fn write_type(target: &mut String, heap: &Heap, t: &ParserType) { }; if !embedded.is_empty() { - target.write_str("<"); + target.push_str("<"); for (idx, embedded_id) in embedded.into_iter().enumerate() { - if idx != 0 { target.write_str(", "); } - write_type(target, heap, &heap[embedded_id]); + if idx != 0 { target.push_str(", "); } + write_parser_type(target, heap, &heap[embedded_id]); } - target.write_str(">"); + target.push_str(">"); } } +fn write_concrete_type(target: &mut String, heap: &Heap, t: &ConcreteType) { + use ConcreteTypePart as CTP; + + fn write_concrete_part(target: &mut String, heap: &Heap, t: &ConcreteType, mut idx: usize) -> usize { + if idx >= 
t.parts.len() { + target.push_str("Programmer error: invalid concrete type tree"); + return idx; + } + + match &t.parts[idx] { + CTP::Void => target.push_str("void"), + CTP::Message => target.push_str("msg"), + CTP::Bool => target.push_str("bool"), + CTP::Byte => target.push_str("byte"), + CTP::Short => target.push_str("short"), + CTP::Int => target.push_str("int"), + CTP::Long => target.push_str("long"), + CTP::String => target.push_str("string"), + CTP::Array => { + idx = write_concrete_part(target, heap, t, idx + 1); + target.push_str("[]"); + }, + CTP::Slice => { + idx = write_concrete_part(target, heap, t, idx + 1); + target.push_str("[..]"); + } + CTP::Input => { + target.push_str("in<"); + idx = write_concrete_part(target, heap, t, idx + 1); + target.push('>'); + }, + CTP::Output => { + target.push_str("out<"); + idx = write_concrete_part(target, heap, t, idx + 1); + target.push('>') + }, + CTP::Instance(definition_id, num_embedded) => { + let identifier = heap[*definition_id].identifier(); + target.push_str(&String::from_utf8_lossy(&identifier.value)); + target.push('<'); + for idx_embedded in 0..*num_embedded { + if idx_embedded != 0 { + target.push_str(", "); + } + idx = write_concrete_part(target, heap, t, idx + 1); + } + target.push('>'); + } + } + + idx + 1 + } + + write_concrete_part(target, heap, t, 0); +} + fn write_expression_parent(target: &mut String, parent: &ExpressionParent) { use ExpressionParent as EP; diff --git a/src/protocol/eval.rs b/src/protocol/eval.rs index a16a05fa3183f31abe804a6d8ef8ba06dec471d7..85af7bf55525596e1cc67fe667120703a96cf940 100644 --- a/src/protocol/eval.rs +++ b/src/protocol/eval.rs @@ -1511,10 +1511,13 @@ impl Store { assert_eq!(2, expr.arguments.len()); let port_value = self.eval(h, ctx, expr.arguments[0])?; let msg_value = self.eval(h, ctx, expr.arguments[1])?; + println!("DEBUG: Handiling put({:?}, {:?})", port_value, msg_value); if ctx.did_put(port_value.clone()) { + println!("DEBUG: Already put..."); // Return bogus, replacing this at some point anyway Ok(Value::Message(MessageValue(None))) } else { + println!("DEBUG: Did not yet put..."); Err(EvalContinuation::Put(port_value, msg_value)) } } diff --git a/src/protocol/inputsource.rs b/src/protocol/inputsource.rs index 0be0127d9d4ddbf1d8b5fcd70adce61fabf40ed9..46e5bc7d0f64af8d69356b6a7baf07bdcadbaec3 100644 --- a/src/protocol/inputsource.rs +++ b/src/protocol/inputsource.rs @@ -212,7 +212,6 @@ pub struct ParseErrorStatement { impl ParseErrorStatement { fn from_source(error_type: ParseErrorType, source: &InputSource, position: InputPosition, msg: &str) -> Self { // Seek line start and end - debug_assert!(position.column < position.offset); let line_start = position.offset - (position.column - 1); let mut line_end = position.offset; while line_end < source.input.len() && source.input[line_end] != b'\n' { @@ -241,7 +240,7 @@ impl fmt::Display for ParseErrorStatement { ParseErrorType::Info => write!(f, " INFO: ")?, ParseErrorType::Error => write!(f, "ERROR: ")?, } - writeln!(f, "{}", &self.message); + writeln!(f, "{}", &self.message)?; // Write originating file/line/column if self.filename.is_empty() { diff --git a/src/protocol/lexer.rs b/src/protocol/lexer.rs index 2cf9f3b581cac741ce858461f489a9f2dce167d8..38ee597ee880bb951f22245d4b65d4523ce0e656 100644 --- a/src/protocol/lexer.rs +++ b/src/protocol/lexer.rs @@ -280,7 +280,6 @@ impl Lexer<'_> { || self.has_keyword(b"assert") || self.has_keyword(b"goto") || self.has_keyword(b"new") - || self.has_keyword(b"put") // TODO: @fix, should be a 
function, even though it has sideeffects } fn has_type_keyword(&self) -> bool { self.has_keyword(b"in") @@ -538,13 +537,13 @@ impl Lexer<'_> { } // Consume any polymorphic arguments that follow the type identifier + let mut backup_pos = self.source.pos(); if self.consume_whitespace(false).is_err() { return false; } if !self.maybe_consume_poly_args_spilled_without_pos_recovery() { return false; } - // Consume any array specifiers. Make sure we always leave the input // position at the end of the last array specifier if we do find a // valid type - let mut backup_pos = self.source.pos(); + if self.consume_whitespace(false).is_err() { return false; } while let Some(b'[') = self.source.next() { self.source.consume(); @@ -764,6 +763,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast()) } else { @@ -841,6 +841,7 @@ impl Lexer<'_> { true_expression, false_expression, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast()) } else { @@ -866,6 +867,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -890,6 +892,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -914,6 +917,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -938,6 +942,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -962,6 +967,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -986,6 +992,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -1016,6 +1023,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -1056,6 +1064,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -1088,6 +1097,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -1120,6 +1130,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -1156,6 +1167,7 @@ impl Lexer<'_> { operation, right, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -1207,6 +1219,7 @@ impl Lexer<'_> { operation, expression, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast()); } @@ -1233,6 +1246,7 @@ impl Lexer<'_> { operation, expression, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } else if self.has_string(b"--") { @@ -1247,6 +1261,7 @@ impl Lexer<'_> { operation, expression, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } else if self.has_string(b"[") { @@ -1273,6 +1288,7 @@ impl Lexer<'_> { from_index: index, to_index, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } else { @@ -1283,6 +1299,7 @@ impl Lexer<'_> { subject, index, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -1307,6 +1324,7 @@ impl Lexer<'_> { subject, field, parent: 
ExpressionParent::None, + concrete_type: ConcreteType::default(), }) .upcast(); } @@ -1354,6 +1372,7 @@ impl Lexer<'_> { position, elements, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), })) } fn has_constant(&self) -> bool { @@ -1400,6 +1419,7 @@ impl Lexer<'_> { position, value, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), })) } fn has_call_expression(&mut self) -> bool { @@ -1478,6 +1498,7 @@ impl Lexer<'_> { arguments, poly_args, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), })) } fn consume_variable_expression( @@ -1492,6 +1513,7 @@ impl Lexer<'_> { identifier, declaration: None, parent: ExpressionParent::None, + concrete_type: ConcreteType::default(), })) } diff --git a/src/protocol/mod.rs b/src/protocol/mod.rs index 3352aa183902a1d0ca745f59a08e780944a27e04..bd3189dcd0b4ad9299940d53507fbca0327686fd 100644 --- a/src/protocol/mod.rs +++ b/src/protocol/mod.rs @@ -299,7 +299,7 @@ impl EvalContext<'_> { EvalContext::Nonsync(_) => unreachable!("did_put in nonsync context"), EvalContext::Sync(context) => match port { Value::Output(OutputValue(port)) => { - context.is_firing(port).unwrap_or(false) + context.did_put_or_get(port) }, Value::Input(_) => unreachable!("did_put on input port"), _ => unreachable!("did_put on non-port value") diff --git a/src/protocol/parser/mod.rs b/src/protocol/parser/mod.rs index 55863e937755362f19a0a6deb35d74b09b6751d1..687930e678e56158ba29073e3c98cec22687b35e 100644 --- a/src/protocol/parser/mod.rs +++ b/src/protocol/parser/mod.rs @@ -11,6 +11,7 @@ use depth_visitor::*; use symbol_table::SymbolTable; use visitor::Visitor2; use visitor_linker::ValidityAndLinkerVisitor; +use type_resolver::{TypeResolvingVisitor, ResolveQueue}; use type_table::{TypeTable, TypeCtx}; use crate::protocol::ast::*; @@ -211,6 +212,14 @@ impl Parser { }; let mut visit = ValidityAndLinkerVisitor::new(); visit.visit_module(&mut ctx)?; + let mut type_visit = TypeResolvingVisitor::new(); + let mut queue = ResolveQueue::new(); + TypeResolvingVisitor::queue_module_definitions(&ctx, &mut queue); + while !queue.is_empty() { + let top = queue.pop().unwrap(); + println!("Resolving root={}, def={}, mono={:?}", top.root_id.index, top.definition_id.index, top.monomorph_types); + type_visit.handle_module_definition(&mut ctx, &mut queue, top)?; + } if let Err((position, message)) = Self::parse_inner(&mut self.heap, root_id) { return Err(ParseError2::new_error(&self.modules[0].source, position, &message)) diff --git a/src/protocol/parser/type_resolver.rs b/src/protocol/parser/type_resolver.rs index 8b7ea100689138c4099b292f21e0b39bca2e1ed8..286ab594464fe9c987748e0070279969b5839158 100644 --- a/src/protocol/parser/type_resolver.rs +++ b/src/protocol/parser/type_resolver.rs @@ -148,28 +148,30 @@ impl InferenceTypePart { } } -impl From for InferenceTypePart { - fn from(v: ConcreteTypeVariant) -> InferenceTypePart { - use ConcreteTypeVariant as CTV; +impl From for InferenceTypePart { + fn from(v: ConcreteTypePart) -> InferenceTypePart { + use ConcreteTypePart as CTP; use InferenceTypePart as ITP; match v { - CTV::Message => ITP::Message, - CTV::Bool => ITP::Bool, - CTV::Byte => ITP::Byte, - CTV::Short => ITP::Short, - CTV::Int => ITP::Int, - CTV::Long => ITP::Long, - CTV::String => ITP::String, - CTV::Array => ITP::Array, - CTV::Slice => ITP::Slice, - CTV::Input => ITP::Input, - CTV::Output => ITP::Output, - CTV::Instance(id, num) => ITP::Instance(id, num), + CTP::Void => ITP::Void, + CTP::Message => ITP::Message, + CTP::Bool 
=> ITP::Bool, + CTP::Byte => ITP::Byte, + CTP::Short => ITP::Short, + CTP::Int => ITP::Int, + CTP::Long => ITP::Long, + CTP::String => ITP::String, + CTP::Array => ITP::Array, + CTP::Slice => ITP::Slice, + CTP::Input => ITP::Input, + CTP::Output => ITP::Output, + CTP::Instance(id, num) => ITP::Instance(id, num), } } } +#[derive(Debug)] struct InferenceType { has_marker: bool, is_done: bool, @@ -409,8 +411,9 @@ impl InferenceType { let part_b = &type_b.parts[idx_b]; if part_a == part_b { - depth += part_a.depth_change(); - debug_assert_eq!(depth, part_b.depth_change()); + let depth_change = part_a.depth_change(); + depth += depth_change; + debug_assert_eq!(depth_change, part_b.depth_change()); idx_a += 1; idx_b += 1; continue; @@ -462,8 +465,9 @@ impl InferenceType { let template_part = &template[template_idx]; if to_infer_part == template_part { - depth += to_infer_part.depth_change(); - debug_assert_eq!(depth, template_part.depth_change()); + let depth_change = to_infer_part.depth_change(); + depth += depth_change; + debug_assert_eq!(depth_change, template_part.depth_change()); to_infer_idx += 1; template_idx += 1; continue; @@ -515,8 +519,9 @@ impl InferenceType { let part_b = &type_parts_b[idx_b]; if part_a == part_b { - depth += part_a.depth_change(); - debug_assert_eq!(depth, part_b.depth_change()); + let depth_change = part_a.depth_change(); + depth += depth_change; + debug_assert_eq!(depth_change, part_b.depth_change()); idx_a += 1; idx_b += 1; continue; @@ -544,15 +549,52 @@ impl InferenceType { true } - /// Returns a human-readable version of the type. Only use for debugging - /// or returning errors (since it allocates a string). + /// Performs the conversion of the inference type into a concrete type. + /// By calling this function you must make sure that no unspecified types + /// (e.g. Unknown or IntegerLike) exist in the type. + fn write_concrete_type(&self, concrete_type: &mut ConcreteType) { + use InferenceTypePart as ITP; + use ConcreteTypePart as CTP; + + debug_assert!(concrete_type.parts.is_empty()); + concrete_type.parts.reserve(self.parts.len()); + + for part in &self.parts { + let converted_part = match part { + ITP::Marker(_) => { continue; }, + ITP::Unknown | ITP::NumberLike | ITP::IntegerLike | ITP::ArrayLike | ITP::PortLike => { + debug_assert!(false, "Attempted to convert inference type part {:?} into concrete type", part); + unreachable!(); + }, + ITP::Void => CTP::Void, + ITP::Message => CTP::Message, + ITP::Bool => CTP::Bool, + ITP::Byte => CTP::Byte, + ITP::Short => CTP::Short, + ITP::Int => CTP::Int, + ITP::Long => CTP::Long, + ITP::String => CTP::String, + ITP::Array => CTP::Array, + ITP::Slice => CTP::Slice, + ITP::Input => CTP::Input, + ITP::Output => CTP::Output, + ITP::Instance(id, num) => CTP::Instance(*id, *num), + }; + concrete_type.parts.push(converted_part); + } + } + + /// Writes a human-readable version of the type to a string. Mostly a + /// function for interior use. fn write_display_name( buffer: &mut String, heap: &Heap, parts: &[InferenceTypePart], mut idx: usize ) -> usize { use InferenceTypePart as ITP; match &parts[idx] { - ITP::Marker(_) => {}, + ITP::Marker(_) => { + idx = Self::write_display_name(buffer, heap, parts, idx + 1) + }, ITP::Unknown => buffer.push_str("?"), ITP::NumberLike => buffer.push_str("num?"), ITP::IntegerLike => buffer.push_str("int?"), @@ -609,12 +651,15 @@ impl InferenceType { idx } + /// Returns the display name of a (part of) the type tree. Will allocate a + /// string. 
fn partial_display_name(heap: &Heap, parts: &[InferenceTypePart]) -> String { let mut buffer = String::with_capacity(parts.len() * 6); Self::write_display_name(&mut buffer, heap, parts, 0); buffer } + /// Returns the display name of the full type tree. Will allocate a string. fn display_name(&self, heap: &Heap) -> String { Self::partial_display_name(heap, &self.parts) } @@ -656,7 +701,7 @@ impl<'a> Iterator for InferenceTypeMarkerIter<'a> { } } -#[derive(PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq)] enum DualInferenceResult { Neither, // neither argument is clarified First, // first argument is clarified using the second one @@ -686,7 +731,7 @@ impl DualInferenceResult { } } -#[derive(PartialEq, Eq)] +#[derive(Debug, PartialEq, Eq)] enum SingleInferenceResult { Unmodified, Modified, @@ -699,32 +744,30 @@ enum DefinitionType{ Function(FunctionId), } +pub(crate) struct ResolveQueueElement { + pub(crate) root_id: RootId, + pub(crate) definition_id: DefinitionId, + pub(crate) monomorph_types: Vec, +} + +pub(crate) type ResolveQueue = Vec; + /// This particular visitor will recurse depth-first into the AST and ensures -/// that all expressions have the appropriate types. At the moment this implies: -/// -/// - Type checking arguments to unary and binary operators. -/// - Type checking assignment, indexing, slicing and select expressions. -/// - Checking arguments to functions and component instantiations. -/// -/// This will be achieved by slowly descending into the AST. At any given -/// expression we may depend on +/// that all expressions have the appropriate types. pub(crate) struct TypeResolvingVisitor { + // Current definition we're typechecking. definition_type: DefinitionType, + poly_vars: Vec, // Buffers for iteration over substatements and subexpressions stmt_buffer: Vec, expr_buffer: Vec, - // If instantiating a monomorph of a polymorphic proctype, then we store the - // values of the polymorphic values here. There should be as many, and in - // the same order as, in the definition's polyargs. - polyvars: Vec, // Mapping from parser type to inferred type. We attempt to continue to // specify these types until we're stuck or we've fully determined the type. var_types: HashMap, // types of variables expr_types: HashMap, // types of expressions extra_data: HashMap, // data for function call inference - // Keeping track of which expressions need to be reinferred because the // expressions they're linked to made progression on an associated type expr_queued: HashSet, @@ -748,9 +791,9 @@ impl TypeResolvingVisitor { pub(crate) fn new() -> Self { TypeResolvingVisitor{ definition_type: DefinitionType::None, + poly_vars: Vec::new(), stmt_buffer: Vec::with_capacity(STMT_BUFFER_INIT_CAPACITY), expr_buffer: Vec::with_capacity(EXPR_BUFFER_INIT_CAPACITY), - polyvars: Vec::new(), var_types: HashMap::new(), expr_types: HashMap::new(), extra_data: HashMap::new(), @@ -758,13 +801,58 @@ impl TypeResolvingVisitor { } } + // TODO: @cleanup Unsure about this, maybe a pattern will arise after + // a while. 
+ pub(crate) fn queue_module_definitions(ctx: &Ctx, queue: &mut ResolveQueue) { + let root_id = ctx.module.root_id; + let root = &ctx.heap.protocol_descriptions[root_id]; + for definition_id in &root.definitions { + let definition = &ctx.heap[*definition_id]; + match definition { + Definition::Function(definition) => { + if definition.poly_vars.is_empty() { + queue.push(ResolveQueueElement{ + root_id, + definition_id: *definition_id, + monomorph_types: Vec::new(), + }) + } + }, + Definition::Component(definition) => { + if definition.poly_vars.is_empty() { + queue.push(ResolveQueueElement{ + root_id, + definition_id: *definition_id, + monomorph_types: Vec::new(), + }) + } + }, + Definition::Enum(_) | Definition::Struct(_) => {}, + } + } + } + + pub(crate) fn handle_module_definition( + &mut self, ctx: &mut Ctx, queue: &mut ResolveQueue, element: ResolveQueueElement + ) -> VisitorResult { + // Visit the definition + debug_assert_eq!(ctx.module.root_id, element.root_id); + self.visit_definition(ctx, element.definition_id)?; + + // Keep resolving types + self.resolve_types(ctx, queue)?; + Ok(()) + } + fn reset(&mut self) { self.definition_type = DefinitionType::None; + self.poly_vars.clear(); self.stmt_buffer.clear(); self.expr_buffer.clear(); - self.polyvars.clear(); self.var_types.clear(); self.expr_types.clear(); + self.extra_data.clear(); + self.expr_queued.clear(); } } @@ -776,7 +864,7 @@ impl Visitor2 for TypeResolvingVisitor { self.definition_type = DefinitionType::Component(id); let comp_def = &ctx.heap[id]; - debug_assert_eq!(comp_def.poly_vars.len(), self.polyvars.len(), "component polyvars do not match imposed polyvars"); + debug_assert_eq!(comp_def.poly_vars.len(), self.poly_vars.len(), "component polyvars do not match imposed polyvars"); for param_id in comp_def.parameters.clone() { let param = &ctx.heap[param_id]; @@ -794,7 +882,7 @@ impl Visitor2 for TypeResolvingVisitor { self.definition_type = DefinitionType::Function(id); let func_def = &ctx.heap[id]; - debug_assert_eq!(func_def.poly_vars.len(), self.polyvars.len(), "function polyvars do not match imposed polyvars"); + debug_assert_eq!(func_def.poly_vars.len(), self.poly_vars.len(), "function polyvars do not match imposed polyvars"); for param_id in func_def.parameters.clone() { let param = &ctx.heap[param_id]; @@ -814,7 +902,7 @@ impl Visitor2 for TypeResolvingVisitor { let block = &ctx.heap[id]; for stmt_id in block.statements.clone() { - self.visit_stmt(ctx, stmt_id); + self.visit_stmt(ctx, stmt_id)?; } Ok(()) @@ -1094,14 +1182,149 @@ macro_rules! 
debug_assert_ptrs_distinct { }; } -enum TypeConstraintResult { - Progress, // Success: Made progress in applying constraints - NoProgess, // Success: But did not make any progress in applying constraints - ErrExprType, // Error: Expression type did not match the argument(s) of the expression type - ErrArgType, // Error: Expression argument types did not match -} - impl TypeResolvingVisitor { + fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError2> { + // Keep inferring until we can no longer make any progress + println!("DEBUG: Resolve queue is {:?}", &self.expr_queued); + while let Some(next_expr_id) = self.expr_queued.iter().next() { + let next_expr_id = *next_expr_id; + self.expr_queued.remove(&next_expr_id); + self.progress_expr(ctx, next_expr_id)?; + } + + // Should have inferred everything + for (expr_id, expr_type) in self.expr_types.iter() { + if !expr_type.is_done { + let mut buffer = std::fs::File::create("type_debug.txt").unwrap(); + use crate::protocol::ast_printer::ASTWriter; + let mut w = ASTWriter::new(); + w.write_ast(&mut buffer, &ctx.heap); + // TODO: Auto-inference of integerlike types + let expr = &ctx.heap[*expr_id]; + return Err(ParseError2::new_error( + &ctx.module.source, expr.position(), + &format!( + "Could not fully infer the type of this expression (got '{}')", + expr_type.display_name(&ctx.heap) + ) + )) + } + + let concrete_type = ctx.heap[*expr_id].get_type_mut(); + expr_type.write_concrete_type(concrete_type); + } + + // Check all things we need to monomorphize + // TODO: Struct/enum/union monomorphization + for (call_expr_id, extra_data) in self.extra_data.iter() { + if extra_data.poly_vars.is_empty() { continue; } + + // We have a polymorph + let mut monomorph_types = Vec::with_capacity(extra_data.poly_vars.len()); + for (poly_idx, poly_type) in extra_data.poly_vars.iter().enumerate() { + if !poly_type.is_done { + // TODO: Single clean function for function signatures and polyvars. 
+ // TODO: Better error message + let expr = &ctx.heap[*call_expr_id]; + return Err(ParseError2::new_error( + &ctx.module.source, expr.position(), + &format!( + "Could not fully infer the type of polymorphic variable {} of this expression (got '{}')", + poly_idx, poly_type.display_name(&ctx.heap) + ) + )) + } + + let mut concrete_type = ConcreteType::default(); + poly_type.write_concrete_type(&mut concrete_type); + monomorph_types.insert(poly_idx, concrete_type); + } + + // Resolve to call expression's definition + let call_expr = if let Expression::Call(call_expr) = &ctx.heap[*call_expr_id] { + call_expr + } else { + todo!("implement different kinds of polymorph expressions"); + }; + + if let Method::Symbolic(symbolic) = &call_expr.method { + let definition_id = symbolic.definition.unwrap(); + let root_id = ctx.types + .get_base_definition(&definition_id) + .unwrap() + .ast_root; + + queue.push(ResolveQueueElement{ + root_id, + definition_id, + monomorph_types, + }) + } + } + + // Finally, if the currently resolved definition is a monomoprh, then we + // add it to the type table + if !self.poly_vars.is_empty() { + let definition_id = match &self.definition_type { + DefinitionType::None => unreachable!(), + DefinitionType::Function(id) => id.upcast(), + DefinitionType::Component(id) => id.upcast(), + }; + ctx.types.instantiate_monomorph(&definition_id, &self.poly_vars) + } + + Ok(()) + } + + fn progress_expr(&mut self, ctx: &mut Ctx, id: ExpressionId) -> Result<(), ParseError2> { + match &ctx.heap[id] { + Expression::Assignment(expr) => { + let id = expr.this; + self.progress_assignment_expr(ctx, id) + }, + Expression::Conditional(expr) => { + let id = expr.this; + self.progress_conditional_expr(ctx, id) + }, + Expression::Binary(expr) => { + let id = expr.this; + self.progress_binary_expr(ctx, id) + }, + Expression::Unary(expr) => { + let id = expr.this; + self.progress_unary_expr(ctx, id) + }, + Expression::Indexing(expr) => { + let id = expr.this; + self.progress_indexing_expr(ctx, id) + }, + Expression::Slicing(expr) => { + let id = expr.this; + self.progress_slicing_expr(ctx, id) + }, + Expression::Select(expr) => { + let id = expr.this; + self.progress_select_expr(ctx, id) + }, + Expression::Array(expr) => { + let id = expr.this; + self.progress_array_expr(ctx, id) + }, + Expression::Constant(expr) => { + let id = expr.this; + self.progress_constant_expr(ctx, id) + }, + Expression::Call(expr) => { + let id = expr.this; + self.progress_call_expr(ctx, id) + }, + Expression::Variable(expr) => { + let id = expr.this; + self.progress_variable_expr(ctx, id) + } + } + } + fn progress_assignment_expr(&mut self, ctx: &mut Ctx, id: AssignmentExpressionId) -> Result<(), ParseError2> { use AssignmentOperator as AO; @@ -1347,6 +1570,7 @@ impl TypeResolvingVisitor { // Note that if the array type progressed the type of the arguments, // then we should enqueue this progression function again + // TODO: @fix Make apply_equal_n accept a start idx as well if arg_progress { self.queue_expr(upcast_id); } } @@ -1375,6 +1599,7 @@ impl TypeResolvingVisitor { // polymorphic struct/enum/union literals. These likely follow the same // pattern as here. 
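For context, a rough sketch of how a fully inferred polymorphic call ends up on the resolve queue (editor's illustration, not part of the patch): for a call to `T some_function(T a, T b)`, the signature type built for each argument starts out roughly as `[InferenceTypePart::Marker(0), InferenceTypePart::Unknown]`; once inference pins the marked section down (say, to byte), resolve_types converts polymorphic variable 0 into a ConcreteType and queues the callee for a monomorphized type-checking pass. The names `callee_root_id` and `callee_definition_id` below are hypothetical placeholders, and the `parts` field is crate-internal:

    // Hypothetical sketch: queueing a monomorph of `some_function` with T = byte.
    let monomorph_types = vec![
        ConcreteType { parts: vec![ConcreteTypePart::Byte] }, // inferred T = byte
    ];
    queue.push(ResolveQueueElement {
        root_id: callee_root_id,             // hypothetical: root of the callee's module
        definition_id: callee_definition_id, // hypothetical: the callee's DefinitionId
        monomorph_types,
    });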
fn progress_call_expr(&mut self, ctx: &mut Ctx, id: CallExpressionId) -> Result<(), ParseError2> { + println!("DEBUG: Processing call {}", id.0.index); let upcast_id = id.upcast(); let expr = &ctx.heap[id]; let extra = self.extra_data.get_mut(&upcast_id).unwrap(); @@ -1392,6 +1617,8 @@ impl TypeResolvingVisitor { ctx, upcast_id, signature_type, 0, argument_type, 0 )?; + println!("DEBUG Arg {}: {} <--> {}", arg_idx, signature_type.display_name(&ctx.heap), unsafe{&*argument_type}.display_name(&ctx.heap)); + if progress_sig { // Progressed signature, so also apply inference to the // polymorph types using the markers @@ -1405,6 +1632,7 @@ impl TypeResolvingVisitor { Ok(false) => {}, Err(()) => { poly_infer_error = true; } } + println!("DEBUG Poly {}: {} <--> {}", poly_idx, polymorph_type.display_name(&ctx.heap), InferenceType::partial_display_name(&ctx.heap, poly_section)); } } if progress_arg { @@ -1420,6 +1648,8 @@ impl TypeResolvingVisitor { ctx, upcast_id, signature_type, 0, expr_type, 0 )?; + println!("DEBUG Ret {} <--> {}", signature_type.display_name(&ctx.heap), unsafe{&*expr_type}.display_name(&ctx.heap)); + if progress_sig { // As above: apply inference to polyargs as well debug_assert!(signature_type.has_marker, "progress on signature return type without markers"); @@ -1432,6 +1662,7 @@ impl TypeResolvingVisitor { Ok(false) => {}, Err(()) => { poly_infer_error = true; } } + println!("DEBUG Poly {}: {} <--> {}", poly_idx, polymorph_type.display_name(&ctx.heap), InferenceType::partial_display_name(&ctx.heap, poly_section)); } } if progress_expr { @@ -1967,12 +2198,13 @@ impl TypeResolvingVisitor { debug_assert!(symbolic.poly_args.is_empty()); // TODO: @hkt if parser_type_in_body { - debug_assert!(arg_idx < self.polyvars.len()); - for concrete_part in &self.polyvars[arg_idx].v { + debug_assert!(arg_idx < self.poly_vars.len()); + for concrete_part in &self.poly_vars[arg_idx].parts { infer_type.push(ITP::from(*concrete_part)); } } else { has_markers = true; + has_inferred = true; infer_type.push(ITP::Marker(arg_idx)); infer_type.push(ITP::Unknown); } @@ -2230,4 +2462,74 @@ impl TypeResolvingVisitor { unreachable!("construct_poly_arg_error without actual error found?") } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::protocol::arena::Id; + use InferenceTypePart as ITP; + use InferenceType as IT; + + #[test] + fn test_single_part_inference() { + // lhs argument inferred from rhs + let pairs = [ + (ITP::NumberLike, ITP::Byte), + (ITP::IntegerLike, ITP::Int), + (ITP::Unknown, ITP::Message), + (ITP::Unknown, ITP::String) + ]; + for (lhs, rhs) in pairs.iter() { + // Using infer-both + let mut lhs_type = IT::new(false, false, vec![lhs.clone()]); + let mut rhs_type = IT::new(false, true, vec![rhs.clone()]); + let result = unsafe{ IT::infer_subtrees_for_both_types( + &mut lhs_type, 0, &mut rhs_type, 0 + ) }; + assert_eq!(DualInferenceResult::First, result); + assert_eq!(lhs_type.parts, rhs_type.parts); + + // Using infer-single + let mut lhs_type = IT::new(false, false, vec![lhs.clone()]); + let mut rhs_type = IT::new(false, true, vec![rhs.clone()]); + let result = unsafe{ IT::infer_subtree_for_single_type( + &mut lhs_type, 0, &rhs_type.parts, 0 + ) }; + assert_eq!(SingleInferenceResult::Modified, result); + assert_eq!(lhs_type.parts, rhs_type.parts); + } + } + + #[test] + fn test_multi_part_inference() { + let pairs = [ + (vec![ITP::ArrayLike, ITP::NumberLike], vec![ITP::Slice, ITP::Byte]), + (vec![ITP::Unknown], vec![ITP::Input, ITP::Array, ITP::String]), + (vec![ITP::PortLike, 
ITP::Int], vec![ITP::Input, ITP::Int]), + (vec![ITP::Unknown], vec![ITP::Output, ITP::Int]), + ( + vec![ITP::Instance(Id::new(0), 2), ITP::Input, ITP::Unknown, ITP::Output, ITP::Unknown], + vec![ITP::Instance(Id::new(0), 2), ITP::Input, ITP::Array, ITP::Int, ITP::Output, ITP::Int] + ) + ]; + + for (lhs, rhs) in pairs.iter() { + let mut lhs_type = IT::new(false, false, lhs.clone()); + let mut rhs_type = IT::new(false, false, rhs.clone()); + let result = unsafe{ IT::infer_subtrees_for_both_types( + &mut lhs_type, 0, &mut rhs_type, 0 + ) }; + assert_eq!(DualInferenceResult::First, result); + assert_eq!(lhs_type.parts, rhs_type.parts); + + let mut lhs_type = IT::new(false, false, lhs.clone()); + let mut rhs_type = IT::new(false, false, rhs.clone()); + let result = unsafe{ IT::infer_subtree_for_single_type( + &mut lhs_type, 0, &rhs_type.parts, 0 + ) }; + assert_eq!(SingleInferenceResult::Modified, result); + assert_eq!(lhs_type.parts, rhs_type.parts) + } + } } \ No newline at end of file diff --git a/src/protocol/parser/type_table.rs b/src/protocol/parser/type_table.rs index 2ce2d93f242b7245f5a6f75e898407ab370e4600..da4a18eb37c03393491c17ddd2ba09e0893a20e2 100644 --- a/src/protocol/parser/type_table.rs +++ b/src/protocol/parser/type_table.rs @@ -106,12 +106,14 @@ impl std::fmt::Display for TypeClass { /// field, enum variant, etc.). Otherwise the polymorphic argument is just a /// marker and does not influence the bytesize of the type. pub struct DefinedType { + pub(crate) ast_root: RootId, pub(crate) ast_definition: DefinitionId, pub(crate) definition: DefinedTypeVariant, pub(crate) poly_args: Vec, pub(crate) is_polymorph: bool, pub(crate) is_pointerlike: bool, - pub(crate) monomorphs: Vec, // TODO: ? + // TODO: @optimize + pub(crate) monomorphs: Vec>, } pub enum DefinedTypeVariant { @@ -236,31 +238,6 @@ impl TypeIterator { } } -#[derive(Copy, Clone)] -pub(crate) enum ConcreteTypeVariant { - // No subtypes - Message, - Bool, - Byte, - Short, - Int, - Long, - String, - // One subtype - Array, - Slice, - Input, - Output, - // Multiple subtypes (definition of thing and number of poly args) - Instance(DefinitionId, usize) -} - -pub(crate) struct ConcreteType { - // serialized version (interpret as serialized depth-first tree, with - // variant indicating the number of children (subtypes)) - pub(crate) v: Vec -} - /// Result from attempting to resolve a `ParserType` using the symbol table and /// the type table. enum ResolveResult { @@ -345,6 +322,22 @@ impl TypeTable { self.lookup.get(&definition_id) } + /// Instantiates a monomorph for a given base definition. + pub(crate) fn instantiate_monomorph(&mut self, definition_id: &DefinitionId, monomorph: &Vec) { + debug_assert!( + self.lookup.contains_key(definition_id), + "attempting to instantiate monomorph of definition unknown to type table" + ); + let definition = self.lookup.get_mut(definition_id).unwrap(); + debug_assert_eq!( + monomorph.len(), definition.poly_args.len(), + "attempting to instantiate monomorph with {} types, but definition requires {}", + monomorph.len(), definition.poly_args.len() + ); + + definition.monomorphs.push(monomorph.clone()) + } + /// This function will resolve just the basic definition of the type, it /// will not handle any of the monomorphized instances of the type. 
fn resolve_base_definition<'a>(&'a mut self, ctx: &mut TypeCtx, definition_id: DefinitionId) -> Result<(), ParseError2> { @@ -474,6 +467,7 @@ impl TypeTable { // Insert base definition in type table self.lookup.insert(definition_id, DefinedType { + ast_root: root_id, ast_definition: definition_id, definition: DefinedTypeVariant::Union(UnionType{ variants, @@ -525,6 +519,7 @@ impl TypeTable { // polymorphic variables, they might still be present as tokens let definition_id = definition.this.upcast(); self.lookup.insert(definition_id, DefinedType { + ast_root: root_id, ast_definition: definition_id, definition: DefinedTypeVariant::Enum(EnumType{ variants, @@ -581,6 +576,7 @@ impl TypeTable { let is_polymorph = poly_args.iter().any(|arg| arg.is_in_use); self.lookup.insert(definition_id, DefinedType{ + ast_root: root_id, ast_definition: definition_id, definition: DefinedTypeVariant::Struct(StructType{ fields, @@ -651,6 +647,7 @@ impl TypeTable { // Construct entry in type table self.lookup.insert(definition_id, DefinedType{ + ast_root: root_id, ast_definition: definition_id, definition: DefinedTypeVariant::Function(FunctionType{ return_type, @@ -712,6 +709,7 @@ impl TypeTable { // Construct entry in type table self.lookup.insert(definition_id, DefinedType{ + ast_root: root_id, ast_definition: definition_id, definition: DefinedTypeVariant::Component(ComponentType{ variant: component_variant, diff --git a/src/protocol/parser/visitor_linker.rs b/src/protocol/parser/visitor_linker.rs index c013cab65a594ac86aa9d90cb7eecc120dd0691b..d230550a16d658bae1e3d4a02538bc21f88bcd92 100644 --- a/src/protocol/parser/visitor_linker.rs +++ b/src/protocol/parser/visitor_linker.rs @@ -217,7 +217,7 @@ impl Visitor2 for ValidityAndLinkerVisitor { } else { let variable_id = ctx.heap[id].variable; let parser_type_id = ctx.heap[variable_id].parser_type; - self.visit_parser_type(ctx, parser_type_id); + self.visit_parser_type(ctx, parser_type_id)?; debug_assert_eq!(self.expr_parent, ExpressionParent::None); self.expr_parent = ExpressionParent::Memory(id); @@ -1523,7 +1523,7 @@ impl ValidityAndLinkerVisitor { self.parser_type_buffer.extend(&call_expr.poly_args); while self.parser_type_buffer.len() > old_num_types { let parser_type_id = self.parser_type_buffer.pop().unwrap(); - self.visit_parser_type(ctx, parser_type_id); + self.visit_parser_type(ctx, parser_type_id)?; } self.parser_type_buffer.truncate(old_num_types); Ok(()) diff --git a/src/runtime/communication.rs b/src/runtime/communication.rs index c7b00ba871ead94626a13b65778b09e2d077e121..fbc0f14ad660eaa7757fb1eaff39e4407c631f36 100644 --- a/src/runtime/communication.rs +++ b/src/runtime/communication.rs @@ -1416,6 +1416,10 @@ impl SyncProtoContext<'_> { self.predicate.query(var).map(SpecVal::is_firing) } + pub(crate) fn did_put_or_get(&mut self, port: PortId) -> bool { + self.branch_inner.did_put_or_get.contains(&port) + } + // The component calls the runtime back, trying to inspect a port's message pub(crate) fn read_msg(&mut self, port: PortId) -> Option<&Payload> { let maybe_msg = self.branch_inner.inbox.get(&port); diff --git a/src/runtime/mod.rs b/src/runtime/mod.rs index d0c89f190b75a73f8a246bb8d4fc762a8c864d24..7f9122e289fe4b875480eb968394b8ca5aa0e7a4 100644 --- a/src/runtime/mod.rs +++ b/src/runtime/mod.rs @@ -847,7 +847,7 @@ impl RoundCtx { self.getter_push(getter, msg); } else { log!(cu.logger(), "Putter {:?} has no known peer!", putter); - panic!("Putter {:?} has no known peer!"); + panic!("Putter {:?} has no known peer!", putter); } } } diff --git 
a/src/runtime/tests.rs b/src/runtime/tests.rs index 0218068ac87f8f7a23e48e27ea96d45a311ea7ad..7d6ae12b435c1331daee790ba951677275e52ebb 100644 --- a/src/runtime/tests.rs +++ b/src/runtime/tests.rs @@ -1401,11 +1401,11 @@ fn eq_no_causality() { T something = a; return something; } - primitive quick_test(in a, in b) { + primitive quick_test(in a, in b) { // msg ma = null; - msg test1 = null; - msg test2 = null; - msg ma = some_function(test1, test2); + auto test1 = 0; + auto test2 = 0; + auto ma = some_function(test1, test2); while(true) synchronous { if (fires(a)) { ma = get(a);
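A closing note on the ConcreteType representation this patch introduces in ast.rs: `parts` is a pre-order (depth-first) serialization of the type tree, where each part implies how many nested types follow it. A minimal sketch (editor's illustration, not part of the patch; it assumes crate-internal access to the pub(crate) `parts` field, and the module path is taken from the patch's own imports):

    use crate::protocol::ast::{ConcreteType, ConcreteTypePart};

    // An expression of type `in<int>[]` (an array of input ports carrying ints)
    // serializes as three parts, outermost first.
    let example = ConcreteType {
        parts: vec![
            ConcreteTypePart::Array, // one nested part follows: the element type
            ConcreteTypePart::Input, // one nested part follows: the port payload
            ConcreteTypePart::Int,   // leaf
        ],
    };
    assert_eq!(3, example.parts.len());
    // write_concrete_type in ast_printer.rs walks these parts recursively and
    // renders this value as "in<int>[]" in the AST dump.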