From fc2d65a1b906b29dca80a87ecb8918fe518ff15e 2021-05-04 14:56:16 From: MH Date: 2021-05-04 14:56:16 Subject: [PATCH] WIP on fixing all newly introduced bugs --- diff --git a/src/collections/scoped_buffer.rs b/src/collections/scoped_buffer.rs index a699e66b62879a0e91d57593deb425a243379b2c..0c98be33aa21af26c96f69b8b0bb9ce22025ac68 100644 --- a/src/collections/scoped_buffer.rs +++ b/src/collections/scoped_buffer.rs @@ -1,5 +1,3 @@ -use std::iter::FromIterator; - /// scoped_buffer.rs /// /// Solves the common pattern where we are performing some kind of recursive @@ -10,13 +8,17 @@ use std::iter::FromIterator; /// It is unsafe because we're using pointers to circumvent borrowing rules in /// the name of code cleanliness. The correctness of use is checked in debug /// mode. + +use std::iter::FromIterator; + pub(crate) struct ScopedBuffer { pub inner: Vec, } /// A section of the buffer. Keeps track of where we started the section. When /// done with the section one must call `into_vec` or `forget` to remove the -/// section from the underlying buffer. +/// section from the underlying buffer. This will also be done upon dropping the +/// ScopedSection in case errors are being handled. 
pub(crate) struct ScopedSection { inner: *mut Vec, start_size: u32, @@ -75,16 +77,28 @@ impl ScopedSection { } #[inline] - pub(crate) fn forget(self) { + pub(crate) fn forget(mut self) { let vec = unsafe{&mut *self.inner}; - debug_assert_eq!(vec.len(), self.cur_size as usize, "trying to forget section, but size is larger than expected"); + if cfg!(debug_assertions) { + debug_assert_eq!( + vec.len(), self.cur_size as usize, + "trying to forget section, but size is larger than expected" + ); + self.cur_size = self.start_size; + } vec.truncate(self.start_size as usize); } #[inline] - pub(crate) fn into_vec(self) -> Vec { + pub(crate) fn into_vec(mut self) -> Vec { let vec = unsafe{&mut *self.inner}; - debug_assert_eq!(vec.len(), self.cur_size as usize, "trying to turn section into vec, but size is larger than expected"); + if cfg!(debug_assertions) { + debug_assert_eq!( + vec.len(), self.cur_size as usize, + "trying to turn section into vec, but size is larger than expected" + ); + self.cur_size = self.start_size; + } let section = Vec::from_iter(vec.drain(self.start_size as usize..)); section } @@ -102,8 +116,8 @@ impl std::ops::Index for ScopedSection { #[cfg(debug_assertions)] impl Drop for ScopedSection { fn drop(&mut self) { - // Make sure that the data was actually taken out of the scoped section - let vec = unsafe{&*self.inner}; - debug_assert_eq!(vec.len(), self.start_size as usize); + let mut vec = unsafe{&mut *self.inner}; + debug_assert_eq!(vec.len(), self.cur_size as usize); + vec.truncate(self.start_size as usize); } } \ No newline at end of file diff --git a/src/protocol/input_source.rs b/src/protocol/input_source.rs index c6abc1f440c844c938344f1d180c9695e20b594b..dc417536f7ceec7def8586545923f51961a16642 100644 --- a/src/protocol/input_source.rs +++ b/src/protocol/input_source.rs @@ -264,9 +264,9 @@ impl ParseErrorStatement { Self{ statement_kind, context_kind, - start_line: first_line_start, + start_line: span.begin.line, start_column, - end_line: 
last_line_start, + end_line: span.end.line, end_column, filename: source.filename.clone(), context, @@ -319,8 +319,10 @@ impl fmt::Display for ParseErrorStatement { fn extend_annotation(first_col: u32, last_col: u32, source: &str, target: &mut String, extend_char: char) { debug_assert!(first_col > 0 && last_col > first_col); - for (char_idx, char) in source.chars().enumerate().skip(first_col as usize - 1) { - if char_idx == last_col as usize { + let first_idx = first_col as usize - 1; + let last_idx = last_col as usize - 1; + for (char_idx, char) in source.chars().enumerate().skip(first_idx) { + if char_idx == last_idx as usize { break; } @@ -342,12 +344,13 @@ impl fmt::Display for ParseErrorStatement { ContextKind::SingleLine => { // Write single line of context with indicator for the offending // span underneath. + context.push_str(" | "); transform_context(&self.context, &mut context); context.push('\n'); f.write_str(&context)?; annotation.push_str(" | "); - extend_annotation(1, self.start_column, &self.context, &mut annotation, ' '); + extend_annotation(1, self.start_column + 1, &self.context, &mut annotation, ' '); extend_annotation(self.start_column, self.end_column, &self.context, &mut annotation, '~'); annotation.push('\n'); diff --git a/src/protocol/parser/mod.rs b/src/protocol/parser/mod.rs index 3019bb25b7f8ac5560829531525e454724b2ca41..1043e598de85fe3febf0e85280c49ae68f0ca389 100644 --- a/src/protocol/parser/mod.rs +++ b/src/protocol/parser/mod.rs @@ -133,9 +133,12 @@ impl Parser { // Continue compilation with the remaining phases now that the types // are all in the type table for module_idx in 0..self.modules.len() { + // TODO: Remove the entire Visitor abstraction. It really doesn't + // make sense considering the amount of special handling we do + // in each pass. 
let mut ctx = visitor::Ctx{ heap: &mut self.heap, - module: &self.modules[module_idx], + module: &mut self.modules[module_idx], symbols: &mut self.symbol_table, types: &mut self.type_table, }; @@ -144,7 +147,7 @@ impl Parser { // Perform typechecking on all modules let mut queue = ResolveQueue::new(); - for module in &self.modules { + for module in &mut self.modules { let ctx = visitor::Ctx{ heap: &mut self.heap, module, @@ -157,7 +160,7 @@ impl Parser { let top = queue.pop().unwrap(); let mut ctx = visitor::Ctx{ heap: &mut self.heap, - module: &self.modules[top.root_id.index as usize], + module: &mut self.modules[top.root_id.index as usize], symbols: &mut self.symbol_table, types: &mut self.type_table, }; diff --git a/src/protocol/parser/pass_definitions.rs b/src/protocol/parser/pass_definitions.rs index 3da96eecbed9bbe208c92967d27e68b71e51c73f..05c1a88309ba763003a162cb5b8b3ebf14f6a878 100644 --- a/src/protocol/parser/pass_definitions.rs +++ b/src/protocol/parser/pass_definitions.rs @@ -58,9 +58,10 @@ impl PassDefinitions { self.visit_range(modules, module_idx, ctx, range_idx_usize)?; - match cur_range.next_sibling_idx { - Some(idx) => { range_idx = idx; }, - None => { break; }, + if cur_range.next_sibling_idx == NO_SIBLING { + break; + } else { + range_idx = cur_range.next_sibling_idx; } } @@ -94,7 +95,7 @@ impl PassDefinitions { Some(KW_PRIMITIVE) | Some(KW_COMPOSITE) => self.visit_component_definition(module, &mut iter, ctx)?, _ => return Err(ParseError::new_error_str_at_pos( &module.source, iter.last_valid_pos(), - "unexpected symbol, expected some kind of type or procedure definition" + "unexpected symbol, expected a keyword marking the start of a definition" )), } } @@ -110,6 +111,7 @@ impl PassDefinitions { let module_scope = SymbolScope::Module(module.root_id); let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text) .unwrap().variant.as_definition().definition_id; + self.cur_definition = definition_id; // Parse struct 
definition consume_polymorphic_vars_spilled(&module.source, iter, ctx)?; @@ -151,6 +153,7 @@ impl PassDefinitions { let module_scope = SymbolScope::Module(module.root_id); let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text) .unwrap().variant.as_definition().definition_id; + self.cur_definition = definition_id; // Parse enum definition consume_polymorphic_vars_spilled(&module.source, iter, ctx)?; @@ -189,6 +192,7 @@ impl PassDefinitions { let module_scope = SymbolScope::Module(module.root_id); let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text) .unwrap().variant.as_definition().definition_id; + self.cur_definition = definition_id; // Parse union definition consume_polymorphic_vars_spilled(&module.source, iter, ctx)?; @@ -244,6 +248,9 @@ impl PassDefinitions { let module_scope = SymbolScope::Module(module.root_id); let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text) .unwrap().variant.as_definition().definition_id; + self.cur_definition = definition_id; + + consume_polymorphic_vars_spilled(&module.source, iter, ctx)?; // Parse function's argument list let mut parameter_section = self.parameters.start_section(); @@ -295,6 +302,9 @@ impl PassDefinitions { let module_scope = SymbolScope::Module(module.root_id); let definition_id = ctx.symbols.get_symbol_by_name_defined_in_scope(module_scope, ident_text) .unwrap().variant.as_definition().definition_id; + self.cur_definition = definition_id; + + consume_polymorphic_vars_spilled(&module.source, iter, ctx)?; // Parse component's argument list let mut parameter_section = self.parameters.start_section(); @@ -357,7 +367,7 @@ impl PassDefinitions { let id = self.consume_block_statement(module, iter, ctx)?; section.push(id.upcast()); } else if next == TokenKind::Ident { - let (ident, _) = consume_any_ident(&module.source, iter)?; + let ident = peek_ident(&module.source, iter).unwrap(); if ident == 
KW_STMT_IF { // Consume if statement and place end-if statement directly // after it. diff --git a/src/protocol/parser/pass_imports.rs b/src/protocol/parser/pass_imports.rs index a480ca60baeb3fef9c7eeb2c6447ba3f539e0fc7..94b931c23cfed36274c369370d37d0ec763e087e 100644 --- a/src/protocol/parser/pass_imports.rs +++ b/src/protocol/parser/pass_imports.rs @@ -39,9 +39,10 @@ impl PassImport { self.visit_import_range(modules, module_idx, ctx, range_idx_usize)?; } - match cur_range.next_sibling_idx { - Some(idx) => { range_idx = idx; }, - None => { break; } + if cur_range.next_sibling_idx == NO_SIBLING { + break; + } else { + range_idx = cur_range.next_sibling_idx; } } diff --git a/src/protocol/parser/pass_symbols.rs b/src/protocol/parser/pass_symbols.rs index 8d7fa84e4423d53074dfd094c87d3cc0e471a84a..e3ba3fb5e77091381088069aac78dc7ba0f37f7f 100644 --- a/src/protocol/parser/pass_symbols.rs +++ b/src/protocol/parser/pass_symbols.rs @@ -80,9 +80,10 @@ impl PassSymbols { self.visit_pragma_range(modules, module_idx, ctx, range_idx_usize)?; } - match next_sibling_idx { - Some(idx) => { range_idx = idx; }, - None => { break; }, + if next_sibling_idx == NO_SIBLING { + break; + } else { + range_idx = next_sibling_idx; } } diff --git a/src/protocol/parser/pass_tokenizer.rs b/src/protocol/parser/pass_tokenizer.rs index 904b0d4526e1adfd73f66476b58a75d23a521da9..82340d8079374585e81141422d181c1244db8818 100644 --- a/src/protocol/parser/pass_tokenizer.rs +++ b/src/protocol/parser/pass_tokenizer.rs @@ -44,15 +44,15 @@ impl PassTokenizer { // see `push_range` and `pop_range`. 
self.stack_idx = 0; target.ranges.push(TokenRange{ - parent_idx: 0, + parent_idx: NO_RELATION, range_kind: TokenRangeKind::Module, curly_depth: 0, start: 0, end: 0, num_child_ranges: 0, - first_child_idx: 0, - last_child_idx: 0, - next_sibling_idx: None, + first_child_idx: NO_RELATION, + last_child_idx: NO_RELATION, + next_sibling_idx: NO_RELATION, }); // Main tokenization loop @@ -142,6 +142,20 @@ impl PassTokenizer { )); } + // Ranges that did not depend on curly braces may have missing tokens. + // So close all of the active tokens + while self.stack_idx != 0 { + self.pop_range(target, target.tokens.len() as u32); + } + + // And finally, we may have a token range at the end that doesn't belong + // to a range yet, so insert a "code" range if this is the case. + debug_assert_eq!(self.stack_idx, 0); + let last_token_idx = target.tokens.len() as u32; + if target.ranges[0].end != last_token_idx { + + } + // TODO: @remove once I'm sure the algorithm works. For now it is better // if the debugging is a little more expedient if cfg!(debug_assertions) { @@ -149,23 +163,24 @@ impl PassTokenizer { for parent_idx in 0..target.ranges.len() { let cur_range = &target.ranges[parent_idx]; if cur_range.num_child_ranges == 0 { - assert_eq!(cur_range.first_child_idx, parent_idx as u32); - assert_eq!(cur_range.last_child_idx, parent_idx as u32); + assert_eq!(cur_range.first_child_idx, NO_RELATION); + assert_eq!(cur_range.last_child_idx, NO_RELATION); } else { - assert_ne!(cur_range.first_child_idx, parent_idx as u32); - assert_ne!(cur_range.last_child_idx, parent_idx as u32); + assert_ne!(cur_range.first_child_idx, NO_RELATION); + assert_ne!(cur_range.last_child_idx, NO_RELATION); let mut child_counter = 0u32; - let last_child_idx = cur_range.first_child_idx; - let mut child_idx = Some(cur_range.first_child_idx); - while let Some(cur_child_idx) = child_idx { - let child_range = &target.ranges[cur_child_idx as usize]; - assert_eq!(child_range.parent_idx, parent_idx); + let mut 
last_valid_child_idx = cur_range.first_child_idx; + let mut child_idx = cur_range.first_child_idx; + while child_idx != NO_RELATION { + let child_range = &target.ranges[child_idx as usize]; + assert_eq!(child_range.parent_idx, parent_idx as i32); + last_valid_child_idx = child_idx; child_idx = child_range.next_sibling_idx; child_counter += 1; } - assert_eq!(cur_range.last_child_idx, last_child_idx); + assert_eq!(cur_range.last_child_idx, last_valid_child_idx); assert_eq!(cur_range.num_child_ranges, child_counter); } } @@ -626,79 +641,92 @@ impl PassTokenizer { has_newline } - /// Pushes a new token range onto the stack in the buffers. - fn push_range(&mut self, target: &mut TokenBuffer, range_kind: TokenRangeKind, first_token: u32) { - let new_range_idx = target.ranges.len() as u32; - let cur_range = &mut target.ranges[self.stack_idx]; + fn push_range(&mut self, target: &mut TokenBuffer, range_kind: TokenRangeKind, first_token_idx: u32) { + let new_range_idx = target.ranges.len() as i32; + let parent_idx = self.stack_idx as i32; + let parent_range = &mut target.ranges[self.stack_idx]; + let curly_depth = self.curly_stack.len() as u32; + + if parent_range.first_child_idx == NO_RELATION { + parent_range.first_child_idx = new_range_idx; + } - // If we have just popped a range and then push a new range, then the - // first token is equal to the last token registered on the current - // range. If not, then we had some intermediate tokens that did not - // belong to a particular kind of token range: hence we insert an - // intermediate "code" range. - if cur_range.end != first_token { - let code_start = cur_range.end; + if parent_range.end != first_token_idx { + // We popped a range, processed some intermediate tokens and now + // enter a new range. Those intermediate tokens do not belong to a + // particular range yet. So we put them in a "code" range. 
- if cur_range.first_child_idx == self.stack_idx as u32 { - // The parent of the new "code" range we're going to push does - // not have any registered children yet. - cur_range.first_child_idx = new_range_idx; - } - cur_range.last_child_idx = new_range_idx + 1; + // Remember last sibling from parent (if any) + let sibling_idx = parent_range.last_child_idx; + + // Push the code range + let code_start_idx = parent_range.end; + let code_end_idx = first_token_idx; + + parent_range.last_child_idx = new_range_idx; + parent_range.end = code_end_idx; + parent_range.num_child_ranges += 1; - cur_range.end = first_token; - cur_range.num_child_ranges += 1; target.ranges.push(TokenRange{ - parent_idx: self.stack_idx, + parent_idx, range_kind: TokenRangeKind::Code, - curly_depth: self.curly_stack.len() as u32, - start: code_start, - end: first_token, + curly_depth, + start: code_start_idx, + end: code_end_idx, num_child_ranges: 0, - first_child_idx: new_range_idx, - last_child_idx: new_range_idx, - next_sibling_idx: Some(new_range_idx + 1), // we're going to push this thing next + first_child_idx: NO_RELATION, + last_child_idx: NO_RELATION, + next_sibling_idx: new_range_idx + 1, // we're going to push this range below }); - } else { - // We're going to push the range in the code below, but while we - // have the `cur_range` borrowed mutably, we fix up its children - // indices. 
- if cur_range.first_child_idx == self.stack_idx as u32 { - cur_range.first_child_idx = new_range_idx; + + // Fix up the sibling indices + if sibling_idx != NO_RELATION { + let sibling_range = &mut target.ranges[sibling_idx as usize]; + sibling_range.next_sibling_idx = new_range_idx; } - cur_range.last_child_idx = new_range_idx; } - // Insert a new range - let parent_idx = self.stack_idx; + // Push the new range self.stack_idx = target.ranges.len(); - let new_range_idx = self.stack_idx as u32; target.ranges.push(TokenRange{ parent_idx, range_kind, - curly_depth: self.curly_stack.len() as u32, - start: first_token, - end: first_token, + curly_depth, + start: first_token_idx, + end: first_token_idx, // modified when popped num_child_ranges: 0, - first_child_idx: new_range_idx, - last_child_idx: new_range_idx, - next_sibling_idx: None, - }); + first_child_idx: NO_RELATION, + last_child_idx: NO_RELATION, + next_sibling_idx: NO_RELATION + }) } - fn pop_range(&mut self, target: &mut TokenBuffer, end_index: u32) { - let last = &mut target.ranges[self.stack_idx]; - debug_assert!(self.stack_idx != last.parent_idx, "attempting to pop top-level range"); + fn pop_range(&mut self, target: &mut TokenBuffer, end_token_idx: u32) { + let popped_idx = self.stack_idx as i32; + let popped_range = &mut target.ranges[self.stack_idx]; + debug_assert!(self.stack_idx != 0, "attempting to pop top-level range"); // Fix up the current range before going back to parent - last.end = end_index; - debug_assert_ne!(last.start, end_index); + popped_range.end = end_token_idx; + debug_assert_ne!(popped_range.start, end_token_idx); - // Go back to parent - self.stack_idx = last.parent_idx; + // Go back to parent and fix up its child pointers, but remember the + // last child, so we can link it to the newly popped range. 
+ self.stack_idx = popped_range.parent_idx as usize; let parent = &mut target.ranges[self.stack_idx]; - parent.end = end_index; + if parent.first_child_idx == NO_RELATION { + parent.first_child_idx = popped_idx; + } + let prev_sibling_idx = parent.last_child_idx; + parent.last_child_idx = popped_idx; + parent.end = end_token_idx; parent.num_child_ranges += 1; + + // Fix up the sibling (if it exists) + if prev_sibling_idx != NO_RELATION { + let sibling = &mut target.ranges[prev_sibling_idx as usize]; + sibling.next_sibling_idx = popped_idx; + } } diff --git a/src/protocol/parser/pass_typing.rs b/src/protocol/parser/pass_typing.rs index 3b3db1e4e2fa010ceaa6583387d9739fe22ecaab..c9b03cf37a711df99ec8dfa3befe0da580b04126 100644 --- a/src/protocol/parser/pass_typing.rs +++ b/src/protocol/parser/pass_typing.rs @@ -47,10 +47,10 @@ macro_rules! debug_log { ($format:literal) => { - enabled_debug_print!(true, "types", $format); + enabled_debug_print!(false, "types", $format); }; ($format:literal, $($args:expr),*) => { - enabled_debug_print!(true, "types", $format, $($args),*); + enabled_debug_print!(false, "types", $format, $($args),*); }; } diff --git a/src/protocol/parser/pass_validation_linking.rs b/src/protocol/parser/pass_validation_linking.rs index 0ca5b3faf3327427e4d023d04495f450c08e15a5..92b91e316ac6232307795e4a299c0de610e365d9 100644 --- a/src/protocol/parser/pass_validation_linking.rs +++ b/src/protocol/parser/pass_validation_linking.rs @@ -11,6 +11,7 @@ use super::visitor::{ Visitor2, VisitorResult }; +use crate::protocol::parser::ModuleCompilationPhase; #[derive(PartialEq, Eq)] enum DefinitionType { @@ -63,9 +64,8 @@ pub(crate) struct PassValidationLinking { // used as an expression parent) expr_parent: ExpressionParent, // Keeping track of relative position in block in the breadth-first pass. 
- // May not correspond to block.statement[index] if any statements are - // inserted after the breadth-pass relative_pos_in_block: u32, + definition_buffer: ScopedBuffer, // Single buffer of statement IDs that we want to traverse in a block. // Required to work around Rust borrowing rules and to prevent constant // cloning of vectors. @@ -84,6 +84,7 @@ impl PassValidationLinking { expr_parent: ExpressionParent::None, def_type: DefinitionType::Function(FunctionDefinitionId::new_invalid()), relative_pos_in_block: 0, + definition_buffer: ScopedBuffer::new_reserved(128), statement_buffer: ScopedBuffer::new_reserved(STMT_BUFFER_INIT_CAPACITY), expression_buffer: ScopedBuffer::new_reserved(EXPR_BUFFER_INIT_CAPACITY), } @@ -100,6 +101,20 @@ impl PassValidationLinking { } impl Visitor2 for PassValidationLinking { + fn visit_module(&mut self, ctx: &mut Ctx) -> VisitorResult { + debug_assert_eq!(ctx.module.phase, ModuleCompilationPhase::TypesAddedToTable); + + let root = &ctx.heap[ctx.module.root_id]; + let section = self.definition_buffer.start_section_initialized(&root.definitions); + for definition_idx in 0..section.len() { + let definition_id = section[definition_idx]; + self.visit_definition(ctx, definition_id)?; + } + section.forget(); + + ctx.module.phase = ModuleCompilationPhase::ValidatedAndLinked; + Ok(()) + } //-------------------------------------------------------------------------- // Definition visitors //-------------------------------------------------------------------------- diff --git a/src/protocol/parser/token_parsing.rs b/src/protocol/parser/token_parsing.rs index ebaaefde18b5870d0dc6a577ca1091a506896ecd..94f5a43993af91fe6c37bf4e2493ee1a8c0135a2 100644 --- a/src/protocol/parser/token_parsing.rs +++ b/src/protocol/parser/token_parsing.rs @@ -16,7 +16,7 @@ pub(crate) const KW_AS: &'static [u8] = b"as"; pub(crate) const KW_STRUCT: &'static [u8] = b"struct"; pub(crate) const KW_ENUM: &'static [u8] = b"enum"; pub(crate) const KW_UNION: &'static [u8] = 
b"union"; -pub(crate) const KW_FUNCTION: &'static [u8] = b"function"; +pub(crate) const KW_FUNCTION: &'static [u8] = b"func"; pub(crate) const KW_PRIMITIVE: &'static [u8] = b"primitive"; pub(crate) const KW_COMPOSITE: &'static [u8] = b"composite"; pub(crate) const KW_IMPORT: &'static [u8] = b"import"; diff --git a/src/protocol/parser/tokens.rs b/src/protocol/parser/tokens.rs index d1ae773a037be730c655cd3b4585542f6a73723b..be1b418a77912e021844104de739f1e40ac3be5e 100644 --- a/src/protocol/parser/tokens.rs +++ b/src/protocol/parser/tokens.rs @@ -178,13 +178,16 @@ pub enum TokenRangeKind { Code, } +pub const NO_RELATION: i32 = -1; +pub const NO_SIBLING: i32 = NO_RELATION; + /// A range of tokens with a specific meaning. Such a range is part of a tree /// where each parent tree envelops all of its children. #[derive(Debug)] pub struct TokenRange { // Index of parent in `TokenBuffer.ranges`, does not have a parent if the - // range kind is Module, in that case the parent index points to itself. - pub parent_idx: usize, + // range kind is Module, in that case the parent index is -1. + pub parent_idx: i32, pub range_kind: TokenRangeKind, pub curly_depth: u32, // Offsets into `TokenBuffer.ranges`: the tokens belonging to this range. 
@@ -192,9 +195,9 @@ pub struct TokenRange { pub end: u32, // last token (exclusive index) // Child ranges pub num_child_ranges: u32, // Number of subranges - pub first_child_idx: u32, // First subrange (or points to self if no subranges) - pub last_child_idx: u32, // Last subrange (or points to self if no subranges) - pub next_sibling_idx: Option, + pub first_child_idx: i32, // First subrange (or -1 if no subranges) + pub last_child_idx: i32, // Last subrange (or -1 if no subranges) + pub next_sibling_idx: i32, // Next subrange (or -1 if no next subrange) } pub struct TokenBuffer { diff --git a/src/protocol/parser/visitor.rs b/src/protocol/parser/visitor.rs index 582c2f26ba0fe1f17394975c0f62c9ec7620e078..b046c0662a0ed5872cb21777968605c4a02ac42c 100644 --- a/src/protocol/parser/visitor.rs +++ b/src/protocol/parser/visitor.rs @@ -16,7 +16,7 @@ pub(crate) const EXPR_BUFFER_INIT_CAPACITY: usize = 256; /// General context structure that is used while traversing the AST. pub(crate) struct Ctx<'p> { pub heap: &'p mut Heap, - pub module: &'p Module, + pub module: &'p mut Module, pub symbols: &'p mut SymbolTable, pub types: &'p mut TypeTable, } diff --git a/src/protocol/tests/lexer.rs b/src/protocol/tests/lexer.rs index 25d3fba7df135b5e72b6d1325f45b1b316fb7e66..dcc200336d1929477428880c13c0e9e1b63538e3 100644 --- a/src/protocol/tests/lexer.rs +++ b/src/protocol/tests/lexer.rs @@ -9,7 +9,7 @@ use super::*; fn test_disallowed_inference() { Tester::new_single_source_expect_err( "argument auto inference", - "int func(auto arg) { return 0; }" + "s32 func(auto arg) { return 0; }" ).error(|e| { e .assert_msg_has(0, "inference is not allowed") .assert_occurs_at(0, "auto arg"); @@ -17,7 +17,7 @@ fn test_disallowed_inference() { Tester::new_single_source_expect_err( "return type auto inference", - "auto func(int arg) { return 0; }" + "auto func(s32 arg) { return 0; }" ).error(|e| { e .assert_msg_has(0, "inference is not allowed") .assert_occurs_at(0, "auto func"); @@ -25,7 +25,7 @@ fn 
test_disallowed_inference() { Tester::new_single_source_expect_err( "implicit polymorph argument auto inference", - "int func(in port) { return port; }" + "s32 func(in port) { return port; }" ).error(|e| { e .assert_msg_has(0, "inference is not allowed") .assert_occurs_at(0, "in port"); @@ -33,7 +33,7 @@ fn test_disallowed_inference() { Tester::new_single_source_expect_err( "explicit polymorph argument auto inference", - "int func(in port) { return port; }" + "s32 func(in port) { return port; }" ).error(|e| { e .assert_msg_has(0, "inference is not allowed") .assert_occurs_at(0, "auto> port"); @@ -65,45 +65,45 @@ fn test_simple_struct_definition() { Tester::new_single_source_expect_ok( "single field, no comma", - "struct Foo{ int field }" + "struct Foo{ s32 field }" ).for_struct("Foo", |t| { t .assert_num_fields(1) .for_field("field", |f| { - f.assert_parser_type("int"); + f.assert_parser_type("s32"); }); }); Tester::new_single_source_expect_ok( "single field, with comma", - "struct Foo{ int field, }" + "struct Foo{ s32 field, }" ).for_struct("Foo", |t| { t .assert_num_fields(1) .for_field("field", |f| { f - .assert_parser_type("int"); + .assert_parser_type("s32"); }); }); Tester::new_single_source_expect_ok( "multiple fields, no comma", - "struct Foo{ byte a, short b, int c }" + "struct Foo{ u8 a, s16 b, s32 c }" ).for_struct("Foo", |t| { t .assert_num_fields(3) - .for_field("a", |f| { f.assert_parser_type("byte"); }) - .for_field("b", |f| { f.assert_parser_type("short"); }) - .for_field("c", |f| { f.assert_parser_type("int"); }); + .for_field("a", |f| { f.assert_parser_type("u8"); }) + .for_field("b", |f| { f.assert_parser_type("s16"); }) + .for_field("c", |f| { f.assert_parser_type("s32"); }); }); Tester::new_single_source_expect_ok( "multiple fields, with comma", "struct Foo{ - byte a, - short b, - int c, + u8 a, + s16 b, + s32 c, }" ).for_struct("Foo", |t| { t .assert_num_fields(3) - .for_field("a", |f| { f.assert_parser_type("byte"); }) - .for_field("b", |f| { 
f.assert_parser_type("short"); }) - .for_field("c", |f| { f.assert_parser_type("int"); }); + .for_field("a", |f| { f.assert_parser_type("u8"); }) + .for_field("b", |f| { f.assert_parser_type("s16"); }) + .for_field("c", |f| { f.assert_parser_type("s32"); }); }); } \ No newline at end of file diff --git a/src/protocol/tests/parser_imports.rs b/src/protocol/tests/parser_imports.rs index 52b113ab255654806943064cefea64731bd6ef0e..b8749160d0afbe78a0de33e822efeca21390be18 100644 --- a/src/protocol/tests/parser_imports.rs +++ b/src/protocol/tests/parser_imports.rs @@ -9,11 +9,11 @@ fn test_module_import() { Tester::new("single domain name") .with_source(" #module external - struct Foo { int field } + struct Foo { s32 field } ") .with_source(" import external; - int caller() { + s32 caller() { auto a = external::Foo{ field: 0 }; return a.field; } @@ -24,11 +24,11 @@ fn test_module_import() { Tester::new("multi domain name") .with_source(" #module external.domain - struct Foo { int field } + struct Foo { s32 field } ") .with_source(" import external.domain; - int caller() { + s32 caller() { auto a = domain::Foo{ field: 0 }; return a.field; } @@ -39,11 +39,11 @@ fn test_module_import() { Tester::new("aliased domain name") .with_source(" #module external - struct Foo { int field } + struct Foo { s32 field } ") .with_source(" import external as aliased; - int caller() { + s32 caller() { auto a = aliased::Foo{ field: 0 }; return a.field; } @@ -57,11 +57,11 @@ fn test_single_symbol_import() { Tester::new("specific symbol") .with_source(" #module external - struct Foo { int field } + struct Foo { s32 field } ") .with_source(" import external::Foo; - int caller() { + s32 caller() { auto a = Foo{ field: 1 }; auto b = Foo{ field: 2 }; return a.field + b.field; @@ -72,11 +72,11 @@ fn test_single_symbol_import() { Tester::new("specific aliased symbol") .with_source(" #module external - struct Foo { int field } + struct Foo { s32 field } ") .with_source(" import external::Foo as Bar; - 
int caller() { + s32 caller() { return Bar{ field: 0 }.field; } ") @@ -87,11 +87,11 @@ fn test_single_symbol_import() { // Tester::new("import all") // .with_source(" // #module external - // struct Foo { int field } + // struct Foo { s32 field } // ") // .with_source(" // import external::*; - // int caller() { return Foo{field:0}.field; } + // s32 caller() { return Foo{field:0}.field; } // ") // .compile() // .expect_ok(); @@ -102,12 +102,12 @@ fn test_multi_symbol_import() { Tester::new("specific symbols") .with_source(" #module external - struct Foo { byte f } - struct Bar { byte b } + struct Foo { s8 f } + struct Bar { s8 b } ") .with_source(" import external::{Foo, Bar}; - byte caller() { + s8 caller() { return Foo{f:0}.f + Bar{b:1}.b; } ") @@ -117,12 +117,12 @@ fn test_multi_symbol_import() { Tester::new("aliased symbols") .with_source(" #module external - struct Foo { byte in_foo } - struct Bar { byte in_bar } + struct Foo { s8 in_foo } + struct Bar { s8 in_bar } ") .with_source(" import external::{Foo as Bar, Bar as Foo}; - byte caller() { + s8 caller() { return Foo{in_bar:0}.in_bar + Bar{in_foo:0}.in_foo; }") .compile() @@ -132,12 +132,12 @@ fn test_multi_symbol_import() { // Tester::new("import all") // .with_source(" // #module external - // struct Foo { byte f }; - // struct Bar { byte b }; + // struct Foo { s8 f }; + // struct Bar { s8 b }; // ") // .with_source(" // import external::*; - // byte caller() { + // s8 caller() { // auto f = Foo{f:0}; // auto b = Bar{b:0}; // return f.f + b.b; @@ -152,12 +152,12 @@ fn test_illegal_import_use() { Tester::new("unexpected polymorphic args") .with_source(" #module external - struct Foo { byte f } + struct Foo { s8 f } ") .with_source(" import external; - byte caller() { - auto foo = external::Foo{ f: 0 }; + s8 caller() { + auto foo = external::Foo{ f: 0 }; return foo.f; } ") @@ -174,8 +174,8 @@ fn test_illegal_import_use() { ") .with_source(" import external; - byte caller() { - auto foo = external::Foo{ f: 0 
}; + s8 caller() { + auto foo = external::Foo{ f: 0 }; return foo.f; }") .compile() @@ -191,7 +191,7 @@ fn test_illegal_import_use() { ") .with_source(" import external; - byte caller() { + s8 caller() { auto foo = external{ f: 0 }; return 0; } @@ -205,11 +205,11 @@ fn test_illegal_import_use() { Tester::new("more namespaces than needed, not polymorphic") .with_source(" #module external - struct Foo { byte f } + struct Foo { s8 f } ") .with_source(" import external; - byte caller() { + s8 caller() { auto foo = external::Foo::f{ f: 0 }; return 0; }") @@ -223,7 +223,7 @@ fn test_illegal_import_use() { Tester::new("import from another import") .with_source(" #module mod1 - struct Foo { byte f } + struct Foo { s8 f } ") .with_source(" #module mod2 @@ -232,7 +232,7 @@ fn test_illegal_import_use() { ") .with_source(" import mod2; - byte caller() { + s8 caller() { auto bar = mod2::Bar{ f: mod2::Foo{ f: 0 } }; return var.f.f; }") diff --git a/src/protocol/tests/parser_inference.rs b/src/protocol/tests/parser_inference.rs index 24cc508e75e7af70f556403233bb27b8c5565652..5a378ec54cb013ca418cbbef553e366c60293484 100644 --- a/src/protocol/tests/parser_inference.rs +++ b/src/protocol/tests/parser_inference.rs @@ -9,7 +9,7 @@ fn test_integer_inference() { Tester::new_single_source_expect_ok( "by arguments", " - int call(byte b, short s, int i, long l) { + func call(u8 b, u16 s, u32 i, u64 l) -> u32 { auto b2 = b; auto s2 = s; auto i2 = i; @@ -20,27 +20,27 @@ fn test_integer_inference() { ).for_function("call", |f| { f .for_variable("b2", |v| { v .assert_parser_type("auto") - .assert_concrete_type("byte"); + .assert_concrete_type("u8"); }) .for_variable("s2", |v| { v .assert_parser_type("auto") - .assert_concrete_type("short"); + .assert_concrete_type("u16"); }) .for_variable("i2", |v| { v .assert_parser_type("auto") - .assert_concrete_type("int"); + .assert_concrete_type("u32"); }) .for_variable("l2", |v| { v .assert_parser_type("auto") - .assert_concrete_type("long"); + 
.assert_concrete_type("u64"); }); }); Tester::new_single_source_expect_ok( "by assignment", " - int call() { - byte b1 = 0; short s1 = 0; int i1 = 0; long l1 = 0; + func call() -> u32 { + u8 b1 = 0; u16 s1 = 0; u32 i1 = 0; u64 l1 = 0; auto b2 = b1; auto s2 = s1; auto i2 = i1; @@ -50,19 +50,19 @@ fn test_integer_inference() { ).for_function("call", |f| { f .for_variable("b2", |v| { v .assert_parser_type("auto") - .assert_concrete_type("byte"); + .assert_concrete_type("u8"); }) .for_variable("s2", |v| { v .assert_parser_type("auto") - .assert_concrete_type("short"); + .assert_concrete_type("u16"); }) .for_variable("i2", |v| { v .assert_parser_type("auto") - .assert_concrete_type("int"); + .assert_concrete_type("u32"); }) .for_variable("l2", |v| { v .assert_parser_type("auto") - .assert_concrete_type("long"); + .assert_concrete_type("u64"); }); }); } @@ -71,15 +71,15 @@ fn test_integer_inference() { fn test_binary_expr_inference() { Tester::new_single_source_expect_ok( "compatible types", - "int call() { - byte b0 = 0; - byte b1 = 1; - short s0 = 0; - short s1 = 1; - int i0 = 0; - int i1 = 1; - long l0 = 0; - long l1 = 1; + "func call() -> s32 { + s8 b0 = 0; + s8 b1 = 1; + s16 s0 = 0; + s16 s1 = 1; + s32 i0 = 0; + s32 i1 = 1; + s64 l0 = 0; + s64 l1 = 1; auto b = b0 + b1; auto s = s0 + s1; auto i = i0 + i1; @@ -89,27 +89,27 @@ fn test_binary_expr_inference() { ).for_function("call", |f| { f .for_expression_by_source( "b0 + b1", "+", - |e| { e.assert_concrete_type("byte"); } + |e| { e.assert_concrete_type("s8"); } ) .for_expression_by_source( "s0 + s1", "+", - |e| { e.assert_concrete_type("short"); } + |e| { e.assert_concrete_type("s16"); } ) .for_expression_by_source( "i0 + i1", "+", - |e| { e.assert_concrete_type("int"); } + |e| { e.assert_concrete_type("s32"); } ) .for_expression_by_source( "l0 + l1", "+", - |e| { e.assert_concrete_type("long"); } + |e| { e.assert_concrete_type("s64"); } ); }); Tester::new_single_source_expect_err( "incompatible types", - "int call() 
{ - byte b = 0; - long l = 1; + "func call() -> s32 { + s8 b = 0; + s64 l = 1; auto r = b + l; return 0; }" @@ -117,8 +117,8 @@ fn test_binary_expr_inference() { .assert_ctx_has(0, "b + l") .assert_msg_has(0, "cannot apply") .assert_occurs_at(0, "+") - .assert_msg_has(1, "has type 'byte'") - .assert_msg_has(2, "has type 'long'"); + .assert_msg_has(1, "has type 's8'") + .assert_msg_has(2, "has type 's64'"); }); } @@ -130,12 +130,12 @@ fn test_struct_inference() { "by function calls", " struct Pair{ T1 first, T2 second } - Pair construct(T1 first, T2 second) { + func construct(T1 first, T2 second) -> Pair { return Pair{ first: first, second: second }; } - int fix_t1(Pair arg) { return 0; } - int fix_t2(Pair arg) { return 0; } - int test() { + func fix_t1(Pair arg) -> s32 { return 0; } + func fix_t2(Pair arg) -> s32 { return 0; } + func test() -> s32 { auto first = 0; auto second = 1; auto pair = construct(first, second); @@ -147,15 +147,15 @@ ).for_function("test", |f| { f .for_variable("first", |v| { v .assert_parser_type("auto") - .assert_concrete_type("byte"); + .assert_concrete_type("s8"); }) .for_variable("second", |v| { v .assert_parser_type("auto") - .assert_concrete_type("int"); + .assert_concrete_type("s32"); }) .for_variable("pair", |v| { v .assert_parser_type("auto") - .assert_concrete_type("Pair"); + .assert_concrete_type("Pair"); }); }); @@ -163,15 +163,15 @@ "by field access", " struct Pair{ T1 first, T2 second } - Pair construct(T1 first, T2 second) { + func construct(T1 first, T2 second) -> Pair { return Pair{ first: first, second: second }; } - int test() { + func test() -> s32 { auto first = 0; auto second = 1; auto pair = construct(first, second); - byte assign_first = 0; - long assign_second = 1; + s8 assign_first = 0; + s64 assign_second = 1; pair.first = assign_first; pair.second = assign_second; return 0; @@ -180,15 +180,15 @@ ).for_function("test", |f| { f
.for_variable("first", |v| { v .assert_parser_type("auto") - .assert_concrete_type("byte"); + .assert_concrete_type("s8"); }) .for_variable("second", |v| { v .assert_parser_type("auto") - .assert_concrete_type("long"); + .assert_concrete_type("s64"); }) .for_variable("pair", |v| { v .assert_parser_type("auto") - .assert_concrete_type("Pair"); + .assert_concrete_type("Pair"); }); }); @@ -196,10 +196,12 @@ fn test_struct_inference() { "by nested field access", " struct Node{ T1 l, T2 r } - Node construct(T1 l, T2 r) { return Node{ l: l, r: r }; } - int fix_poly(Node a) { return 0; } - int test() { - byte assigned = 0; + func construct(T1 l, T2 r) -> Node { + return Node{ l: l, r: r }; + } + func fix_poly(Node a) -> s32 { return 0; } + func test() -> s32 { + s8 assigned = 0; auto thing = construct(assigned, construct(0, 1)); fix_poly(thing.r); thing.r.r = assigned; @@ -209,7 +211,7 @@ ).for_function("test", |f| { f .for_variable("thing", |v| { v .assert_parser_type("auto") - .assert_concrete_type("Node>"); + .assert_concrete_type("Node>"); }); }); } @@ -220,7 +222,7 @@ fn test_enum_inference() { "no polymorphic vars", " enum Choice { A, B } - int test_instances() { + func test_instances() -> s32 { auto foo = Choice::A; auto bar = Choice::B; return 0; @@ -244,21 +246,21 @@ A, B, } - int fix_as_byte(Choice arg) { return 0; } - int fix_as_int(Choice arg) { return 0; } - int test_instances() { - auto choice_byte = Choice::A; - auto choice_int1 = Choice::B; - Choice choice_int2 = Choice::B; - fix_as_byte(choice_byte); - fix_as_int(choice_int1); - return fix_as_int(choice_int2); + func fix_as_s8(Choice arg) -> s32 { return 0; } + func fix_as_s32(Choice arg) -> s32 { return 0; } + func test_instances() -> s32 { + auto choice_s8 = Choice::A; + auto choice_s32_1 = Choice::B; + Choice choice_s32_2 = Choice::B; + fix_as_s8(choice_s8); + fix_as_s32(choice_s32_1); + return fix_as_s32(choice_s32_2); } " ).for_function("test_instances", |f| {
f - .for_variable("choice_byte", |v| { v + .for_variable("choice_s8", |v| { v .assert_parser_type("auto") - .assert_concrete_type("Choice"); + .assert_concrete_type("Choice"); }) .for_variable("choice_int1", |v| { v .assert_parser_type("auto") @@ -274,10 +276,10 @@ "two polymorphic vars", " enum Choice{ A, B, } - int fix_t1(Choice arg) { return 0; } - int fix_t2(Choice arg) { return 0; } - int test_instances() { - Choice choice1 = Choice::A; + func fix_t1(Choice arg) -> s32 { return 0; } + func fix_t2(Choice arg) -> s32 { return 0; } + func test_instances() -> s32 { + Choice choice1 = Choice::A; Choice choice2 = Choice::A; Choice choice3 = Choice::B; auto choice4 = Choice::B; @@ -288,20 +290,20 @@ " ).for_function("test_instances", |f| { f .for_variable("choice1", |v| { v - .assert_parser_type("Choice") - .assert_concrete_type("Choice"); + .assert_parser_type("Choice") + .assert_concrete_type("Choice"); }) .for_variable("choice2", |v| { v .assert_parser_type("Choice") - .assert_concrete_type("Choice"); + .assert_concrete_type("Choice"); }) .for_variable("choice3", |v| { v .assert_parser_type("Choice") - .assert_concrete_type("Choice"); + .assert_concrete_type("Choice"); }) .for_variable("choice4", |v| { v .assert_parser_type("auto") - .assert_concrete_type("Choice"); + .assert_concrete_type("Choice"); }); }); } @@ -311,10 +313,10 @@ fn test_failed_polymorph_inference() { Tester::new_single_source_expect_err( "function call inference mismatch", " - int poly(T a, T b) { return 0; } - int call() { - byte first_arg = 5; - long second_arg = 2; + func poly(T a, T b) -> s32 { return 0; } + func call() -> s32 { + s8 first_arg = 5; + s64 second_arg = 2; return poly(first_arg, second_arg); } " @@ -324,18 +326,18 @@ .assert_occurs_at(0, "poly") .assert_msg_has(0, "Conflicting type for polymorphic variable 'T'") .assert_occurs_at(1, "second_arg") - .assert_msg_has(1, "inferred it to 'long'") +
.assert_msg_has(1, "inferred it to 's64'") .assert_occurs_at(2, "first_arg") - .assert_msg_has(2, "inferred it to 'byte'"); + .assert_msg_has(2, "inferred it to 's8'"); }); Tester::new_single_source_expect_err( "struct literal inference mismatch", " struct Pair{ T first, T second } - int call() { - byte first_arg = 5; - long second_arg = 2; + func call() -> s32 { + s8 first_arg = 5; + s64 second_arg = 2; auto pair = Pair{ first: first_arg, second: second_arg }; return 3; } " @@ -346,9 +348,9 @@ .assert_occurs_at(0, "Pair{") .assert_msg_has(0, "Conflicting type for polymorphic variable 'T'") .assert_occurs_at(1, "second_arg") - .assert_msg_has(1, "inferred it to 'long'") + .assert_msg_has(1, "inferred it to 's64'") .assert_occurs_at(2, "first_arg") - .assert_msg_has(2, "inferred it to 'byte'"); + .assert_msg_has(2, "inferred it to 's8'"); }); // Cannot really test literal inference error, but this comes close @@ -356,17 +358,17 @@ "enum literal inference mismatch", " enum Uninteresting{ Variant } - int fix_t(Uninteresting arg) { return 0; } - int call() { + func fix_t(Uninteresting arg) -> s32 { return 0; } + func call() -> s32 { auto a = Uninteresting::Variant; - fix_t(a); + fix_t(a); fix_t(a); return 4; } " ).error(|e| { e .assert_num(2) - .assert_any_msg_has("type 'Uninteresting'") + .assert_any_msg_has("type 'Uninteresting'") .assert_any_msg_has("type 'Uninteresting'"); }); @@ -374,8 +376,8 @@ "field access inference mismatch", " struct Holder{ Shazam a } - int call() { - byte to_hold = 0; + func call() -> s32 { + s8 to_hold = 0; auto holder = Holder{ a: to_hold }; return holder.a; } @@ -385,7 +387,7 @@ .assert_ctx_has(0, "holder.a") .assert_occurs_at(0, ".") .assert_msg_has(0, "Conflicting type for polymorphic variable 'Shazam'") - .assert_msg_has(1, "inferred it to 'byte'") + .assert_msg_has(1, "inferred it to
's8'") .assert_msg_has(2, "inferred it to 'int'"); }); @@ -394,11 +396,11 @@ fn test_failed_polymorph_inference() { "nested field access inference mismatch", " struct Node{ T1 l, T2 r } - Node construct(T1 l, T2 r) { return Node{ l: l, r: r }; } - int fix_poly(Node a) { return 0; } - int test() { - byte assigned = 0; - long another = 1; + func construct(T1 l, T2 r) -> Node { return Node{ l: l, r: r }; } + func fix_poly(Node a) -> s32 { return 0; } + func test() -> s32 { + s8 assigned = 0; + s64 another = 1; auto thing = construct(assigned, construct(another, 1)); fix_poly(thing.r); thing.r.r = assigned; diff --git a/src/protocol/tests/parser_monomorphs.rs b/src/protocol/tests/parser_monomorphs.rs index ed633531181e350ba4283c98d5897d0892dfbceb..4f5a6fc4258f097e5ce3dd8439220b664e225e3e 100644 --- a/src/protocol/tests/parser_monomorphs.rs +++ b/src/protocol/tests/parser_monomorphs.rs @@ -9,7 +9,7 @@ use super::*; fn test_struct_monomorphs() { Tester::new_single_source_expect_ok( "no polymorph", - "struct Integer{ int field }" + "struct Integer{ s32 field }" ).for_struct("Integer", |s| { s .assert_num_monomorphs(0); }); @@ -18,25 +18,25 @@ fn test_struct_monomorphs() { "single polymorph", " struct Number{ T number } - int instantiator() { - auto a = Number{ number: 0 }; - auto b = Number{ number: 1 }; - auto c = Number{ number: 2 }; - auto d = Number{ number: 3 }; - auto e = Number>{ number: Number{ number: 4 }}; + s32 instantiator() { + auto a = Number{ number: 0 }; + auto b = Number{ number: 1 }; + auto c = Number{ number: 2 }; + auto d = Number{ number: 3 }; + auto e = Number>{ number: Number{ number: 4 }}; return 0; } " ).for_struct("Number", |s| { s - .assert_has_monomorph("byte") - .assert_has_monomorph("short") - .assert_has_monomorph("int") - .assert_has_monomorph("long") - .assert_has_monomorph("Number") + .assert_has_monomorph("s8") + .assert_has_monomorph("s16") + .assert_has_monomorph("s32") + .assert_has_monomorph("s64") + .assert_has_monomorph("Number") 
.assert_num_monomorphs(5); }).for_function("instantiator", |f| { f - .for_variable("a", |v| {v.assert_concrete_type("Number");} ) - .for_variable("e", |v| {v.assert_concrete_type("Number>");} ); + .for_variable("a", |v| {v.assert_concrete_type("Number");} ) + .for_variable("e", |v| {v.assert_concrete_type("Number>");} ); }); } @@ -46,7 +46,7 @@ fn test_enum_monomorphs() { "no polymorph", " enum Answer{ Yes, No } - int do_it() { auto a = Answer::Yes; return 0; } + s32 do_it() { auto a = Answer::Yes; return 0; } " ).for_enum("Answer", |e| { e .assert_num_monomorphs(0); @@ -56,19 +56,19 @@ fn test_enum_monomorphs() { "single polymorph", " enum Answer { Yes, No } - int instantiator() { - auto a = Answer::Yes; - auto b = Answer::No; - auto c = Answer::Yes; - auto d = Answer>>::No; + s32 instantiator() { + auto a = Answer::Yes; + auto b = Answer::No; + auto c = Answer::Yes; + auto d = Answer>>::No; return 0; } " ).for_enum("Answer", |e| { e .assert_num_monomorphs(3) - .assert_has_monomorph("byte") - .assert_has_monomorph("int") - .assert_has_monomorph("Answer>"); + .assert_has_monomorph("s8") + .assert_has_monomorph("s32") + .assert_has_monomorph("Answer>"); }); } @@ -78,37 +78,37 @@ fn test_union_monomorphs() { "no polymorph", " union Trinary { Undefined, Value(boolean) } - int do_it() { auto a = Trinary::Value(true); return 0; } + s32 do_it() { auto a = Trinary::Value(true); return 0; } " ).for_union("Trinary", |e| { e .assert_num_monomorphs(0); }); // TODO: Does this do what we want? Or do we expect the embedded monomorph - // Result to be instantiated as well? I don't think so. + // Result to be instantiated as well? I don't think so. 
Tester::new_single_source_expect_ok( "polymorphs", " union Result{ Ok(T), Err(E) } - int instantiator() { - short a_short = 5; - auto a = Result::Ok(0); - auto b = Result::Ok(true); - auto c = Result, Result>::Err(Result::Ok(5)); - auto d = Result, auto>::Err(Result::Ok(a_short)); + s32 instantiator() { + s16 a_s16 = 5; + auto a = Result::Ok(0); + auto b = Result::Ok(true); + auto c = Result, Result>::Err(Result::Ok(5)); + auto d = Result, auto>::Err(Result::Ok(a_s16)); return 0; } " ).for_union("Result", |e| { e .assert_num_monomorphs(4) - .assert_has_monomorph("byte;bool") - .assert_has_monomorph("bool;byte") - .assert_has_monomorph("Result;Result") - .assert_has_monomorph("short;long"); + .assert_has_monomorph("s8;bool") + .assert_has_monomorph("bool;s8") + .assert_has_monomorph("Result;Result") + .assert_has_monomorph("s16;s64"); }).for_function("instantiator", |f| { f .for_variable("d", |v| { v .assert_parser_type("auto") - .assert_concrete_type("Result,Result>"); + .assert_concrete_type("Result,Result>"); }); }); } \ No newline at end of file diff --git a/src/protocol/tests/parser_validation.rs b/src/protocol/tests/parser_validation.rs index 483a57202bac2bf69144a6abe98d24bd0314a7a8..bf9e3b8f311230cfd5de5d805fb1c6c4d4505e84 100644 --- a/src/protocol/tests/parser_validation.rs +++ b/src/protocol/tests/parser_validation.rs @@ -10,16 +10,16 @@ fn test_correct_struct_instance() { Tester::new_single_source_expect_ok( "single field", " - struct Foo { int a } - Foo bar(int arg) { return Foo{ a: arg }; } + struct Foo { s32 a } + Foo bar(s32 arg) { return Foo{ a: arg }; } " ); Tester::new_single_source_expect_ok( "multiple fields", " - struct Foo { int a, int b } - Foo bar(int arg) { return Foo{ a: arg, b: arg }; } + struct Foo { s32 a, s32 b } + Foo bar(s32 arg) { return Foo{ a: arg, b: arg }; } " ); @@ -27,7 +27,7 @@ fn test_correct_struct_instance() { "single field, explicit polymorph", " struct Foo{ T field } - Foo bar(int arg) { return Foo{ field: arg }; } + Foo 
bar(s32 arg) { return Foo{ field: arg }; } " ); @@ -35,7 +35,7 @@ fn test_correct_struct_instance() { "single field, implicit polymorph", " struct Foo{ T field } - int bar(int arg) { + s32 bar(s32 arg) { auto thingo = Foo{ field: arg }; return arg; } @@ -46,8 +46,8 @@ fn test_correct_struct_instance() { "multiple fields, same explicit polymorph", " struct Pair{ T1 first, T2 second } - int bar(int arg) { - auto qux = Pair{ first: arg, second: arg }; + s32 bar(s32 arg) { + auto qux = Pair{ first: arg, second: arg }; return arg; } " @@ -57,7 +57,7 @@ fn test_correct_struct_instance() { "multiple fields, same implicit polymorph", " struct Pair{ T1 first, T2 second } - int bar(int arg) { + s32 bar(s32 arg) { auto wup = Pair{ first: arg, second: arg }; return arg; } @@ -68,8 +68,8 @@ fn test_correct_struct_instance() { "multiple fields, different explicit polymorph", " struct Pair{ T1 first, T2 second } - int bar(int arg1, byte arg2) { - auto shoo = Pair{ first: arg1, second: arg2 }; + s32 bar(s32 arg1, s8 arg2) { + auto shoo = Pair{ first: arg1, second: arg2 }; return arg1; } " @@ -79,7 +79,7 @@ fn test_correct_struct_instance() { "multiple fields, different implicit polymorph", " struct Pair{ T1 first, T2 second } - int bar(int arg1, byte arg2) { + s32 bar(s32 arg1, s8 arg2) { auto shrubbery = Pair{ first: arg1, second: arg2 }; return arg1; } @@ -91,7 +91,7 @@ fn test_correct_struct_instance() { fn test_incorrect_struct_instance() { Tester::new_single_source_expect_err( "reused field in definition", - "struct Foo{ int a, byte a }" + "struct Foo{ s32 a, s8 a }" ).error(|e| { e .assert_num(2) .assert_occurs_at(0, "a }") @@ -173,7 +173,7 @@ fn test_correct_enum_instance() { "explicit multi-polymorph", " enum Foo{ A, B } - Foo bar() { return Foo::B; } + Foo bar() { return Foo::B; } " ); } @@ -235,7 +235,7 @@ fn test_correct_union_instance() { Tester::new_single_source_expect_ok( "multiple embedded", " - union Foo { A(int), B(byte) } + union Foo { A(int), B(s8) } Foo bar() 
{ return Foo::B(2); } " ); @@ -243,7 +243,7 @@ fn test_correct_union_instance() { Tester::new_single_source_expect_ok( "multiple values in embedded", " - union Foo { A(int, byte) } + union Foo { A(int, s8) } Foo bar() { return Foo::A(0, 2); } " ); @@ -267,7 +267,7 @@ fn test_correct_union_instance() { "multiple polymorphic vars", " union Result { Ok(T), Err(E), } - Result bar() { return Result::Ok(3); } + Result bar() { return Result::Ok(3); } " ); @@ -275,7 +275,7 @@ fn test_correct_union_instance() { "multiple polymorphic in one variant", " union MaybePair{ None, Some(T1, T2) } - MaybePair bar() { return MaybePair::Some(1, 2); } + MaybePair bar() { return MaybePair::Some(1, 2); } " ); } @@ -298,11 +298,11 @@ fn test_incorrect_union_instance() { Tester::new_single_source_expect_err( "embedded-variant name reuse", " - union Foo{ A(int), A(byte) } + union Foo{ A(int), A(s8) } " ).error(|e| { e .assert_num(2) - .assert_occurs_at(0, "A(byte)") + .assert_occurs_at(0, "A(s8)") .assert_msg_has(0, "union variant is defined more than once") .assert_occurs_at(1, "A(int)") .assert_msg_has(1, "other union variant"); @@ -311,7 +311,7 @@ fn test_incorrect_union_instance() { Tester::new_single_source_expect_err( "undefined variant", " - union Silly{ Thing(byte) } + union Silly{ Thing(s8) } Silly bar() { return Silly::Undefined(5); } " ).error(|e| { e diff --git a/src/protocol/tests/utils.rs b/src/protocol/tests/utils.rs index 0cd60489e610f5f90119e2519266e18d6e059495..9a4552e1bb1cec481978aad5ca51698f20b7094a 100644 --- a/src/protocol/tests/utils.rs +++ b/src/protocol/tests/utils.rs @@ -1,3 +1,4 @@ +use crate::collections::StringPool; use crate::protocol::{ ast::*, input_source::*, @@ -123,6 +124,7 @@ pub(crate) struct AstOkTester { heap: Heap, symbols: SymbolTable, types: TypeTable, + pool: StringPool, } impl AstOkTester { @@ -133,6 +135,7 @@ impl AstOkTester { heap: parser.heap, symbols: parser.symbol_table, types: parser.type_table, + pool: parser.string_pool, } } @@ -209,6 
+212,7 @@ impl AstOkTester { let mut found = false; for definition in self.heap.definitions.iter() { if let Definition::Function(definition) = definition { + println!("DEBUG: Have {}", definition.identifier.value.as_str()); if definition.identifier.value.as_str() != name { continue; } @@ -218,6 +222,8 @@ impl AstOkTester { f(tester); found = true; break; + } else { + println!("DEBUG: Have (not a function, but) {}", definition.identifier().value.as_str()); } }