From a67d4fde9cb55a76963f939a234b463f30b9acda 2022-02-23 14:08:11 From: mh Date: 2022-02-23 14:08:11 Subject: [PATCH] Finish refactoring type table --- diff --git a/src/collections/scoped_buffer.rs b/src/collections/scoped_buffer.rs index 518c889433f8045e9c80d5ce3862414e60c38369..da789984728f3c4796ea46d3715fc91395ab10fd 100644 --- a/src/collections/scoped_buffer.rs +++ b/src/collections/scoped_buffer.rs @@ -140,8 +140,8 @@ impl ScopedSection { pub(crate) fn contains(&self, value: &T) -> bool { self.check_length(); let vec = unsafe{&*self.inner}; - for index in self.start_size..vec.len() { - if vec[index] == value { + for index in self.start_size as usize..vec.len() { + if &vec[index] == value { return true; } } diff --git a/src/protocol/parser/pass_typing.rs b/src/protocol/parser/pass_typing.rs index 50118db8a453a2676beacabe0846c6d440c4aef2..225f8c22c1a2eef4117c1da4051ea3391a17f1dd 100644 --- a/src/protocol/parser/pass_typing.rs +++ b/src/protocol/parser/pass_typing.rs @@ -42,8 +42,6 @@ macro_rules! 
debug_log { }; } -use std::collections::{HashMap, HashSet}; - use crate::collections::{ScopedBuffer, ScopedSection, DequeSet}; use crate::protocol::ast::*; use crate::protocol::input_source::ParseError; @@ -809,7 +807,7 @@ enum SingleInferenceResult { // ----------------------------------------------------------------------------- type InferNodeIndex = usize; -type PolyDataIndex = usize; +type PolyDataIndex = isize; type VarDataIndex = usize; enum DefinitionType{ @@ -837,7 +835,6 @@ pub(crate) struct ResolveQueueElement { pub(crate) type ResolveQueue = Vec; -#[derive(Clone)] struct InferenceNode { expr_type: InferenceType, // result type from expression expr_id: ExpressionId, // expression that is evaluated @@ -891,6 +888,9 @@ impl InferenceRule { union_cast_method_impl!(as_variable_expr, InferenceRuleVariableExpr, InferenceRule::VariableExpr); } +// Note: InferenceRuleTemplate is `Copy`, so don't add dynamically allocated +// members in the future (or review places where this struct is copied) +#[derive(Clone, Copy)] struct InferenceRuleTemplate { template: &'static [InferenceTypePart], application: InferenceRuleTemplateApplication, @@ -1043,6 +1043,12 @@ struct PolyData { /// Inferred types of the polymorphic variables as they are written down /// at the type's definition. poly_vars: Vec, + expr_types: PolyDataTypes, +} + +// silly structure, just so we can use `PolyDataTypeIndex` ergonomically while +// making sure we're still capable of borrowing from `poly_vars`. +struct PolyDataTypes { /// Inferred types of associated types (e.g. struct fields, tuple members, /// function arguments). These types may depend on the polymorphic variables /// defined above. 
@@ -1059,7 +1065,7 @@ enum PolyDataTypeIndex { Returned, } -impl PolyData { +impl PolyDataTypes { fn get_type(&self, index: PolyDataTypeIndex) -> &InferenceType { match index { PolyDataTypeIndex::Associated(index) => return &self.associated[index], @@ -1205,10 +1211,6 @@ impl PassTyping { debug_log!("Visiting component '{}': {}", comp_def.identifier.value.as_str(), id.0.index); debug_log!("{}", "-".repeat(50)); - // Reserve data for expression types - debug_assert!(self.infer_nodes.is_empty()); - self.infer_nodes.resize(comp_def.num_expressions_in_body as usize, Default::default()); - // Visit parameters let section = self.var_buffer.start_section_initialized(comp_def.parameters.as_slice()); for param_id in section.iter_copied() { @@ -1251,10 +1253,6 @@ impl PassTyping { } debug_log!("{}", "-".repeat(50)); - // Reserve data for expression types - debug_assert!(self.infer_nodes.is_empty()); - self.infer_nodes.resize(func_def.num_expressions_in_body as usize, Default::default()); - // Visit parameters let section = self.var_buffer.start_section_initialized(func_def.parameters.as_slice()); for param_id in section.iter_copied() { @@ -1769,9 +1767,9 @@ impl PassTyping { let element_indices = expr_indices.into_vec(); // Assign rule and extra data index to inference node - let extra_index = self.insert_initial_struct_polymorph_data(ctx, id); + let poly_data_index = self.insert_initial_struct_polymorph_data(ctx, id); let node = &mut self.infer_nodes[self_index]; - node.poly_data_index = extra_index; + node.poly_data_index = poly_data_index; node.inference_rule = InferenceRule::LiteralStruct(InferenceRuleLiteralStruct{ element_indices, }); @@ -1781,15 +1779,15 @@ impl PassTyping { // have a user-defined polymorphic marker variable. 
For this // reason we may still have to apply inference to this // polymorphic variable - let extra_index = self.insert_initial_enum_polymorph_data(ctx, id); + let poly_data_index = self.insert_initial_enum_polymorph_data(ctx, id); let node = &mut self.infer_nodes[self_index]; - node.poly_data_index = extra_index; + node.poly_data_index = poly_data_index; node.inference_rule = InferenceRule::LiteralEnum; }, Literal::Union(literal) => { // May carry subexpressions and polymorphic arguments let expr_ids = self.expr_buffer.start_section_initialized(literal.values.as_slice()); - let extra_index = self.insert_initial_union_polymorph_data(ctx, id); + let poly_data_index = self.insert_initial_union_polymorph_data(ctx, id); let mut expr_indices = self.index_buffer.start_section(); for expr_id in expr_ids.iter_copied() { @@ -1800,15 +1798,15 @@ impl PassTyping { let element_indices = expr_indices.into_vec(); let node = &mut self.infer_nodes[self_index]; - node.poly_data_index = extra_index; + node.poly_data_index = poly_data_index; node.inference_rule = InferenceRule::LiteralUnion(InferenceRuleLiteralUnion{ element_indices, }); }, - Literal::Array(expressions) | Literal::Tuple(expressions) => { + Literal::Array(expressions) => { let expr_ids = self.expr_buffer.start_section_initialized(expressions.as_slice()); - let mut expr_indices = self.index_buffer.start_section(); + let mut expr_indices = self.index_buffer.start_section(); for expr_id in expr_ids.iter_copied() { let expr_index = self.visit_expr(ctx, expr_id)?; expr_indices.push(expr_index); @@ -1817,9 +1815,24 @@ impl PassTyping { let element_indices = expr_indices.into_vec(); let node = &mut self.infer_nodes[self_index]; - node.poly_data_index = extra_index; node.inference_rule = InferenceRule::LiteralArray(InferenceRuleLiteralArray{ element_indices, + }); + }, + Literal::Tuple(expressions) => { + let expr_ids = self.expr_buffer.start_section_initialized(expressions.as_slice()); + + let mut expr_indices = 
self.index_buffer.start_section(); + for expr_id in expr_ids.iter_copied() { + let expr_index = self.visit_expr(ctx, expr_id)?; + expr_indices.push(expr_index); + } + expr_ids.forget(); + let element_indices = expr_indices.into_vec(); + + let node = &mut self.infer_nodes[self_index]; + node.inference_rule = InferenceRule::LiteralTuple(InferenceRuleLiteralTuple{ + element_indices, }) } } @@ -1851,6 +1864,7 @@ impl PassTyping { // the programmer explicitly specified the output type, then we can // already perform that inference rule here. { + let cast_expr = &ctx.heap[id]; let specified_type = self.determine_inference_type_from_parser_type_elements(&cast_expr.to_type.elements, true); let _progress = self.apply_template_constraint(ctx, self_index, &specified_type.parts)?; } @@ -1960,11 +1974,9 @@ impl PassTyping { fn resolve_types(&mut self, ctx: &mut Ctx, queue: &mut ResolveQueue) -> Result<(), ParseError> { // Keep inferring until we can no longer make any progress while !self.node_queued.is_empty() { - // Make as much progress as possible without forced integer - // inference. while !self.node_queued.is_empty() { - let next_expr_idx = self.node_queued.pop_front().unwrap(); - self.progress_expr(ctx, next_expr_idx)?; + let node_index = self.node_queued.pop_front().unwrap(); + self.progress_inference_rule(ctx, node_index)?; } // Nothing is queued anymore. 
However we might have integer literals @@ -1979,8 +1991,8 @@ impl PassTyping { // Requeue expression (and its parent, if it exists) self.node_queued.push_back(infer_node_index); - if let Some(parent_node_index) = infer_node.parent_index { - self.expr_queued(parent_node_index); + if let Some(node_parent_index) = infer_node.parent_index { + self.node_queued.push_back(node_parent_index); } } } @@ -2047,23 +2059,23 @@ impl PassTyping { } // Expression is fine, check if any extra data is attached - if infer_expr.extra_data_idx < 0 { continue; } + if infer_expr.poly_data_index < 0 { continue; } // Extra data is attached, perform typechecking and transfer // resolved information to the expression - let extra_data = &self.poly_data[infer_expr.extra_data_idx as usize]; + let poly_data = &self.poly_data[infer_expr.poly_data_index as usize]; // Note that only call and literal expressions need full inference. // Select expressions also use `extra_data`, but only for temporary // storage of the struct type whose field it is selecting. - match &ctx.heap[extra_data.expr_id] { + match &ctx.heap[infer_expr.expr_id] { Expression::Call(expr) => { // Check if it is not a builtin function. If not, then // construct the first part of the concrete type. 
let first_concrete_part = if expr.method == Method::UserFunction { - ConcreteTypePart::Function(expr.definition, extra_data.poly_vars.len() as u32) + ConcreteTypePart::Function(expr.definition, poly_data.poly_vars.len() as u32) } else if expr.method == Method::UserComponent { - ConcreteTypePart::Component(expr.definition, extra_data.poly_vars.len() as u32) + ConcreteTypePart::Component(expr.definition, poly_data.poly_vars.len() as u32) } else { // Builtin function continue; @@ -2071,7 +2083,7 @@ impl PassTyping { let definition_id = expr.definition; let concrete_type = inference_type_to_concrete_type( - ctx, extra_data.expr_id, &extra_data.poly_vars, first_concrete_part + ctx, infer_expr.expr_id, &poly_data.poly_vars, first_concrete_part )?; match ctx.types.get_procedure_monomorph_type_id(&definition_id, &concrete_type.parts) { @@ -2098,9 +2110,9 @@ impl PassTyping { Literal::Struct(lit) => lit.definition, _ => unreachable!(), }; - let first_concrete_part = ConcreteTypePart::Instance(definition_id, extra_data.poly_vars.len() as u32); + let first_concrete_part = ConcreteTypePart::Instance(definition_id, poly_data.poly_vars.len() as u32); let concrete_type = inference_type_to_concrete_type( - ctx, extra_data.expr_id, &extra_data.poly_vars, first_concrete_part + ctx, infer_expr.expr_id, &poly_data.poly_vars, first_concrete_part )?; let type_id = ctx.types.add_monomorphed_type(ctx.modules, ctx.heap, ctx.arch, definition_id, concrete_type)?; infer_expr.type_id = type_id; @@ -2109,7 +2121,7 @@ impl PassTyping { debug_assert!(infer_expr.field_or_monomorph_index >= 0); }, _ => { - unreachable!("handling extra data for expression {:?}", &ctx.heap[extra_data.expr_id]); + unreachable!("handling extra data for expression {:?}", &ctx.heap[infer_expr.expr_id]); } } } @@ -2201,7 +2213,7 @@ impl PassTyping { fn progress_inference_rule_mono_template(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> { let node = &self.infer_nodes[node_index]; - let rule = 
node.inference_rule.as_mono_template(); + let rule = *node.inference_rule.as_mono_template(); let progress = self.progress_template(ctx, node_index, rule.application, rule.template)?; if progress { self.queue_node_parent(node_index); } @@ -2212,9 +2224,10 @@ impl PassTyping { fn progress_inference_rule_bi_equal(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> { let node = &self.infer_nodes[node_index]; let rule = node.inference_rule.as_bi_equal(); + let template = rule.template; let arg_index = rule.argument_index; - let base_progress = self.progress_template(ctx, node_index, rule.template.application, rule.template.template)?; + let base_progress = self.progress_template(ctx, node_index, template.application, template.template)?; let (node_progress, arg_progress) = self.apply_equal2_constraint(ctx, node_index, node_index, 0, arg_index, 0)?; if base_progress || node_progress { self.queue_node_parent(node_index); } @@ -2226,16 +2239,19 @@ impl PassTyping { fn progress_inference_rule_tri_equal_args(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> { let node = &self.infer_nodes[node_index]; let rule = node.inference_rule.as_tri_equal_args(); + + let result_template = rule.result_template; + let argument_template = rule.argument_template; let arg1_index = rule.argument1_index; let arg2_index = rule.argument2_index; - let self_template_progress = self.progress_template(ctx, node_index, rule.result_template.application, rule.result_template.template)?; - let arg1_template_progress = self.progress_template(ctx, arg1_index, rule.argument_template.application, rule.argument_template.template)?; + let self_template_progress = self.progress_template(ctx, node_index, result_template.application, result_template.template)?; + let arg1_template_progress = self.progress_template(ctx, arg1_index, argument_template.application, argument_template.template)?; let (arg1_progress, arg2_progress) = self.apply_equal2_constraint(ctx, 
node_index, arg1_index, 0, arg2_index, 0)?; if self_template_progress { self.queue_node_parent(node_index); } if arg1_template_progress || arg1_progress { self.queue_node(arg1_index); } - if arg2_template_progress || arg2_progress { self.queue_node(arg2_index); } + if arg2_progress { self.queue_node(arg2_index); } return Ok(()); } @@ -2243,10 +2259,12 @@ impl PassTyping { fn progress_inference_rule_tri_equal_all(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> { let node = &self.infer_nodes[node_index]; let rule = node.inference_rule.as_tri_equal_all(); + + let template = rule.template; let arg1_index = rule.argument1_index; let arg2_index = rule.argument2_index; - let template_progress = self.progress_template(ctx, node_index, rule.template.application, rule.template.template)?; + let template_progress = self.progress_template(ctx, node_index, template.application, template.template)?; let (node_progress, arg1_progress, arg2_progress) = self.apply_equal3_constraint(ctx, node_index, arg1_index, arg2_index, 0)?; @@ -2271,7 +2289,7 @@ impl PassTyping { let someone_is_str = expr_is_str || arg1_is_str || arg2_is_str; let someone_is_not_str = expr_is_not_str || arg1_is_not_str || arg2_is_not_str; - + println!("DEBUG: Running concat, is_str = {}, is_not_str = {}", someone_is_str, someone_is_not_str); // Note: this statement is an expression returning the progression bools let (node_progress, arg1_progress, arg2_progress) = if someone_is_str { // One of the arguments is a string, then all must be strings @@ -2342,6 +2360,7 @@ impl PassTyping { // Same as array indexing: result depends on whether subject is string // or array let (is_string, is_not_string) = self.type_is_certainly_or_certainly_not_string(node_index); + println!("DEBUG: Running slicing, is_str = {}, is_not_str = {}", is_string, is_not_string); let (node_progress, subject_progress) = if is_string { // Certainly a string ( @@ -2384,6 +2403,7 @@ impl PassTyping { let rule = 
node.inference_rule.as_select_struct_field(); let subject_index = rule.subject_index; + let selected_field = rule.selected_field.clone(); fn get_definition_id_from_inference_type(inference_type: &InferenceType) -> Result, ()> { for part in inference_type.parts.iter() { @@ -2413,9 +2433,9 @@ impl PassTyping { struct_definition } else { return Err(ParseError::new_error_at_span( - &ctx.module().source, rule.selected_field.span, format!( + &ctx.module().source, selected_field.span, format!( "Can only apply field access to structs, got a subject of type '{}'", - subject_type.display_name(&ctx.heap) + subject_node.expr_type.display_name(&ctx.heap) ) )); }; @@ -2424,7 +2444,7 @@ impl PassTyping { // expression let mut field_found = false; for (field_index, field) in struct_definition.fields.iter().enumerate() { - if field.identifier.value == rule.selected_field.value { + if field.identifier.value == selected_field.value { // Found the field of interest field_found = true; let node = &mut self.infer_nodes[node_index]; @@ -2436,9 +2456,9 @@ impl PassTyping { if !field_found { let struct_definition = ctx.heap[definition_id].as_struct(); return Err(ParseError::new_error_at_span( - &ctx.module().source, rule.selected_field.span, format!( + &ctx.module().source, selected_field.span, format!( "this field does not exist on the struct '{}'", - ast_struct_def.identifier.value.as_str() + struct_definition.identifier.value.as_str() ) )); } @@ -2458,7 +2478,7 @@ impl PassTyping { return Err(ParseError::new_error_at_span( &ctx.module().source, rule.selected_field.span, format!( "Can only apply field access to structs, got a subject of type '{}'", - subject_type.display_name(&ctx.heap) + subject_node.expr_type.display_name(&ctx.heap) ) )); }, @@ -2551,7 +2571,7 @@ impl PassTyping { } let (progress_member, progress_subject) = self.apply_equal2_constraint( - ctx, node_index, node_index, 0, subject_index, selected_member_start_idx + ctx, node_index, node_index, 0, subject_index, 
selected_member_start_index )?; if progress_member { self.queue_node_parent(node_index); } @@ -2562,13 +2582,15 @@ impl PassTyping { fn progress_inference_rule_literal_struct(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> { let node = &self.infer_nodes[node_index]; + let node_expr_id = node.expr_id; let rule = node.inference_rule.as_literal_struct(); // For each of the fields in the literal struct, apply the type equality // constraint. If the literal is polymorphic, then we try to progress // their types during this process + let element_indices_section = self.index_buffer.start_section_initialized(&rule.element_indices); let mut poly_progress_section = self.poly_progress_buffer.start_section(); - for (field_index, field_node_index) in rule.element_indices.iter().copied().enumerate() { + for (field_index, field_node_index) in element_indices_section.iter_copied().enumerate() { let field_expr_id = self.infer_nodes[field_node_index].expr_id; let (_, progress_field) = self.apply_polydata_equal2_constraint( ctx, node_index, field_expr_id, "struct field's", @@ -2582,14 +2604,14 @@ impl PassTyping { // Now we do the same thing for the struct literal expression (the type // of the struct itself). let (_, progress_literal_1) = self.apply_polydata_equal2_constraint( - ctx, node_index, node.expr_id, "struct literal's", + ctx, node_index, node_expr_id, "struct literal's", PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section )?; // And the other way around: if any of our polymorphic variables are // more specific then they were before, then we forward that information // back to our struct/fields. 
- for (field_index, field_node_index) in rule.element_indices.iter().copied().enumerate() { + for (field_index, field_node_index) in element_indices_section.iter_copied().enumerate() { let progress_field = self.apply_polydata_polyvar_constraint( ctx, node_index, PolyDataTypeIndex::Associated(field_index), field_node_index, &poly_progress_section @@ -2606,20 +2628,21 @@ impl PassTyping { if progress_literal_1 || progress_literal_2 { self.queue_node_parent(node_index); } poly_progress_section.forget(); + element_indices_section.forget(); + self.finish_polydata_constraint(node_index); return Ok(()) } fn progress_inference_rule_literal_enum(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> { let node = &self.infer_nodes[node_index]; - let rule = node.inference_rule.as_literal_enum(); - + let node_expr_id = node.expr_id; let mut poly_progress_section = self.poly_progress_buffer.start_section(); // An enum literal type is simply, well, the enum's type. However, it // might still have polymorphic variables, hence the use of `PolyData`. let (_, progress_literal_1) = self.apply_polydata_equal2_constraint( - ctx, node_index, node.expr_id, "enum literal's", + ctx, node_index, node_expr_id, "enum literal's", PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section )?; @@ -2636,13 +2659,15 @@ impl PassTyping { fn progress_inference_rule_literal_union(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> { let node = &self.infer_nodes[node_index]; + let node_expr_id = node.expr_id; let rule = node.inference_rule.as_literal_union(); // Infer type of any embedded values in the union variant. At the same // time progress the polymorphic variables associated with the union. 
+ let element_indices_section = self.index_buffer.start_section_initialized(&rule.element_indices); let mut poly_progress_section = self.poly_progress_buffer.start_section(); - for (embedded_index, embedded_node_index) in rule.element_indices.iter().copied().enumerate() { + for (embedded_index, embedded_node_index) in element_indices_section.iter_copied().enumerate() { let embedded_node_expr_id = self.infer_nodes[embedded_node_index].expr_id; let (_, progress_embedded) = self.apply_polydata_equal2_constraint( ctx, node_index, embedded_node_expr_id, "embedded value's", @@ -2654,13 +2679,13 @@ impl PassTyping { } let (_, progress_literal_1) = self.apply_polydata_equal2_constraint( - ctx, node_index, node.expr_id, "union's", + ctx, node_index, node_expr_id, "union's", PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section )?; // Propagate progress in the polymorphic variables to the expressions // that constitute the union literal. - for (embedded_index, embedded_node_index) in rule.element_indices.iter().copied().enumerate() { + for (embedded_index, embedded_node_index) in element_indices_section.iter_copied().enumerate() { let progress_embedded = self.apply_polydata_polyvar_constraint( ctx, node_index, PolyDataTypeIndex::Associated(embedded_index), embedded_node_index, &poly_progress_section @@ -2710,7 +2735,7 @@ impl PassTyping { // It is possible that the `Array` has a more progress `T` then // the arguments. 
So in the case we progress our argument type we // simply queue this rule again - if progress_argument { self.queue_expr(node_index); } + if progress_argument { self.queue_node(node_index); } } argument_node_indices.forget(); @@ -2752,10 +2777,11 @@ impl PassTyping { } // Prepare for next element + let node = &self.infer_nodes[node_index]; let subtree_end_index = InferenceType::find_subtree_end_idx(&node.expr_type.parts, element_subtree_start_index); element_subtree_start_index = subtree_end_index; } - debug_assert_eq!(element_subtree_end_index, node.expr_type.parts.len()); + debug_assert_eq!(element_subtree_start_index, self.infer_nodes[node_index].expr_type.parts.len()); if progress_literal { self.queue_node_parent(node_index); } @@ -2789,11 +2815,7 @@ impl PassTyping { debug_assert!(index != parts.len()); let part = &parts[index]; - if ( - *part == InferenceTypePart::Bool || - *part == InferenceTypePart::Character || - part.is_concrete_integer() - ) { + if *part == InferenceTypePart::Bool || *part == InferenceTypePart::Character || part.is_concrete_integer() { debug_assert!(index + 1 == parts.len()); // type is done, first part does not have children -> must be at end return true; } else { @@ -2816,7 +2838,7 @@ impl PassTyping { return Err(ParseError::new_error_str_at_span( &ctx.module().source, cast_expr.full_span(), "invalid casting operation" ).with_info_at_span( - &ctx.module.source, subject_expr.full_span(), format!( + &ctx.module().source, subject_expr.full_span(), format!( "cannot cast the argument type '{}' to the type '{}'", subject.expr_type.display_name(&ctx.heap), node.expr_type.display_name(&ctx.heap) @@ -2829,6 +2851,7 @@ impl PassTyping { fn progress_inference_rule_call_expr(&mut self, ctx: &Ctx, node_index: InferNodeIndex) -> Result<(), ParseError> { let node = &self.infer_nodes[node_index]; + let node_expr_id = node.expr_id; let rule = node.inference_rule.as_call_expr(); let mut poly_progress_section = self.poly_progress_buffer.start_section(); 
@@ -2848,9 +2871,8 @@ impl PassTyping { } // Same for the return type. - let call_expr_id = node.expr_id; let (_, progress_call_1) = self.apply_polydata_equal2_constraint( - ctx, node_index, call_expr_id, "return", + ctx, node_index, node_expr_id, "return", PolyDataTypeIndex::Returned, 0, node_index, 0, &mut poly_progress_section )?; @@ -2885,8 +2907,8 @@ impl PassTyping { let node = &mut self.infer_nodes[node_index]; let rule = node.inference_rule.as_variable_expr(); let var_data_index = rule.var_data_index; - let var_data = &mut self.var_data[var_data_index]; + let var_data = &mut self.var_data[var_data_index]; // Apply inference to the shared variable type and the expression type let shared_type: *mut _ = &mut var_data.var_type; let expr_type: *mut _ = &mut node.expr_type; @@ -2908,7 +2930,7 @@ impl PassTyping { // to all associated variable expressions (and relatived variables). for other_node_index in var_data.used_at.iter().copied() { if other_node_index != node_index { - self.queue_node(other_node_index); + self.node_queued.push_back(other_node_index); } } @@ -2938,7 +2960,7 @@ impl PassTyping { match inference_result { SingleInferenceResult::Modified => { for used_at in linked_var_data.used_at.iter().copied() { - self.queue_node(used_at); + self.node_queued.push_back(used_at); } }, SingleInferenceResult::Unmodified => {}, @@ -2995,14 +3017,36 @@ impl PassTyping { /// not a string (false, true), or still unknown (false, false). 
fn type_is_certainly_or_certainly_not_string(&self, node_index: InferNodeIndex) -> (bool, bool) { let expr_type = &self.infer_nodes[node_index].expr_type; - if expr_type.is_done { - if expr_type.parts[0] == InferenceTypePart::String { + println!("DEBUG: Running test on {:?}", expr_type.parts); + let mut part_index = 0; + while part_index < expr_type.parts.len() { + let part = &expr_type.parts[part_index]; + + if part.is_marker() { part_index += 1; continue; } + if !part.is_concrete() { break; } + + if *part == InferenceTypePart::String { + // First part is a string return (true, false); } else { return (false, true); } } + // If here then first non-marker type is not concrete + if part_index == expr_type.parts.len() { + // nothing known at all + return (false, false); + } + + // Special case: array-like where its argument is not a character + if part_index + 1 < expr_type.parts.len() { + if expr_type.parts[part_index] == InferenceTypePart::ArrayLike && expr_type.parts[part_index + 1] != InferenceTypePart::Character { + return (false, true); + } + } + + (false, false) } @@ -3090,7 +3134,8 @@ impl PassTyping { poly_progress_section: &mut ScopedSection, ) -> Result<(bool, bool), ParseError> { let poly_data_index = self.infer_nodes[outer_node_index].poly_data_index; - let poly_data_type = self.poly_data[poly_data_index].get_type_mut(poly_data_type_index); + let poly_data = &mut self.poly_data[poly_data_index as usize]; + let poly_data_type = poly_data.expr_types.get_type_mut(poly_data_type_index); let associated_type: *mut _ = &mut self.infer_nodes[associated_node_index].expr_type; let inference_result = unsafe{ @@ -3131,7 +3176,7 @@ impl PassTyping { // (hopefully) more specific types to update their representation // in the PolyData struct for (poly_var_index, poly_var_section) in poly_data_type.marker_iter() { - let poly_var_type = &mut self.poly_data[poly_data_index].poly_vars[poly_var_index as usize]; + let poly_var_type = &mut poly_data.poly_vars[poly_var_index as usize]; match
InferenceType::infer_subtree_for_single_type(poly_var_type, 0, poly_var_section, 0, false) { SingleInferenceResult::Modified => { poly_progress_section.push_unique(poly_var_index); @@ -3141,7 +3186,7 @@ impl PassTyping { }, SingleInferenceResult::Incompatible => { return Err(Self::construct_poly_arg_error( - ctx, &self.poly_data[poly_data_index], + ctx, &self.poly_data[poly_data_index as usize], self.infer_nodes[outer_node_index].expr_id )); } @@ -3178,7 +3223,7 @@ impl PassTyping { associated_node_index: InferNodeIndex, poly_progress_section: &ScopedSection ) -> bool { let poly_data_index = self.infer_nodes[outer_node_index].poly_data_index; - let poly_data = &mut self.poly_data[poly_data_index]; + let poly_data = &mut self.poly_data[poly_data_index as usize]; // Early exit, most common case (literals or functions calls which are // actually not polymorphic) @@ -3187,7 +3232,7 @@ impl PassTyping { } // safety: we're borrowing from two distinct fields, so should be fine - let poly_data_type = poly_data.get_type_mut(poly_data_type_index); + let poly_data_type = poly_data.expr_types.get_type_mut(poly_data_type_index); let mut last_start_index = 0; let mut modified_poly_type = false; @@ -3236,7 +3281,7 @@ impl PassTyping { /// constraints. 
fn finish_polydata_constraint(&mut self, outer_node_index: InferNodeIndex) { let poly_data_index = self.infer_nodes[outer_node_index].poly_data_index; - let poly_data = &mut self.poly_data[poly_data_index]; + let poly_data = &mut self.poly_data[poly_data_index as usize]; poly_data.first_rule_application = false; } @@ -3419,7 +3464,7 @@ impl PassTyping { inference_rule: InferenceRule::Noop, parent_index: self.parent_index, field_or_monomorph_index: -1, - poly_data_index: PolyDataIndex::MAX, + poly_data_index: -1, type_id: TypeId::new_invalid(), }); @@ -3484,8 +3529,10 @@ impl PassTyping { first_rule_application: true, definition_id: call.definition, poly_vars: poly_args, - associated: parameter_types, - returned: return_type + expr_types: PolyDataTypes { + associated: parameter_types, + returned: return_type + } }); return extra_data_idx } @@ -3546,8 +3593,10 @@ impl PassTyping { first_rule_application: true, definition_id: literal.definition, poly_vars: poly_args, - associated: embedded_types, - returned: return_type, + expr_types: PolyDataTypes { + associated: embedded_types, + returned: return_type, + }, }); return extra_data_index @@ -3593,8 +3642,10 @@ impl PassTyping { first_rule_application: true, definition_id: literal.definition, poly_vars: poly_args, - associated: Vec::new(), - returned: enum_type, + expr_types: PolyDataTypes { + associated: Vec::new(), + returned: enum_type, + }, }); return extra_data_index; @@ -3649,13 +3700,15 @@ impl PassTyping { debug_assert_eq!(parts_reserved, parts.len()); let union_type = InferenceType::new(!poly_args.is_empty(), union_type_done, parts); - let extra_data_index = self.poly_data.len(); + let extra_data_index = self.poly_data.len() as isize; self.poly_data.push(PolyData { first_rule_application: true, definition_id: literal.definition, poly_vars: poly_args, - associated: embedded, - returned: union_type + expr_types: PolyDataTypes { + associated: embedded, + returned: union_type, + }, }); return extra_data_index; @@ 
-3697,8 +3750,10 @@ impl PassTyping { first_rule_application: true, definition_id: struct_def_id, poly_vars, - associated: vec![InferenceType::new(num_poly_vars != 0, num_poly_vars == 0, struct_parts)], - returned: field_type + expr_types: PolyDataTypes { + associated: vec![InferenceType::new(num_poly_vars != 0, num_poly_vars == 0, struct_parts)], + returned: field_type, + }, }); return extra_data_index; @@ -4060,7 +4115,7 @@ impl PassTyping { // - check return type with itself if let Some((poly_idx, section_a, section_b)) = has_poly_mismatch( - &poly_data.returned, &poly_data.returned + &poly_data.expr_types.returned, &poly_data.expr_types.returned ) { return construct_main_error(ctx, poly_data, poly_idx, expr) .with_info_at_span( @@ -4074,8 +4129,8 @@ impl PassTyping { } // - check arguments with each other argument and with return type - for (arg_a_idx, arg_a) in poly_data.associated.iter().enumerate() { - for (arg_b_idx, arg_b) in poly_data.associated.iter().enumerate() { + for (arg_a_idx, arg_a) in poly_data.expr_types.associated.iter().enumerate() { + for (arg_b_idx, arg_b) in poly_data.expr_types.associated.iter().enumerate() { if arg_b_idx > arg_a_idx { break; } @@ -4111,7 +4166,7 @@ impl PassTyping { } // Check with return type - if let Some((poly_idx, section_arg, section_ret)) = has_poly_mismatch(arg_a, &poly_data.returned) { + if let Some((poly_idx, section_arg, section_ret)) = has_poly_mismatch(arg_a, &poly_data.expr_types.returned) { let arg = &ctx.heap[expr_args[arg_a_idx]]; return construct_main_error(ctx, poly_data, poly_idx, expr) .with_info_at_span( @@ -4132,7 +4187,7 @@ impl PassTyping { // Now check against the explicitly specified polymorphic variables (if // any). 
- for (arg_idx, arg) in poly_data.associated.iter().enumerate() { + for (arg_idx, arg) in poly_data.expr_types.associated.iter().enumerate() { if let Some((poly_idx, poly_section, arg_section)) = has_explicit_poly_mismatch(&poly_data.poly_vars, arg) { let arg = &ctx.heap[expr_args[arg_idx]]; return construct_main_error(ctx, poly_data, poly_idx, expr) @@ -4146,7 +4201,7 @@ impl PassTyping { } } - if let Some((poly_idx, poly_section, ret_section)) = has_explicit_poly_mismatch(&poly_data.poly_vars, &poly_data.returned) { + if let Some((poly_idx, poly_section, ret_section)) = has_explicit_poly_mismatch(&poly_data.poly_vars, &poly_data.expr_types.returned) { return construct_main_error(ctx, poly_data, poly_idx, expr) .with_info_at_span( &ctx.module().source, expr.full_span(), format!( diff --git a/src/protocol/tests/parser_validation.rs b/src/protocol/tests/parser_validation.rs index 3b8dfcb0602f276740252c11130e9a4e659f525a..a3fc6900e3388a8fff3c4f2dc2fa6d2c91b93097 100644 --- a/src/protocol/tests/parser_validation.rs +++ b/src/protocol/tests/parser_validation.rs @@ -347,7 +347,7 @@ fn test_incorrect_union_instance() { " ).error(|e| { e .assert_occurs_at(0, "Foo::A") - .assert_msg_has(0, "failed to fully resolve") + .assert_msg_has(0, "failed to resolve") .assert_occurs_at(1, "false") .assert_msg_has(1, "has been resolved to 's32'") .assert_msg_has(1, "has been resolved to 'bool'");