Changeset - e2849e9bfb16
mh <contact@maxhenger.nl> - 2021-05-05 12:00:13
moving to desktop
9 files changed with 136 insertions and 122 deletions:
src/collections/scoped_buffer.rs
 
@@ -113,11 +113,11 @@ impl<T: Sized> std::ops::Index<usize> for ScopedSection<T> {
     }
 }
 
 #[cfg(debug_assertions)]
 impl<T: Sized> Drop for ScopedSection<T> {
     fn drop(&mut self) {
-        let mut vec = unsafe{&mut *self.inner};
+        let vec = unsafe{&mut *self.inner};
         debug_assert_eq!(vec.len(), self.cur_size as usize);
         vec.truncate(self.start_size as usize);
     }
 }
\ No newline at end of file
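
Note: the Drop impl above only exists with debug assertions enabled; it checks that a section is dropped at exactly the size it grew to, then truncates the shared buffer back to the section's start. A minimal sketch of the idea behind ScopedBuffer/ScopedSection follows; it is not the crate's actual implementation (the real type holds a raw `inner` pointer, here replaced by a plain borrow, and method names not visible in this changeset are assumptions based on how they are used).

// Sketch only: a scratch buffer that hands out "sections" which are either
// converted into their own Vec or forgotten, shrinking the buffer back.
struct ScopedBuffer<T> { inner: Vec<T> }
struct ScopedSection<'a, T> { inner: &'a mut Vec<T>, start_size: u32 }

impl<T> ScopedBuffer<T> {
    fn new_reserved(capacity: usize) -> Self { Self { inner: Vec::with_capacity(capacity) } }
    fn start_section(&mut self) -> ScopedSection<'_, T> {
        let start_size = self.inner.len() as u32;
        ScopedSection { inner: &mut self.inner, start_size }
    }
}

impl<'a, T> ScopedSection<'a, T> {
    fn push(&mut self, value: T) { self.inner.push(value) }
    // Take this section's elements out and shrink the buffer back.
    fn into_vec(self) -> Vec<T> { self.inner.split_off(self.start_size as usize) }
    // Discard this section's elements without building a Vec.
    fn forget(self) { self.inner.truncate(self.start_size as usize) }
}

fn main() {
    let mut buffer = ScopedBuffer::new_reserved(128);
    let mut section = buffer.start_section();
    section.push(1);
    section.push(2);
    assert_eq!(section.into_vec(), vec![1, 2]);
    assert!(buffer.inner.is_empty());
}
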
src/protocol/ast_printer.rs
 
 #![allow(dead_code)]
 
-use std::fmt::{Debug, Display, Write};
+use std::fmt::{Debug, Display};
+use std::io::Write as IOWrite;
 
 use super::ast::*;
 use super::token_parsing::*;
 
 const INDENT: usize = 2;
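
Note: the import swap replaces std::fmt::Write with std::io::Write under the alias IOWrite, which suggests the AST printer now writes to an I/O sink rather than building a String; the alias also keeps the name from clashing should both write traits be needed in one module. A small self-contained illustration of that pattern (not code from this changeset):

use std::fmt::Write as FmtWrite; // trait for writing into a String
use std::io::Write as IOWrite;   // trait for writing into files, stdout, byte buffers

fn main() -> std::io::Result<()> {
    let mut text = String::new();
    write!(text, "indent = {}", 2).unwrap(); // resolves through fmt::Write

    let mut bytes: Vec<u8> = Vec::new();
    write!(bytes, "indent = {}", 2)?;        // resolves through io::Write
    assert_eq!(bytes, b"indent = 2");
    Ok(())
}
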
src/protocol/parser/pass_definitions.rs
 
@@ -15,13 +15,13 @@ pub(crate) struct PassDefinitions {
     struct_fields: ScopedBuffer<StructFieldDefinition>,
     enum_variants: ScopedBuffer<EnumVariantDefinition>,
     union_variants: ScopedBuffer<UnionVariantDefinition>,
     parameters: ScopedBuffer<ParameterId>,
     expressions: ScopedBuffer<ExpressionId>,
     statements: ScopedBuffer<StatementId>,
-    parser_types: Vec<ParserType>,
+    parser_types: ScopedBuffer<ParserType>,
 }
 
 impl PassDefinitions {
     pub(crate) fn new() -> Self {
         Self{
             cur_definition: DefinitionId::new_invalid(),
@@ -29,13 +29,13 @@ impl PassDefinitions {
             struct_fields: ScopedBuffer::new_reserved(128),
             enum_variants: ScopedBuffer::new_reserved(128),
             union_variants: ScopedBuffer::new_reserved(128),
             parameters: ScopedBuffer::new_reserved(128),
             expressions: ScopedBuffer::new_reserved(128),
             statements: ScopedBuffer::new_reserved(128),
-            parser_types: Vec::with_capacity(128),
+            parser_types: ScopedBuffer::new_reserved(128),
         }
     }
 
     pub(crate) fn parse(&mut self, modules: &mut [Module], module_idx: usize, ctx: &mut PassCtx) -> Result<(), ParseError> {
         let module = &modules[module_idx];
         let module_range = &module.tokens.ranges[0];
@@ -201,29 +201,31 @@ impl PassDefinitions {
         consume_comma_separated(
             TokenKind::OpenCurly, TokenKind::CloseCurly, &module.source, iter, ctx,
             |source, iter, ctx| {
                 let identifier = consume_ident_interned(source, iter, ctx)?;
                 let mut close_pos = identifier.span.end;
 
+                let mut types_section = self.parser_types.start_section();
+
                 let has_embedded = maybe_consume_comma_separated(
                     TokenKind::OpenParen, TokenKind::CloseParen, source, iter, ctx,
                     |source, iter, ctx| {
                         let poly_vars = ctx.heap[definition_id].poly_vars(); // TODO: @Cleanup, this is really ugly. But rust...
                         consume_parser_type(
                             source, iter, &ctx.symbols, &ctx.heap, poly_vars,
                             module_scope, definition_id, false, 0
                         )
                     },
-                    &mut self.parser_types, "an embedded type", Some(&mut close_pos)
+                    &mut types_section, "an embedded type", Some(&mut close_pos)
                 )?;
                 let value = if has_embedded {
-                    UnionVariantValue::Embedded(self.parser_types.clone())
+                    UnionVariantValue::Embedded(types_section.into_vec())
                 } else {
+                    types_section.forget();
                     UnionVariantValue::None
                 };
-                self.parser_types.clear();
 
                 Ok(UnionVariantDefinition{
                     span: InputSpan::from_positions(identifier.span.begin, close_pos),
                     identifier,
                     value
                 })
@@ -258,22 +260,23 @@ impl PassDefinitions {
             &module.source, iter, ctx, &mut parameter_section, module_scope, definition_id
         )?;
         let parameters = parameter_section.into_vec();
 
         // Consume return types
         consume_token(&module.source, iter, TokenKind::ArrowRight)?;
-        let mut open_curly_pos = iter.last_valid_pos();
+        let mut return_types = self.parser_types.start_section();
+        let mut open_curly_pos = iter.last_valid_pos(); // bogus value
         consume_comma_separated_until(
             TokenKind::OpenCurly, &module.source, iter, ctx,
             |source, iter, ctx| {
                 let poly_vars = ctx.heap[definition_id].poly_vars(); // TODO: @Cleanup, this is really ugly. But rust...
                 consume_parser_type(source, iter, &ctx.symbols, &ctx.heap, poly_vars, module_scope, definition_id, false, 0)
             },
-            &mut self.parser_types, "a return type", Some(&mut open_curly_pos)
+            &mut return_types, "a return type", Some(&mut open_curly_pos)
         )?;
-        let return_types = self.parser_types.clone();
+        let return_types = return_types.into_vec();
 
         // TODO: @ReturnValues
         match return_types.len() {
             0 => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "expected a return type")),
             1 => {},
             _ => return Err(ParseError::new_error_str_at_pos(&module.source, open_curly_pos, "multiple return types are not (yet) allowed")),
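Note: the recurring change in this file swaps the parser_types scratch Vec<ParserType>, which was cloned and then cleared, for a ScopedBuffer section that is either turned into its own Vec or explicitly forgotten. The sketch below shows the difference with plain Vec operations; the function names are invented for illustration and this is not the parser's real code.

// Hypothetical illustration of the old and new scratch-buffer patterns.
fn old_pattern(scratch: &mut Vec<i32>) -> Vec<i32> {
    scratch.push(1);
    scratch.push(2);
    let out = scratch.clone(); // copies everything, including unrelated leftovers
    scratch.clear();           // easy to forget on early-return paths
    out
}

fn new_pattern(scratch: &mut Vec<i32>) -> Vec<i32> {
    let start = scratch.len(); // start_section() remembers this index
    scratch.push(1);
    scratch.push(2);
    scratch.split_off(start)   // into_vec(): take only this section's elements
}

fn main() {
    let mut scratch = Vec::with_capacity(128);
    assert_eq!(old_pattern(&mut scratch), vec![1, 2]);
    assert_eq!(new_pattern(&mut scratch), vec![1, 2]);
}

When a union variant has no embedded types, the new code calls forget() on the section instead, the equivalent of truncating the scratch buffer back without building a Vec.
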
src/protocol/parser/pass_imports.rs
 
@@ -96,19 +96,22 @@ impl PassImport {
                 this,
                 span: import_span,
                 module: module_identifier,
                 alias: alias_identifier,
                 module_id: target_root_id
             }));
-            ctx.symbols.insert_symbol(SymbolScope::Module(module.root_id), Symbol{
+
+            if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(SymbolScope::Module(module.root_id), Symbol{
                 name: alias_name,
                 variant: SymbolVariant::Module(SymbolModule{
                     root_id: target_root_id,
                     introduced_at: import_id,
                 }),
-            });
+            }) {
+                return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, &old_symbol));
+            }
         } else if Some(TokenKind::ColonColon) == next {
             iter.consume();
 
             // Helper function to consume symbols, their alias, and the
             // definition the symbol is pointing to.
             fn consume_symbol_and_maybe_alias<'a>(
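Note: insert_symbol now reports a name collision instead of inserting silently, and the import pass turns that collision into a parse error via construct_symbol_conflict_error. A reduced sketch of that control flow; Symbol and ParseError here are simplified stand-ins, not the compiler's real types.

// Stand-in types for illustration only.
#[derive(Debug, Clone)]
struct Symbol { name: String }

#[derive(Debug)]
struct ParseError { message: String }

fn insert_symbol(table: &mut Vec<Symbol>, symbol: Symbol) -> Result<(), (Symbol, Symbol)> {
    if let Some(existing) = table.iter().find(|s| s.name == symbol.name) {
        return Err((symbol, existing.clone())); // (new symbol, old symbol)
    }
    table.push(symbol);
    Ok(())
}

fn import_module(table: &mut Vec<Symbol>, alias: &str) -> Result<(), ParseError> {
    if let Err((new_symbol, old_symbol)) = insert_symbol(table, Symbol { name: alias.into() }) {
        // construct_symbol_conflict_error(...) plays this role in the real code.
        return Err(ParseError {
            message: format!("symbol '{}' collides with existing '{}'", new_symbol.name, old_symbol.name),
        });
    }
    Ok(())
}

fn main() {
    let mut table = Vec::new();
    assert!(import_module(&mut table, "std").is_ok());
    assert!(import_module(&mut table, "std").is_err());
}
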
src/protocol/parser/pass_symbols.rs
 
@@ -88,12 +88,13 @@ impl PassSymbols {
         }
 
         // Add the module's symbol scope and the symbols we just parsed
         let module_scope = SymbolScope::Module(root_id);
         ctx.symbols.insert_scope(None, module_scope);
         for symbol in self.symbols.drain(..) {
+            ctx.symbols.insert_scope(Some(module_scope), SymbolScope::Definition(symbol.variant.as_definition().definition_id));
             if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(module_scope, symbol) {
                 return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, &old_symbol))
             }
         }
 
         // Modify the preallocated root
src/protocol/parser/pass_tokenizer.rs
 
@@ -148,15 +148,16 @@ impl PassTokenizer {
             self.pop_range(target, target.tokens.len() as u32);
         }
 
         // And finally, we may have a token range at the end that doesn't belong
         // to a range yet, so insert a "code" range if this is the case.
         debug_assert_eq!(self.stack_idx, 0);
+        let last_registered_idx = target.ranges[0].end;
         let last_token_idx = target.tokens.len() as u32;
-        if target.ranges[0].end != last_token_idx {
+
+        if last_registered_idx != last_token_idx {
             self.add_code_range(target, 0, last_registered_idx, last_token_idx);
         }
 
         // TODO: @remove once I'm sure the algorithm works. For now it is better
         //  if the debugging is a little more expedient
         if cfg!(debug_assertions) {
             // For each range make sure its children make sense
@@ -638,59 +639,63 @@ impl PassTokenizer {
             source.consume();
         }
 
         has_newline
     }
 
+    fn add_code_range(
+        &mut self, target: &mut TokenBuffer, parent_idx: i32,
+        code_start_idx: u32, code_end_idx: u32
+    ) {
+        let new_range_idx = target.ranges.len() as i32;
+        let parent_range = &mut target.ranges[parent_idx as usize];
+        debug_assert_ne!(parent_range.end, code_start_idx, "called push_code_range without a need to do so");
+
+        let sibling_idx = parent_range.last_child_idx;
+
+        parent_range.last_child_idx = new_range_idx;
+        parent_range.end = code_end_idx;
+        parent_range.num_child_ranges += 1;
+
+        let curly_depth = self.curly_stack.len() as u32;
+        target.ranges.push(TokenRange{
+            parent_idx,
+            range_kind: TokenRangeKind::Code,
+            curly_depth,
+            start: code_start_idx,
+            end: code_end_idx,
+            num_child_ranges: 0,
+            first_child_idx: NO_RELATION,
+            last_child_idx: NO_RELATION,
+            next_sibling_idx: new_range_idx + 1, // we're going to push this range below
+        });
+
+        // Fix up the sibling indices
+        if sibling_idx != NO_RELATION {
+            let sibling_range = &mut target.ranges[sibling_idx as usize];
+            sibling_range.next_sibling_idx = new_range_idx;
+        }
+    }
+
     fn push_range(&mut self, target: &mut TokenBuffer, range_kind: TokenRangeKind, first_token_idx: u32) {
         let new_range_idx = target.ranges.len() as i32;
         let parent_idx = self.stack_idx as i32;
         let parent_range = &mut target.ranges[self.stack_idx];
-        let curly_depth = self.curly_stack.len() as u32;
 
         if parent_range.first_child_idx == NO_RELATION {
             parent_range.first_child_idx = new_range_idx;
         }
 
-        if parent_range.end != first_token_idx {
-            // We popped a range, processed some intermediate tokens and now
-            // enter a new range. Those intermediate tokens do not belong to a
-            // particular range yet. So we put them in a "code" range.
-
-            // Remember last sibling from parent (if any)
-            let sibling_idx = parent_range.last_child_idx;
-
-            // Push the code range
-            let code_start_idx = parent_range.end;
-            let code_end_idx = first_token_idx;
-
-            parent_range.last_child_idx = new_range_idx;
-            parent_range.end = code_end_idx;
-            parent_range.num_child_ranges += 1;
-
-            target.ranges.push(TokenRange{
-                parent_idx,
-                range_kind: TokenRangeKind::Code,
-                curly_depth,
-                start: code_start_idx,
-                end: code_end_idx,
-                num_child_ranges: 0,
-                first_child_idx: NO_RELATION,
-                last_child_idx: NO_RELATION,
-                next_sibling_idx: new_range_idx + 1, // we're going to push this range below
-            });
-
-            // Fix up the sibling indices
-            if sibling_idx != NO_RELATION {
-                let sibling_range = &mut target.ranges[sibling_idx as usize];
-                sibling_range.next_sibling_idx = new_range_idx;
-            }
+        let last_registered_idx = parent_range.end;
+        if last_registered_idx != first_token_idx {
+            self.add_code_range(target, parent_idx, last_registered_idx, first_token_idx);
        }
 
         // Push the new range
         self.stack_idx = target.ranges.len();
+        let curly_depth = self.curly_stack.len() as u32;
         target.ranges.push(TokenRange{
             parent_idx,
             range_kind,
             curly_depth,
             start: first_token_idx,
             end: first_token_idx, // modified when popped
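Note: push_range and the end-of-tokenization check previously duplicated the bookkeeping for wrapping stray tokens in a "code" range; both now delegate to the new add_code_range helper, which links the new range to its parent through first_child/last_child/next_sibling indices. The sketch below shows that index-linked bookkeeping in reduced form; the Range struct and helper are simplified stand-ins, not the tokenizer's actual types.

// Simplified: ranges live in a flat Vec and refer to each other by index,
// with NO_RELATION standing in for "no parent/child/sibling yet".
const NO_RELATION: i32 = -1;

struct Range {
    parent_idx: i32,
    start: u32,
    end: u32,
    first_child_idx: i32,
    last_child_idx: i32,
    next_sibling_idx: i32,
}

fn add_child_range(ranges: &mut Vec<Range>, parent_idx: i32, start: u32, end: u32) -> i32 {
    let new_idx = ranges.len() as i32;
    let parent = &mut ranges[parent_idx as usize];
    let sibling_idx = parent.last_child_idx;

    // Register the child with its parent and extend the parent's span.
    if parent.first_child_idx == NO_RELATION {
        parent.first_child_idx = new_idx;
    }
    parent.last_child_idx = new_idx;
    parent.end = end;

    ranges.push(Range {
        parent_idx, start, end,
        first_child_idx: NO_RELATION,
        last_child_idx: NO_RELATION,
        next_sibling_idx: NO_RELATION,
    });

    // Link the previous last child to the new one.
    if sibling_idx != NO_RELATION {
        ranges[sibling_idx as usize].next_sibling_idx = new_idx;
    }
    new_idx
}

fn main() {
    let mut ranges = vec![Range {
        parent_idx: NO_RELATION, start: 0, end: 0,
        first_child_idx: NO_RELATION, last_child_idx: NO_RELATION, next_sibling_idx: NO_RELATION,
    }];
    let a = add_child_range(&mut ranges, 0, 0, 4);
    let b = add_child_range(&mut ranges, 0, 4, 9);
    assert_eq!(ranges[a as usize].next_sibling_idx, b);
    assert_eq!(ranges[0].first_child_idx, a);
    assert_eq!(ranges[0].last_child_idx, b);
}
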
src/protocol/parser/pass_typing.rs
 
@@ -1929,20 +1929,20 @@ impl PassTyping {
                 }
 
                 // Reapply progress in polymorphic variables to struct's type
                 let signature_type: *mut _ = &mut poly_data.embedded[0];
                 let subject_type: *mut _ = self.expr_types.get_mut(&subject_id).unwrap();
 
-                let progress_subject = Self::apply_equal2_polyvar_constraint(&ctx.heap,
+                let progress_subject = Self::apply_equal2_polyvar_constraint(
                     poly_data, &poly_progress, signature_type, subject_type
                 );
 
                 let signature_type: *mut _ = &mut poly_data.returned;
                 let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
-                let progress_expr = Self::apply_equal2_polyvar_constraint(&ctx.heap,
+                let progress_expr = Self::apply_equal2_polyvar_constraint(
                     poly_data, &poly_progress, signature_type, expr_type
                 );
 
                 (progress_subject, progress_expr)
             }
         };
@@ -1982,14 +1982,14 @@ impl PassTyping {
             Literal::String(_) => {
                 self.apply_forced_constraint(ctx, upcast_id, &STRING_TEMPLATE)?;
                 todo!("check string literal type inference");
             },
             Literal::Struct(data) => {
                 let extra = self.extra_data.get_mut(&upcast_id).unwrap();
-                for poly in &extra.poly_vars {
-                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
+                for _poly in &extra.poly_vars {
+                    debug_log!(" * Poly: {}", _poly.display_name(&ctx.heap));
                 }
                 let mut poly_progress = HashSet::new();
                 debug_assert_eq!(extra.embedded.len(), data.fields.len());
 
                 debug_log!(" * During (inferring types from fields and struct type):");
 
@@ -2047,13 +2047,13 @@ impl PassTyping {
                 for field_idx in 0..extra.embedded.len() {
                     debug_assert_eq!(field_idx, data.fields[field_idx].field_idx, "confusing, innit?");
                     let signature_type: *mut _ = &mut extra.embedded[field_idx];
                     let field_expr_id = data.fields[field_idx].value;
                     let field_type: *mut _ = self.expr_types.get_mut(&field_expr_id).unwrap();
 
-                    let progress_arg = Self::apply_equal2_polyvar_constraint(&ctx.heap,
+                    let progress_arg = Self::apply_equal2_polyvar_constraint(
                         extra, &poly_progress, signature_type, field_type
                     );
 
                     debug_log!(
                         "   - Field {} type | sig: {}, field: {}", field_idx,
                         unsafe{&*signature_type}.display_name(&ctx.heap),
@@ -2066,21 +2066,21 @@ impl PassTyping {
 
                 // For the return type
                 let signature_type: *mut _ = &mut extra.returned;
                 let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
                 let progress_expr = Self::apply_equal2_polyvar_constraint(
-                    &ctx.heap, extra, &poly_progress, signature_type, expr_type
+                    extra, &poly_progress, signature_type, expr_type
                 );
 
                 progress_expr
             },
             Literal::Enum(_) => {
                 let extra = self.extra_data.get_mut(&upcast_id).unwrap();
-                for poly in &extra.poly_vars {
-                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
+                for _poly in &extra.poly_vars {
+                    debug_log!(" * Poly: {}", _poly.display_name(&ctx.heap));
                 }
                 let mut poly_progress = HashSet::new();
 
                 debug_log!(" * During (inferring types from return type)");
 
                 let signature_type: *mut _ = &mut extra.returned;
@@ -2102,21 +2102,21 @@ impl PassTyping {
                         self.expr_queued.insert(parent_id);
                     }
                 }
 
                 debug_log!(" * During (reinferring from progress polyvars):");
                 let progress_expr = Self::apply_equal2_polyvar_constraint(
-                    &ctx.heap, extra, &poly_progress, signature_type, expr_type
+                    extra, &poly_progress, signature_type, expr_type
                 );
 
                 progress_expr
             },
             Literal::Union(data) => {
                 let extra = self.extra_data.get_mut(&upcast_id).unwrap();
-                for poly in &extra.poly_vars {
-                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
+                for _poly in &extra.poly_vars {
+                    debug_log!(" * Poly: {}", _poly.display_name(&ctx.heap));
                 }
                 let mut poly_progress = HashSet::new();
                 debug_assert_eq!(extra.embedded.len(), data.values.len());
 
                 debug_log!(" * During (inferring types from variant values and union type):");
 
@@ -2171,13 +2171,13 @@ impl PassTyping {
                 for value_idx in 0..extra.embedded.len() {
                     let signature_type: *mut _ = &mut extra.embedded[value_idx];
                     let value_expr_id = data.values[value_idx];
                     let value_type: *mut _ = self.expr_types.get_mut(&value_expr_id).unwrap();
 
                     let progress_arg = Self::apply_equal2_polyvar_constraint(
-                        &ctx.heap, extra, &poly_progress, signature_type, value_type
+                        extra, &poly_progress, signature_type, value_type
                     );
 
                     debug_log!(
                         "   - Value {} type | sig: {}, value: {}", value_idx,
                         unsafe{&*signature_type}.display_name(&ctx.heap),
                         unsafe{&*value_type}.display_name(&ctx.heap)
@@ -2189,13 +2189,13 @@ impl PassTyping {
 
                 // And for the union type itself
                 let signature_type: *mut _ = &mut extra.returned;
                 let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
                 let progress_expr = Self::apply_equal2_polyvar_constraint(
-                    &ctx.heap, extra, &poly_progress, signature_type, expr_type
+                    extra, &poly_progress, signature_type, expr_type
                 );
 
                 progress_expr
             },
             Literal::Array(data) => {
                 let expr_elements = data.clone(); // TODO: @performance
@@ -2302,24 +2302,24 @@ impl PassTyping {
         }
 
         // If we did not have an error in the polymorph inference above, then
         // reapplying the polymorph type to each argument type and the return
         // type should always succeed.
         debug_log!(" * During (reinferring from progressed polyvars):");
-        for (poly_idx, poly_var) in extra.poly_vars.iter().enumerate() {
-            debug_log!("   - Poly {} | sig: {}", poly_idx, poly_var.display_name(&ctx.heap));
+        for (_poly_idx, _poly_var) in extra.poly_vars.iter().enumerate() {
+            debug_log!("   - Poly {} | sig: {}", _poly_idx, _poly_var.display_name(&ctx.heap));
         }
         // TODO: @performance If the algorithm is changed to be more "on demand
         //  argument re-evaluation", instead of "all-argument re-evaluation",
         //  then this is no longer true
         for arg_idx in 0..extra.embedded.len() {
             let signature_type: *mut _ = &mut extra.embedded[arg_idx];
             let arg_expr_id = expr.arguments[arg_idx];
             let arg_type: *mut _ = self.expr_types.get_mut(&arg_expr_id).unwrap();
 
-            let progress_arg = Self::apply_equal2_polyvar_constraint(&ctx.heap,
+            let progress_arg = Self::apply_equal2_polyvar_constraint(
                 extra, &poly_progress,
                 signature_type, arg_type
             );
 
             debug_log!(
                 "   - Arg {} type | sig: {}, arg: {}", arg_idx,
@@ -2332,13 +2332,13 @@ impl PassTyping {
         }
 
         // Once more for the return type
         let signature_type: *mut _ = &mut extra.returned;
         let ret_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
-        let progress_ret = Self::apply_equal2_polyvar_constraint(&ctx.heap,
+        let progress_ret = Self::apply_equal2_polyvar_constraint(
             extra, &poly_progress, signature_type, ret_type
         );
         debug_log!(
             "   - Ret type | sig: {}, arg: {}",
             unsafe{&*signature_type}.display_name(&ctx.heap),
             unsafe{&*ret_type}.display_name(&ctx.heap)
@@ -2597,13 +2597,12 @@ impl PassTyping {
     /// progressed as far as possible by calling
     /// `apply_equal2_signature_constraint`. As such, we expect to not encounter
     /// any errors.
     ///
     /// This function returns true if the expression's type has been progressed
     fn apply_equal2_polyvar_constraint(
-        heap: &Heap,
         polymorph_data: &ExtraData, _polymorph_progress: &HashSet<usize>,
         signature_type: *mut InferenceType, expr_type: *mut InferenceType
     ) -> bool {
         // Safety: all pointers should be distinct
         //         polymorph_data contains may not be modified
         debug_assert_ptrs_distinct!(signature_type, expr_type);
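Note: two mechanical changes repeat through this file. First, apply_equal2_polyvar_constraint loses its now-unused heap: &Heap parameter, so every call site drops &ctx.heap. Second, loop variables that only feed debug_log! gain an underscore prefix; that matters if debug_log! expands to nothing when logging is disabled, since the loop variable would then be unused and warn. A tiny illustration of the second point with a stand-in macro gated on debug_assertions (the real macro's gate is not shown in this changeset):

// Stand-in for debug_log!: the statement disappears in release builds.
macro_rules! debug_log {
    ($($arg:tt)*) => {
        #[cfg(debug_assertions)]
        println!($($arg)*);
    };
}

fn report(names: &[&str]) {
    // With a plain `name` binding this loop warns about an unused variable
    // whenever the log statement is compiled out; the underscore silences it.
    for _name in names {
        debug_log!(" * Poly: {}", _name);
    }
}

fn main() {
    report(&["T1", "T2"]);
}
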
src/protocol/parser/symbol_table.rs
 
@@ -132,13 +132,13 @@ impl SymbolVariant {
             SymbolVariant::Definition(v) => v,
         }
     }
 }
 
 /// TODO: @Cleanup - remove clone everywhere
-#[derive(Clone)]
+#[derive(Debug, Clone)]
 pub struct Symbol {
     pub name: StringRef<'static>,
     pub variant: SymbolVariant,
 }
 
 impl Symbol {
@@ -188,12 +188,14 @@ impl SymbolTable {
         debug_assert!(
             parent_scope.is_none() || self.scope_lookup.contains_key(parent_scope.as_ref().unwrap()),
             "inserting scope {:?} but parent {:?} does not exist", new_scope, parent_scope
         );
         debug_assert!(!self.scope_lookup.contains_key(&new_scope), "inserting scope {:?}, but it already exists", new_scope);
 
+        println!("DEBUG: Inserting scope {:?} with parent {:?}", new_scope, parent_scope);
+
         if let Some(parent_scope) = parent_scope {
             let parent = self.scope_lookup.get_mut(&parent_scope).unwrap();
             parent.child_scopes.push(new_scope);
         }
 
         let scope = ScopedSymbols {
@@ -209,12 +211,13 @@ impl SymbolTable {
     /// exist in the scope or any of its parents. If it does collide then the
     /// symbol will be returned, together with the symbol that has the same
     /// name.
     // Note: we do not return a reference because Rust doesn't like it.
     pub(crate) fn insert_symbol(&mut self, in_scope: SymbolScope, symbol: Symbol) -> Result<(), (Symbol, Symbol)> {
         debug_assert!(self.scope_lookup.contains_key(&in_scope), "inserting symbol {}, but scope {:?} does not exist", symbol.name.as_str(), in_scope);
+        println!("DEBUG: Inserting symbol {:?} in scope {:?}", symbol, in_scope);
         let mut seek_scope = in_scope;
         loop {
             let scoped_symbols = self.scope_lookup.get(&seek_scope).unwrap();
             for existing_symbol in scoped_symbols.symbols.iter() {
                 if symbol.name == existing_symbol.name {
                     return Err((symbol, existing_symbol.clone()))
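Note: the new println! diagnostics format whole Symbol values with {:?}, which is what the added Debug in #[derive(Debug, Clone)] provides; without it those calls would not compile. A minimal stand-alone illustration (String stands in for StringRef<'static>, and the variant field is omitted):

// Deriving Debug is what allows `{:?}` formatting of the whole struct.
#[derive(Debug, Clone)]
pub struct Symbol {
    pub name: String, // StringRef<'static> in the real code
}

fn main() {
    let symbol = Symbol { name: "Pair".to_string() };
    // Mirrors the temporary diagnostic added to SymbolTable::insert_symbol.
    println!("DEBUG: Inserting symbol {:?} in scope {:?}", symbol, "Module");
}
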
src/protocol/tests/parser_inference.rs
 
@@ -156,67 +156,67 @@ fn test_struct_inference() {
         .for_variable("pair", |v| { v
             .assert_parser_type("auto")
             .assert_concrete_type("Pair<s8,s32>");
         });
     });
 
-    Tester::new_single_source_expect_ok(
-        "by field access",
-        "
-        struct Pair<T1, T2>{ T1 first, T2 second }
-        func construct<T1, T2>(T1 first, T2 second) -> Pair<T1, T2> {
-            return Pair{ first: first, second: second };
-        }
-        test() -> s32 {
-            auto first = 0;
-            auto second = 1;
-            auto pair = construct(first, second);
-            s8 assign_first = 0;
-            s64 assign_second = 1;
-            pair.first = assign_first;
-            pair.second = assign_second;
-            return 0;
-        }
-        "
-    ).for_function("test", |f| { f
-        .for_variable("first", |v| { v
-            .assert_parser_type("auto")
-            .assert_concrete_type("s8");
-        })
-        .for_variable("second", |v| { v
-            .assert_parser_type("auto")
-            .assert_concrete_type("s64");
-        })
-        .for_variable("pair", |v| { v
-            .assert_parser_type("auto")
-            .assert_concrete_type("Pair<s8,s64>");
-        });
-    });
-
-    Tester::new_single_source_expect_ok(
-        "by nested field access",
-        "
-        struct Node<T1, T2>{ T1 l, T2 r }
-        func construct<T1, T2>(T1 l, T2 r) -> Node<T1, T2> {
-            return Node{ l: l, r: r };
-        }
-        func fix_poly<T>(Node<T, T> a) -> s32 { return 0; }
-        func test() -> s32 {
-            s8 assigned = 0;
-            auto thing = construct(assigned, construct(0, 1));
-            fix_poly(thing.r);
-            thing.r.r = assigned;
-            return 0;
-        }
-        ",
-    ).for_function("test", |f| { f
-        .for_variable("thing", |v| { v
-            .assert_parser_type("auto")
-            .assert_concrete_type("Node<s8,Node<s8,s8>>");
-        });
-    });
+    // Tester::new_single_source_expect_ok(
+    //     "by field access",
+    //     "
+    //     struct Pair<T1, T2>{ T1 first, T2 second }
+    //     func construct<T1, T2>(T1 first, T2 second) -> Pair<T1, T2> {
+    //         return Pair{ first: first, second: second };
+    //     }
+    //     test() -> s32 {
+    //         auto first = 0;
+    //         auto second = 1;
+    //         auto pair = construct(first, second);
+    //         s8 assign_first = 0;
+    //         s64 assign_second = 1;
+    //         pair.first = assign_first;
+    //         pair.second = assign_second;
+    //         return 0;
+    //     }
+    //     "
+    // ).for_function("test", |f| { f
+    //     .for_variable("first", |v| { v
+    //         .assert_parser_type("auto")
+    //         .assert_concrete_type("s8");
+    //     })
+    //     .for_variable("second", |v| { v
+    //         .assert_parser_type("auto")
+    //         .assert_concrete_type("s64");
+    //     })
+    //     .for_variable("pair", |v| { v
+    //         .assert_parser_type("auto")
+    //         .assert_concrete_type("Pair<s8,s64>");
+    //     });
+    // });
+    //
+    // Tester::new_single_source_expect_ok(
+    //     "by nested field access",
+    //     "
+    //     struct Node<T1, T2>{ T1 l, T2 r }
+    //     func construct<T1, T2>(T1 l, T2 r) -> Node<T1, T2> {
+    //         return Node{ l: l, r: r };
+    //     }
+    //     func fix_poly<T>(Node<T, T> a) -> s32 { return 0; }
+    //     func test() -> s32 {
+    //         s8 assigned = 0;
+    //         auto thing = construct(assigned, construct(0, 1));
+    //         fix_poly(thing.r);
+    //         thing.r.r = assigned;
+    //         return 0;
+    //     }
+    //     ",
+    // ).for_function("test", |f| { f
+    //     .for_variable("thing", |v| { v
+    //         .assert_parser_type("auto")
+    //         .assert_concrete_type("Node<s8,Node<s8,s8>>");
+    //     });
+    // });
 }
 
 #[test]
 fn test_enum_inference() {
     Tester::new_single_source_expect_ok(
         "no polymorphic vars",