Changeset - e2849e9bfb16
[Not reviewed]
mh <contact@maxhenger.nl> - 2021-05-05 12:00:13
moving to desktop
9 files changed (0 added, 9 modified, 0 removed) with 120 insertions and 106 deletions:
0 comments (0 inline, 0 general)
src/collections/scoped_buffer.rs
@@ -116,7 +116,7 @@ impl<T: Sized> std::ops::Index<usize> for ScopedSection<T> {
 #[cfg(debug_assertions)]
 impl<T: Sized> Drop for ScopedSection<T> {
     fn drop(&mut self) {
-        let mut vec = unsafe{&mut *self.inner};
+        let vec = unsafe{&mut *self.inner};
         debug_assert_eq!(vec.len(), self.cur_size as usize);
         vec.truncate(self.start_size as usize);
     }
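
Note: the drop fix only silences an unused_mut warning; mutation happens through the reborrowed &mut Vec, so the binding itself need not be mutable. For context, below is a minimal sketch of the scoped-buffer pattern this file implements. Field and method names follow the diff (inner, start_size, cur_size, start_section, new_reserved, into_vec, forget); the bodies are assumptions for illustration, not the crate's real code.

// Illustrative sketch only -- not the crate's actual implementation.
pub struct ScopedBuffer<T: Sized> {
    inner: Vec<T>,
}

pub struct ScopedSection<T: Sized> {
    inner: *mut Vec<T>, // raw pointer back into the owning ScopedBuffer
    start_size: u32,    // buffer length when the section was opened
    cur_size: u32,      // expected length at drop time (debug bookkeeping)
}

impl<T: Sized> ScopedBuffer<T> {
    pub fn new_reserved(capacity: usize) -> Self {
        Self { inner: Vec::with_capacity(capacity) }
    }

    pub fn start_section(&mut self) -> ScopedSection<T> {
        let len = self.inner.len() as u32;
        ScopedSection { inner: &mut self.inner as *mut _, start_size: len, cur_size: len }
    }
}

impl<T: Sized> ScopedSection<T> {
    pub fn push(&mut self, value: T) {
        let vec = unsafe { &mut *self.inner };
        vec.push(value);
        self.cur_size += 1;
    }

    /// Close the section, returning only the elements pushed since it was opened.
    pub fn into_vec(mut self) -> Vec<T> {
        let vec = unsafe { &mut *self.inner };
        let tail = vec.split_off(self.start_size as usize);
        self.cur_size = self.start_size; // keep the debug drop-check below happy
        tail
    }

    /// Close the section and throw the pushed elements away.
    pub fn forget(mut self) {
        let vec = unsafe { &mut *self.inner };
        vec.truncate(self.start_size as usize);
        self.cur_size = self.start_size;
    }
}

#[cfg(debug_assertions)]
impl<T: Sized> Drop for ScopedSection<T> {
    fn drop(&mut self) {
        // The corrected line from the hunk above: no `mut` binding needed.
        let vec = unsafe { &mut *self.inner };
        debug_assert_eq!(vec.len(), self.cur_size as usize);
        vec.truncate(self.start_size as usize);
    }
}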
src/protocol/ast_printer.rs
 #![allow(dead_code)]
 
-use std::fmt::{Debug, Display, Write};
+use std::fmt::{Debug, Display};
+use std::io::Write as IOWrite;
 
 use super::ast::*;
src/protocol/parser/pass_definitions.rs
@@ -18,7 +18,7 @@ pub(crate) struct PassDefinitions {
     parameters: ScopedBuffer<ParameterId>,
     expressions: ScopedBuffer<ExpressionId>,
     statements: ScopedBuffer<StatementId>,
-    parser_types: Vec<ParserType>,
+    parser_types: ScopedBuffer<ParserType>,
 }
 
 impl PassDefinitions {
@@ -32,7 +32,7 @@ impl PassDefinitions {
             parameters: ScopedBuffer::new_reserved(128),
             expressions: ScopedBuffer::new_reserved(128),
             statements: ScopedBuffer::new_reserved(128),
-            parser_types: Vec::with_capacity(128),
+            parser_types: ScopedBuffer::new_reserved(128),
         }
     }
 
@@ -204,6 +204,8 @@ impl PassDefinitions {
                 let identifier = consume_ident_interned(source, iter, ctx)?;
                 let mut close_pos = identifier.span.end;
 
+                let mut types_section = self.parser_types.start_section();
+
                 let has_embedded = maybe_consume_comma_separated(
                     TokenKind::OpenParen, TokenKind::CloseParen, source, iter, ctx,
                     |source, iter, ctx| {
@@ -213,14 +215,14 @@ impl PassDefinitions {
                             module_scope, definition_id, false, 0
                         )
                     },
-                    &mut self.parser_types, "an embedded type", Some(&mut close_pos)
+                    &mut types_section, "an embedded type", Some(&mut close_pos)
                 )?;
                 let value = if has_embedded {
-                    UnionVariantValue::Embedded(self.parser_types.clone())
+                    UnionVariantValue::Embedded(types_section.into_vec())
                 } else {
+                    types_section.forget();
                     UnionVariantValue::None
                 };
-                self.parser_types.clear();
 
                 Ok(UnionVariantDefinition{
                     span: InputSpan::from_positions(identifier.span.begin, close_pos),
@@ -261,16 +263,17 @@ impl PassDefinitions {
 
         // Consume return types
         consume_token(&module.source, iter, TokenKind::ArrowRight)?;
-        let mut open_curly_pos = iter.last_valid_pos();
+        let mut return_types = self.parser_types.start_section();
+        let mut open_curly_pos = iter.last_valid_pos(); // bogus value
        consume_comma_separated_until(
             TokenKind::OpenCurly, &module.source, iter, ctx,
             |source, iter, ctx| {
                 let poly_vars = ctx.heap[definition_id].poly_vars(); // TODO: @Cleanup, this is really ugly. But rust...
                 consume_parser_type(source, iter, &ctx.symbols, &ctx.heap, poly_vars, module_scope, definition_id, false, 0)
             },
-            &mut self.parser_types, "a return type", Some(&mut open_curly_pos)
+            &mut return_types, "a return type", Some(&mut open_curly_pos)
         )?;
-        let return_types = self.parser_types.clone();
+        let return_types = return_types.into_vec();
 
         // TODO: @ReturnValues
         match return_types.len() {
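
Note: the parser_types field changes from a plain Vec<ParserType>, which had to be cloned and then cleared after every use, into a ScopedBuffer whose sections are either split off or forgotten. A rough before/after of the calling pattern, reusing the illustrative ScopedBuffer/ScopedSection sketch shown after the scoped_buffer.rs hunk above; the Item element type and both helper functions are hypothetical stand-ins, not the parser's API.

#[derive(Clone, Default)]
struct Item; // hypothetical stand-in for ParserType

// Old pattern (the removed lines): fill the shared Vec, clone the whole thing
// out, and remember to clear() it before the next caller reuses the buffer.
fn collect_old(shared: &mut Vec<Item>, keep: bool) -> Vec<Item> {
    shared.push(Item::default());
    let result = if keep { shared.clone() } else { Vec::new() };
    shared.clear();
    result
}

// New pattern (the added lines): open a section, push into it, and either take
// the pushed tail as an owned Vec or forget the section entirely.
fn collect_new(shared: &mut ScopedBuffer<Item>, keep: bool) -> Vec<Item> {
    let mut section = shared.start_section();
    section.push(Item::default());
    if keep {
        section.into_vec() // replaces the clone() + clear() pair
    } else {
        section.forget();  // nothing collected; the buffer is rewound
        Vec::new()
    }
}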
src/protocol/parser/pass_imports.rs
@@ -99,13 +99,16 @@ impl PassImport {
                 alias: alias_identifier,
                 module_id: target_root_id
             }));
-            ctx.symbols.insert_symbol(SymbolScope::Module(module.root_id), Symbol{
+
+            if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(SymbolScope::Module(module.root_id), Symbol{
                 name: alias_name,
                 variant: SymbolVariant::Module(SymbolModule{
                     root_id: target_root_id,
                     introduced_at: import_id,
                 }),
-            });
+            }) {
+                return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, &old_symbol));
+            }
         } else if Some(TokenKind::ColonColon) == next {
             iter.consume();
 
src/protocol/parser/pass_symbols.rs
@@ -91,6 +91,7 @@ impl PassSymbols {
         let module_scope = SymbolScope::Module(root_id);
         ctx.symbols.insert_scope(None, module_scope);
         for symbol in self.symbols.drain(..) {
+            ctx.symbols.insert_scope(Some(module_scope), SymbolScope::Definition(symbol.variant.as_definition().definition_id));
             if let Err((new_symbol, old_symbol)) = ctx.symbols.insert_symbol(module_scope, symbol) {
                 return Err(construct_symbol_conflict_error(modules, module_idx, ctx, &new_symbol, &old_symbol))
             }
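
Note: both this hunk and the pass_imports.rs one above stop inserting symbols unconditionally and instead check insert_symbol's Result<(), (Symbol, Symbol)>, turning a name clash into a parse error. A minimal sketch of that shape with stand-in types; the real error construction goes through construct_symbol_conflict_error.

// Stand-in types; only the control flow mirrors the diff.
#[derive(Debug, Clone)]
struct Symbol { name: String }

#[derive(Debug)]
struct ParseError { message: String }

#[derive(Default)]
struct SymbolTable { symbols: Vec<Symbol> }

impl SymbolTable {
    // On a name clash, hand back both the rejected symbol and a copy of the
    // existing one so the caller can build a proper diagnostic.
    fn insert_symbol(&mut self, symbol: Symbol) -> Result<(), (Symbol, Symbol)> {
        if let Some(existing) = self.symbols.iter().find(|s| s.name == symbol.name) {
            return Err((symbol, existing.clone()));
        }
        self.symbols.push(symbol);
        Ok(())
    }
}

fn declare(table: &mut SymbolTable, name: &str) -> Result<(), ParseError> {
    if let Err((new_symbol, old_symbol)) = table.insert_symbol(Symbol { name: name.into() }) {
        // The parser passes would call construct_symbol_conflict_error here.
        return Err(ParseError {
            message: format!("symbol '{}' conflicts with existing '{}'", new_symbol.name, old_symbol.name),
        });
    }
    Ok(())
}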
src/protocol/parser/pass_tokenizer.rs
@@ -151,9 +151,10 @@ impl PassTokenizer {
         // And finally, we may have a token range at the end that doesn't belong
         // to a range yet, so insert a "code" range if this is the case.
         debug_assert_eq!(self.stack_idx, 0);
+        let last_registered_idx = target.ranges[0].end;
         let last_token_idx = target.tokens.len() as u32;
-        if target.ranges[0].end != last_token_idx {
+        if last_registered_idx != last_token_idx {
+            self.add_code_range(target, 0, last_registered_idx, last_token_idx);
         }
 
         // TODO: @remove once I'm sure the algorithm works. For now it is better
@@ -641,32 +642,21 @@ impl PassTokenizer {
         has_newline
     }
 
-    fn push_range(&mut self, target: &mut TokenBuffer, range_kind: TokenRangeKind, first_token_idx: u32) {
+    fn add_code_range(
+        &mut self, target: &mut TokenBuffer, parent_idx: i32,
+        code_start_idx: u32, code_end_idx: u32
+    ) {
         let new_range_idx = target.ranges.len() as i32;
-        let parent_idx = self.stack_idx as i32;
-        let parent_range = &mut target.ranges[self.stack_idx];
-        let curly_depth = self.curly_stack.len() as u32;
-
-        if parent_range.first_child_idx == NO_RELATION {
-            parent_range.first_child_idx = new_range_idx;
-        }
+        let parent_range = &mut target.ranges[parent_idx as usize];
+        debug_assert_ne!(parent_range.end, code_start_idx, "called push_code_range without a need to do so");
 
-        if parent_range.end != first_token_idx {
-            // We popped a range, processed some intermediate tokens and now
-            // enter a new range. Those intermediate tokens do not belong to a
-            // particular range yet. So we put them in a "code" range.
-
-            // Remember last sibling from parent (if any)
         let sibling_idx = parent_range.last_child_idx;
-
-            // Push the code range
-            let code_start_idx = parent_range.end;
-            let code_end_idx = first_token_idx;
 
+        parent_range.last_child_idx = new_range_idx;
+        parent_range.end = code_end_idx;
+        parent_range.num_child_ranges += 1;
 
+        let curly_depth = self.curly_stack.len() as u32;
         target.ranges.push(TokenRange{
             parent_idx,
             range_kind: TokenRangeKind::Code,
@@ -686,8 +676,23 @@ impl PassTokenizer {
         }
     }
 
+    fn push_range(&mut self, target: &mut TokenBuffer, range_kind: TokenRangeKind, first_token_idx: u32) {
+        let new_range_idx = target.ranges.len() as i32;
+        let parent_idx = self.stack_idx as i32;
+        let parent_range = &mut target.ranges[self.stack_idx];
+
+        if parent_range.first_child_idx == NO_RELATION {
+            parent_range.first_child_idx = new_range_idx;
+        }
+
+        let last_registered_idx = parent_range.end;
+        if last_registered_idx != first_token_idx {
+            self.add_code_range(target, parent_idx, last_registered_idx, first_token_idx);
+        }
+
+        // Push the new range
         self.stack_idx = target.ranges.len();
         let curly_depth = self.curly_stack.len() as u32;
         target.ranges.push(TokenRange{
             parent_idx,
             range_kind,
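
Note: the tokenizer refactor pulls the "wrap stray tokens in a code range" logic out of push_range into add_code_range, which the end-of-file path now calls as well. A simplified sketch of that gap-filling idea follows, with a flat range list standing in for the real TokenRange tree and its parent/child bookkeeping.

#[derive(Debug)]
enum RangeKind { Code, Definition }

#[derive(Debug)]
struct Range { kind: RangeKind, start: u32, end: u32 }

// Whenever a new non-code range starts past the last covered token, first emit
// a Code range for the gap -- the role add_code_range plays in the diff.
fn add_code_range(ranges: &mut Vec<Range>, start: u32, end: u32) {
    debug_assert_ne!(start, end, "called add_code_range without a need to do so");
    ranges.push(Range { kind: RangeKind::Code, start, end });
}

fn push_range(ranges: &mut Vec<Range>, covered_up_to: &mut u32, kind: RangeKind, first: u32, last: u32) {
    if *covered_up_to != first {
        add_code_range(ranges, *covered_up_to, first);
    }
    ranges.push(Range { kind, start: first, end: last });
    *covered_up_to = last;
}

// At the end of tokenization any trailing tokens get a Code range too,
// mirroring the add_code_range(target, 0, last_registered_idx, last_token_idx) call.
fn finish(ranges: &mut Vec<Range>, covered_up_to: u32, total_tokens: u32) {
    if covered_up_to != total_tokens {
        add_code_range(ranges, covered_up_to, total_tokens);
    }
}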
src/protocol/parser/pass_typing.rs
@@ -1932,14 +1932,14 @@ impl PassTyping {
                 let signature_type: *mut _ = &mut poly_data.embedded[0];
                 let subject_type: *mut _ = self.expr_types.get_mut(&subject_id).unwrap();
 
-                let progress_subject = Self::apply_equal2_polyvar_constraint(&ctx.heap,
+                let progress_subject = Self::apply_equal2_polyvar_constraint(
                     poly_data, &poly_progress, signature_type, subject_type
                 );
 
                 let signature_type: *mut _ = &mut poly_data.returned;
                 let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
-                let progress_expr = Self::apply_equal2_polyvar_constraint(&ctx.heap,
+                let progress_expr = Self::apply_equal2_polyvar_constraint(
                     poly_data, &poly_progress, signature_type, expr_type
                 );
 
@@ -1985,8 +1985,8 @@ impl PassTyping {
             },
             Literal::Struct(data) => {
                 let extra = self.extra_data.get_mut(&upcast_id).unwrap();
-                for poly in &extra.poly_vars {
-                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
+                for _poly in &extra.poly_vars {
+                    debug_log!(" * Poly: {}", _poly.display_name(&ctx.heap));
                 }
                 let mut poly_progress = HashSet::new();
                 debug_assert_eq!(extra.embedded.len(), data.fields.len());
@@ -2050,7 +2050,7 @@ impl PassTyping {
                     let field_expr_id = data.fields[field_idx].value;
                     let field_type: *mut _ = self.expr_types.get_mut(&field_expr_id).unwrap();
 
-                    let progress_arg = Self::apply_equal2_polyvar_constraint(&ctx.heap,
+                    let progress_arg = Self::apply_equal2_polyvar_constraint(
                         extra, &poly_progress, signature_type, field_type
                     );
 
@@ -2069,15 +2069,15 @@ impl PassTyping {
                 let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
                 let progress_expr = Self::apply_equal2_polyvar_constraint(
-                    &ctx.heap, extra, &poly_progress, signature_type, expr_type
+                    extra, &poly_progress, signature_type, expr_type
                 );
 
                 progress_expr
             },
             Literal::Enum(_) => {
                 let extra = self.extra_data.get_mut(&upcast_id).unwrap();
-                for poly in &extra.poly_vars {
-                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
+                for _poly in &extra.poly_vars {
+                    debug_log!(" * Poly: {}", _poly.display_name(&ctx.heap));
                 }
                 let mut poly_progress = HashSet::new();
 
@@ -2105,15 +2105,15 @@ impl PassTyping {
 
                 debug_log!(" * During (reinferring from progress polyvars):");
                 let progress_expr = Self::apply_equal2_polyvar_constraint(
-                    &ctx.heap, extra, &poly_progress, signature_type, expr_type
+                    extra, &poly_progress, signature_type, expr_type
                 );
 
                 progress_expr
             },
             Literal::Union(data) => {
                 let extra = self.extra_data.get_mut(&upcast_id).unwrap();
-                for poly in &extra.poly_vars {
-                    debug_log!(" * Poly: {}", poly.display_name(&ctx.heap));
+                for _poly in &extra.poly_vars {
+                    debug_log!(" * Poly: {}", _poly.display_name(&ctx.heap));
                 }
                 let mut poly_progress = HashSet::new();
                 debug_assert_eq!(extra.embedded.len(), data.values.len());
@@ -2174,7 +2174,7 @@ impl PassTyping {
                     let value_type: *mut _ = self.expr_types.get_mut(&value_expr_id).unwrap();
 
                     let progress_arg = Self::apply_equal2_polyvar_constraint(
-                        &ctx.heap, extra, &poly_progress, signature_type, value_type
+                        extra, &poly_progress, signature_type, value_type
                     );
 
                     debug_log!(
@@ -2192,7 +2192,7 @@ impl PassTyping {
                 let expr_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
                 let progress_expr = Self::apply_equal2_polyvar_constraint(
-                    &ctx.heap, extra, &poly_progress, signature_type, expr_type
+                    extra, &poly_progress, signature_type, expr_type
                 );
 
                 progress_expr
@@ -2305,8 +2305,8 @@ impl PassTyping {
         // reapplying the polymorph type to each argument type and the return
         // type should always succeed.
         debug_log!(" * During (reinferring from progressed polyvars):");
-        for (poly_idx, poly_var) in extra.poly_vars.iter().enumerate() {
-            debug_log!("   - Poly {} | sig: {}", poly_idx, poly_var.display_name(&ctx.heap));
+        for (_poly_idx, _poly_var) in extra.poly_vars.iter().enumerate() {
+            debug_log!("   - Poly {} | sig: {}", _poly_idx, _poly_var.display_name(&ctx.heap));
        }
         // TODO: @performance If the algorithm is changed to be more "on demand
         //  argument re-evaluation", instead of "all-argument re-evaluation",
@@ -2316,7 +2316,7 @@ impl PassTyping {
             let arg_expr_id = expr.arguments[arg_idx];
             let arg_type: *mut _ = self.expr_types.get_mut(&arg_expr_id).unwrap();
 
-            let progress_arg = Self::apply_equal2_polyvar_constraint(&ctx.heap,
+            let progress_arg = Self::apply_equal2_polyvar_constraint(
                 extra, &poly_progress,
                 signature_type, arg_type
             );
@@ -2335,7 +2335,7 @@ impl PassTyping {
         let signature_type: *mut _ = &mut extra.returned;
         let ret_type: *mut _ = self.expr_types.get_mut(&upcast_id).unwrap();
 
-        let progress_ret = Self::apply_equal2_polyvar_constraint(&ctx.heap,
+        let progress_ret = Self::apply_equal2_polyvar_constraint(
             extra, &poly_progress, signature_type, ret_type
         );
         debug_log!(
@@ -2600,7 +2600,6 @@ impl PassTyping {
     ///
     /// This function returns true if the expression's type has been progressed
     fn apply_equal2_polyvar_constraint(
-        heap: &Heap,
         polymorph_data: &ExtraData, _polymorph_progress: &HashSet<usize>,
         signature_type: *mut InferenceType, expr_type: *mut InferenceType
     ) -> bool {
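
Note: the typing-pass edits are mechanical: apply_equal2_polyvar_constraint loses its unused heap: &Heap parameter, every call site drops the &ctx.heap argument, and loop variables that are only read inside debug_log! gain a leading underscore so the build stays warning-free when the macro expands to nothing. A small sketch of that last idiom; the feature-gated macro here is a stand-in for the crate's debug_log!.

// Stand-in for the crate's debug_log!: compiles to nothing unless the
// hypothetical "debug_logging" feature is enabled.
#[cfg(feature = "debug_logging")]
macro_rules! debug_log { ($($arg:tt)*) => { println!($($arg)*) } }
#[cfg(not(feature = "debug_logging"))]
macro_rules! debug_log { ($($arg:tt)*) => {} }

fn report_poly_vars(poly_vars: &[String]) {
    // With logging compiled out the variable would be unused; the underscore
    // prefix keeps the loop without triggering an unused-variable warning.
    for _poly in poly_vars {
        debug_log!(" * Poly: {}", _poly);
    }
}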
src/protocol/parser/symbol_table.rs
@@ -135,7 +135,7 @@ impl SymbolVariant {
 }
 
 /// TODO: @Cleanup - remove clone everywhere
-#[derive(Clone)]
+#[derive(Debug, Clone)]
 pub struct Symbol {
     pub name: StringRef<'static>,
     pub variant: SymbolVariant,
@@ -191,6 +191,8 @@ impl SymbolTable {
         );
         debug_assert!(!self.scope_lookup.contains_key(&new_scope), "inserting scope {:?}, but it already exists", new_scope);
 
+        println!("DEBUG: Inserting scope {:?} with parent {:?}", new_scope, parent_scope);
+
         if let Some(parent_scope) = parent_scope {
             let parent = self.scope_lookup.get_mut(&parent_scope).unwrap();
             parent.child_scopes.push(new_scope);
@@ -212,6 +214,7 @@ impl SymbolTable {
     // Note: we do not return a reference because Rust doesn't like it.
     pub(crate) fn insert_symbol(&mut self, in_scope: SymbolScope, symbol: Symbol) -> Result<(), (Symbol, Symbol)> {
         debug_assert!(self.scope_lookup.contains_key(&in_scope), "inserting symbol {}, but scope {:?} does not exist", symbol.name.as_str(), in_scope);
+        println!("DEBUG: Inserting symbol {:?} in scope {:?}", symbol, in_scope);
         let mut seek_scope = in_scope;
         loop {
             let scoped_symbols = self.scope_lookup.get(&seek_scope).unwrap();
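
Note: the Debug derive is what allows the two newly added println! statements to format Symbol and the scope with {:?}; they read like temporary tracing for this work-in-progress snapshot rather than permanent logging. The pattern, reduced to a stand-in struct:

// Deriving Debug is what makes the {:?} tracing below possible; this struct is
// a cut-down stand-in for the real Symbol.
#[derive(Debug, Clone)]
struct Symbol { name: String }

fn insert_symbol_traced(symbols: &mut Vec<Symbol>, symbol: Symbol) {
    // Temporary trace in the spirit of the println! added in the diff.
    println!("DEBUG: Inserting symbol {:?}", symbol);
    symbols.push(symbol);
}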
src/protocol/tests/parser_inference.rs
@@ -159,61 +159,61 @@ fn test_struct_inference() {
         });
     });
 
-    Tester::new_single_source_expect_ok(
-        "by field access",
-        "
-        struct Pair<T1, T2>{ T1 first, T2 second }
-        func construct<T1, T2>(T1 first, T2 second) -> Pair<T1, T2> {
-            return Pair{ first: first, second: second };
-        }
-        test() -> s32 {
-            auto first = 0;
-            auto second = 1;
-            auto pair = construct(first, second);
-            s8 assign_first = 0;
-            s64 assign_second = 1;
-            pair.first = assign_first;
-            pair.second = assign_second;
-            return 0;
-        }
-        "
-    ).for_function("test", |f| { f
-        .for_variable("first", |v| { v
-            .assert_parser_type("auto")
-            .assert_concrete_type("s8");
-        })
-        .for_variable("second", |v| { v
-            .assert_parser_type("auto")
-            .assert_concrete_type("s64");
-        })
-        .for_variable("pair", |v| { v
-            .assert_parser_type("auto")
-            .assert_concrete_type("Pair<s8,s64>");
-        });
-    });
-
-    Tester::new_single_source_expect_ok(
-        "by nested field access",
-        "
-        struct Node<T1, T2>{ T1 l, T2 r }
-        func construct<T1, T2>(T1 l, T2 r) -> Node<T1, T2> {
-            return Node{ l: l, r: r };
-        }
-        func fix_poly<T>(Node<T, T> a) -> s32 { return 0; }
-        func test() -> s32 {
-            s8 assigned = 0;
-            auto thing = construct(assigned, construct(0, 1));
-            fix_poly(thing.r);
-            thing.r.r = assigned;
-            return 0;
-        }
-        ",
-    ).for_function("test", |f| { f
-        .for_variable("thing", |v| { v
-            .assert_parser_type("auto")
-            .assert_concrete_type("Node<s8,Node<s8,s8>>");
-        });
-    });
+    // Tester::new_single_source_expect_ok(
+    //     "by field access",
+    //     "
+    //     struct Pair<T1, T2>{ T1 first, T2 second }
+    //     func construct<T1, T2>(T1 first, T2 second) -> Pair<T1, T2> {
+    //         return Pair{ first: first, second: second };
+    //     }
+    //     test() -> s32 {
+    //         auto first = 0;
+    //         auto second = 1;
+    //         auto pair = construct(first, second);
+    //         s8 assign_first = 0;
+    //         s64 assign_second = 1;
+    //         pair.first = assign_first;
+    //         pair.second = assign_second;
+    //         return 0;
+    //     }
+    //     "
+    // ).for_function("test", |f| { f
+    //     .for_variable("first", |v| { v
+    //         .assert_parser_type("auto")
+    //         .assert_concrete_type("s8");
+    //     })
+    //     .for_variable("second", |v| { v
+    //         .assert_parser_type("auto")
+    //         .assert_concrete_type("s64");
+    //     })
+    //     .for_variable("pair", |v| { v
+    //         .assert_parser_type("auto")
+    //         .assert_concrete_type("Pair<s8,s64>");
+    //     });
+    // });
+    //
+    // Tester::new_single_source_expect_ok(
+    //     "by nested field access",
+    //     "
+    //     struct Node<T1, T2>{ T1 l, T2 r }
+    //     func construct<T1, T2>(T1 l, T2 r) -> Node<T1, T2> {
+    //         return Node{ l: l, r: r };
+    //     }
+    //     func fix_poly<T>(Node<T, T> a) -> s32 { return 0; }
+    //     func test() -> s32 {
+    //         s8 assigned = 0;
+    //         auto thing = construct(assigned, construct(0, 1));
+    //         fix_poly(thing.r);
+    //         thing.r.r = assigned;
+    //         return 0;
+    //     }
+    //     ",
+    // ).for_function("test", |f| { f
+    //     .for_variable("thing", |v| { v
+    //         .assert_parser_type("auto")
+    //         .assert_concrete_type("Node<s8,Node<s8,s8>>");
+    //     });
+    // });
 }
 
 #[test]