diff --git a/src/protocol/parser/mod.rs b/src/protocol/parser/mod.rs
index 59235a8cd2397b44eea99458f897fb910aeb2959..39de7073241552963fc106ab248880774a9e45fa 100644
--- a/src/protocol/parser/mod.rs
+++ b/src/protocol/parser/mod.rs
@@ -52,10 +52,9 @@
 }
 
 pub struct Module {
-    // Buffers
     pub source: InputSource,
     pub tokens: TokenBuffer,
-    // Identifiers
+    pub is_compiler_file: bool, // TODO: @Hack
     pub root_id: RootId,
     pub name: Option<(PragmaId, StringRef<'static>)>,
     pub version: Option<(PragmaId, i64)>,
@@ -156,7 +155,7 @@ impl Parser {
             pass_typing: PassTyping::new(),
             pass_rewriting: PassRewriting::new(),
             pass_stack_size: PassStackSize::new(),
-            write_tokens_to: Some("tokens.txt".to_string()),
+            write_tokens_to: None,
             write_ast_to: None,
             arch: TargetArch::new(),
         };
@@ -194,21 +193,7 @@
     /// it internally for later parsing (when all modules are present). Returns
     /// the index of the new module.
     pub fn feed(&mut self, mut source: InputSource) -> Result<usize, ParseError> {
-        let mut token_buffer = TokenBuffer::new();
-        self.pass_tokenizer.tokenize(&mut source, &mut token_buffer)?;
-
-        let module = Module{
-            source,
-            tokens: token_buffer,
-            root_id: RootId::new_invalid(),
-            name: None,
-            version: None,
-            phase: ModuleCompilationPhase::Tokenized,
-        };
-        let module_index = self.modules.len();
-        self.modules.push(module);
-
-        return Ok(module_index);
+        return self.feed_internal(source, false);
     }
 
     pub fn parse(&mut self) -> Result<(), ParseError> {
@@ -353,7 +338,7 @@
 
         let source = source.unwrap();
         let input_source = InputSource::new(file.to_string(), source);
-        let module_index = self.feed(input_source);
+        let module_index = self.feed_internal(input_source, true);
         if let Err(err) = module_index {
             // A bit of a hack, but shouldn't really happen anyway: the
             // compiler should ship with a decent standard library (at some
@@ -370,6 +355,25 @@
 
         return Ok(())
     }
+
+    fn feed_internal(&mut self, mut source: InputSource, is_compiler_file: bool) -> Result<usize, ParseError> {
+        let mut token_buffer = TokenBuffer::new();
+        self.pass_tokenizer.tokenize(&mut source, &mut token_buffer)?;
+
+        let module = Module{
+            source,
+            tokens: token_buffer,
+            is_compiler_file,
+            root_id: RootId::new_invalid(),
+            name: None,
+            version: None,
+            phase: ModuleCompilationPhase::Tokenized,
+        };
+        let module_index = self.modules.len();
+        self.modules.push(module);
+
+        return Ok(module_index);
+    }
 }
 
 fn insert_builtin_type(type_table: &mut TypeTable, parts: Vec<ParserTypeVariant>, has_poly_var: bool, size: usize, alignment: usize) -> TypeId {