From 4356240fa42eebbd17fdb63b2d0e6f86387ca21c Mon Sep 17 00:00:00 2001
From: darksv
Date: Thu, 13 Sep 2018 23:25:05 +0200
Subject: [PATCH] Incremental reparsing for single tokens (WHITESPACE, COMMENT, DOC_COMMENT, IDENT, STRING, RAW_STRING)

---
 crates/libsyntax2/src/lib.rs         | 80 +++++++++++++++++++++++-----
 crates/libsyntax2/tests/test/main.rs | 56 ++++++++++++++++++-
 2 files changed, 122 insertions(+), 14 deletions(-)

diff --git a/crates/libsyntax2/src/lib.rs b/crates/libsyntax2/src/lib.rs
index fd58cb4fa6e..bae685fb441 100644
--- a/crates/libsyntax2/src/lib.rs
+++ b/crates/libsyntax2/src/lib.rs
@@ -82,22 +82,68 @@ impl File {
         self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
     }
     pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> {
+        let (node, green, new_errors) =
+            self.reparse_leaf(&edit).or_else(|| self.reparse_block(&edit))?;
+
+        let green_root = node.replace_with(green);
+        let errors = merge_errors(self.errors(), new_errors, node, edit);
+        Some(File::new(green_root, errors))
+    }
+    fn reparse_leaf(&self, edit: &AtomEdit) -> Option<(SyntaxNodeRef, GreenNode, Vec<SyntaxError>)> {
+        let node = algo::find_covering_node(self.syntax(), edit.delete);
+        match node.kind() {
+            | WHITESPACE
+            | COMMENT
+            | DOC_COMMENT
+            | IDENT
+            | STRING
+            | RAW_STRING => {
+                let text = get_text_after_edit(node, &edit);
+                let tokens = tokenize(&text);
+                if tokens.len() != 1 || tokens[0].kind != node.kind() {
+                    return None;
+                }
+
+                let reparser: fn(&mut Parser) = if node.kind().is_trivia() {
+                    // since trivia is omitted by parser when it doesn't have a parent,
+                    // we need to create one for it
+                    |p| {
+                        p.start().complete(p, ROOT);
+                    }
+                } else {
+                    |p| {
+                        p.bump();
+                    }
+                };
+
+                let (green, new_errors) =
+                    parser_impl::parse_with::<yellow::GreenBuilder>(
+                        &text, &tokens, reparser,
+                    );
+
+                let green = if node.kind().is_trivia() {
+                    green.children().first().cloned().unwrap()
+                } else {
+                    green
+                };
+
+                Some((node, green, new_errors))
+            },
+            _ => None,
+        }
+    }
+    fn reparse_block(&self, edit: &AtomEdit) -> Option<(SyntaxNodeRef, GreenNode, Vec<SyntaxError>)> {
         let (node, reparser) = find_reparsable_node(self.syntax(), edit.delete)?;
-        let text = replace_range(
-            node.text().to_string(),
-            edit.delete - node.range().start(),
-            &edit.insert,
-        );
+        let text = get_text_after_edit(node, &edit);
         let tokens = tokenize(&text);
         if !is_balanced(&tokens) {
             return None;
         }
-        let (green, new_errors) = parser_impl::parse_with::<yellow::GreenBuilder>(
-            &text, &tokens, reparser,
-        );
-        let green_root = node.replace_with(green);
-        let errors = merge_errors(self.errors(), new_errors, node, edit);
-        Some(File::new(green_root, errors))
+        let (green, new_errors) =
+            parser_impl::parse_with::<yellow::GreenBuilder>(
+                &text, &tokens, reparser,
+            );
+        Some((node, green, new_errors))
     }
     fn full_reparse(&self, edit: &AtomEdit) -> File {
         let text = replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
@@ -134,6 +180,14 @@ impl AtomEdit {
     }
 }
 
+fn get_text_after_edit(node: SyntaxNodeRef, edit: &AtomEdit) -> String {
+    replace_range(
+        node.text().to_string(),
+        edit.delete - node.range().start(),
+        &edit.insert,
+    )
+}
+
 fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
     let node = algo::find_covering_node(node, range);
     return algo::ancestors(node)
@@ -200,9 +254,9 @@ fn merge_errors(
 ) -> Vec<SyntaxError> {
     let mut res = Vec::new();
     for e in old_errors {
-        if e.offset < old_node.range().start() {
+        if e.offset <= old_node.range().start() {
             res.push(e)
-        } else if e.offset > old_node.range().end() {
+        } else if e.offset >= old_node.range().end() {
             res.push(SyntaxError {
                 msg: e.msg,
                 offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
diff --git a/crates/libsyntax2/tests/test/main.rs b/crates/libsyntax2/tests/test/main.rs
index 644df9f3c68..ce7e075f868 100644
--- a/crates/libsyntax2/tests/test/main.rs
+++ b/crates/libsyntax2/tests/test/main.rs
@@ -33,7 +33,7 @@ fn reparse_test() {
     let incrementally_reparsed = {
         let f = File::parse(&before);
         let edit = AtomEdit { delete: range, insert: replace_with.to_string() };
-        f.incremental_reparse(&edit).unwrap()
+        f.incremental_reparse(&edit).expect("cannot incrementally reparse")
     };
     assert_eq_text!(
         &dump_tree(fully_reparsed.syntax()),
@@ -45,6 +45,11 @@ fn reparse_test() {
 fn foo() {
     let x = foo + <|>bar<|>
 }
+", "baz");
+    do_check(r"
+fn foo() {
+    let x = foo<|> + bar<|>
+}
 ", "baz");
     do_check(r"
 struct Foo {
@@ -67,6 +72,11 @@ mod foo {
 trait Foo {
     type <|>Foo<|>;
 }
+", "Output");
+    do_check(r"
+trait Foo {
+    type<|> Foo<|>;
+}
 ", "Output");
     do_check(r"
 impl IntoIterator for Foo {
@@ -94,6 +104,50 @@ extern {
     fn<|>;<|>
 }
 ", " exit(code: c_int)");
+    do_check(r"<|><|>
+fn foo() -> i32 {
+    1
+}
+", "\n\n\n \n");
+    do_check(r"
+fn foo() -> <|><|> {}
+", " \n");
+    do_check(r"
+fn <|>foo<|>() -> i32 {
+    1
+}
+", "bar");
+    do_check(r"
+fn aa<|><|>bb() {
+
+}
+", "foofoo");
+    do_check(r"
+fn aabb /* <|><|> */ () {
+
+}
+", "some comment");
+    do_check(r"
+fn aabb <|><|> () {
+
+}
+", " \t\t\n\n");
+    do_check(r"
+trait foo {
+// comment <|><|>
+}
+", "\n");
+    do_check(r"
+/// good <|><|>omment
+mod {
+}
+", "c");
+    do_check(r#"
+fn -> &str { "Hello<|><|>" }
+"#, ", world");
+    do_check(r#"
+fn -> &str { // "Hello<|><|>"
+"#, ", world");
 }
 
 #[test]
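
A minimal usage sketch (not part of the patch) of how the new incremental_reparse entry point is driven from the caller's side, in the spirit of the do_check harness above. File and AtomEdit are the types used in the diff; it is assumed that TextRange and TextUnit are re-exported from the libsyntax2 crate root, and the source text, offsets, and standalone main are purely illustrative.

extern crate libsyntax2;

use libsyntax2::{AtomEdit, File, TextRange, TextUnit};

fn main() {
    // "hello" sits entirely inside one STRING token, at offsets 30..35.
    let before = r#"fn greet() -> &'static str { "hello" }"#;
    let file = File::parse(before);

    // An edit confined to a single STRING token: reparse_leaf should
    // retokenize just that token instead of reparsing an enclosing block.
    let edit = AtomEdit {
        delete: TextRange::from_to(TextUnit::from(30u32), TextUnit::from(35u32)),
        insert: "hi there".to_string(),
    };

    let reparsed = file
        .incremental_reparse(&edit)
        .expect("single-token edit should reparse incrementally");
    println!("{}", reparsed.syntax().text().to_string());
}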