3 use crate::lexer::{tokenize, Token};
4 use crate::parser_api::Parser;
5 use crate::parser_impl;
6 use crate::text_utils::replace_range;
7 use crate::yellow::{self, GreenNode, SyntaxError, SyntaxNodeRef};
8 use crate::{SyntaxKind::*, TextRange, TextUnit};
10 #[derive(Debug, Clone)]
12 pub delete: TextRange,
17 pub fn replace(range: TextRange, replace_with: String) -> AtomEdit {
24 pub fn delete(range: TextRange) -> AtomEdit {
25 AtomEdit::replace(range, String::new())
28 pub fn insert(offset: TextUnit, text: String) -> AtomEdit {
29 AtomEdit::replace(TextRange::offset_len(offset, 0.into()), text)
33 pub(crate) fn incremental_reparse(
36 errors: Vec<SyntaxError>,
37 ) -> Option<(GreenNode, Vec<SyntaxError>)> {
38 let (node, green, new_errors) =
39 reparse_leaf(node, &edit).or_else(|| reparse_block(node, &edit))?;
40 let green_root = node.replace_with(green);
41 let errors = merge_errors(errors, new_errors, node, edit);
42 Some((green_root, errors))
45 fn reparse_leaf<'node>(
46 node: SyntaxNodeRef<'node>,
48 ) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> {
49 let node = algo::find_covering_node(node, edit.delete);
51 WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
52 let text = get_text_after_edit(node, &edit);
53 let tokens = tokenize(&text);
54 let token = match tokens[..] {
55 [token] if token.kind == node.kind() => token,
59 if token.kind == IDENT && is_contextual_kw(&text) {
63 let green = GreenNode::new_leaf(node.kind(), text.into());
64 let new_errors = vec![];
65 Some((node, green, new_errors))
71 fn reparse_block<'node>(
72 node: SyntaxNodeRef<'node>,
74 ) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> {
75 let (node, reparser) = find_reparsable_node(node, edit.delete)?;
76 let text = get_text_after_edit(node, &edit);
77 let tokens = tokenize(&text);
78 if !is_balanced(&tokens) {
81 let (green, new_errors) =
82 parser_impl::parse_with(yellow::GreenBuilder::new(), &text, &tokens, reparser);
83 Some((node, green, new_errors))
86 fn get_text_after_edit(node: SyntaxNodeRef, edit: &AtomEdit) -> String {
88 node.text().to_string(),
89 edit.delete - node.range().start(),
/// Returns `true` for identifiers that act as contextual keywords, i.e. parse
/// differently in certain positions even though they lex as plain IDENTs.
fn is_contextual_kw(text: &str) -> bool {
    match text {
        "auto" | "default" | "union" => true,
        _ => false,
    }
}
101 type ParseFn = fn(&mut Parser);
102 fn find_reparsable_node(
103 node: SyntaxNodeRef<'_>,
105 ) -> Option<(SyntaxNodeRef<'_>, ParseFn)> {
106 let node = algo::find_covering_node(node, range);
109 .filter_map(|node| reparser(node).map(|r| (node, r)))
112 fn reparser(node: SyntaxNodeRef) -> Option<ParseFn> {
113 let res = match node.kind() {
114 BLOCK => grammar::block,
115 NAMED_FIELD_DEF_LIST => grammar::named_field_def_list,
116 NAMED_FIELD_LIST => grammar::named_field_list,
117 ENUM_VARIANT_LIST => grammar::enum_variant_list,
118 MATCH_ARM_LIST => grammar::match_arm_list,
119 USE_TREE_LIST => grammar::use_tree_list,
120 EXTERN_ITEM_LIST => grammar::extern_item_list,
121 TOKEN_TREE if node.first_child().unwrap().kind() == L_CURLY => grammar::token_tree,
123 let parent = node.parent().unwrap();
124 match parent.kind() {
125 IMPL_ITEM => grammar::impl_item_list,
126 TRAIT_DEF => grammar::trait_item_list,
127 MODULE => grammar::mod_item_list,
137 fn is_balanced(tokens: &[Token]) -> bool {
139 || tokens.first().unwrap().kind != L_CURLY
140 || tokens.last().unwrap().kind != R_CURLY
144 let mut balance = 0usize;
145 for t in tokens.iter() {
147 L_CURLY => balance += 1,
149 balance = match balance.checked_sub(1) {
151 None => return false,
161 old_errors: Vec<SyntaxError>,
162 new_errors: Vec<SyntaxError>,
163 old_node: SyntaxNodeRef,
165 ) -> Vec<SyntaxError> {
166 let mut res = Vec::new();
167 for e in old_errors {
168 if e.offset <= old_node.range().start() {
170 } else if e.offset >= old_node.range().end() {
171 res.push(SyntaxError {
173 offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
177 for e in new_errors {
178 res.push(SyntaxError {
180 offset: e.offset + old_node.range().start(),
189 super::{test_utils::extract_range, text_utils::replace_range, utils::dump_tree, File},
190 reparse_block, reparse_leaf, AtomEdit, GreenNode, SyntaxError, SyntaxNodeRef,
193 fn do_check<F>(before: &str, replace_with: &str, reparser: F)
195 for<'a> F: Fn(SyntaxNodeRef<'a>, &AtomEdit)
196 -> Option<(SyntaxNodeRef<'a>, GreenNode, Vec<SyntaxError>)>,
198 let (range, before) = extract_range(before);
199 let after = replace_range(before.clone(), range, replace_with);
201 let fully_reparsed = File::parse(&after);
202 let incrementally_reparsed = {
203 let f = File::parse(&before);
204 let edit = AtomEdit {
206 insert: replace_with.to_string(),
208 let (node, green, new_errors) =
209 reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
210 let green_root = node.replace_with(green);
211 let errors = super::merge_errors(f.errors(), new_errors, node, &edit);
212 File::new(green_root, errors)
216 &dump_tree(fully_reparsed.syntax()),
217 &dump_tree(incrementally_reparsed.syntax()),
222 fn reparse_block_tests() {
223 let do_check = |before, replace_to| do_check(before, replace_to, reparse_block);
228 let x = foo + <|>bar<|>
236 let x = foo<|> + bar<|>
277 impl IntoIterator<Item=i32> for Foo {
285 use a::b::{foo,<|>,bar<|>};
317 " exit(code: c_int)",
322 fn reparse_leaf_tests() {
323 let do_check = |before, replace_to| do_check(before, replace_to, reparse_leaf);
327 fn foo() -> i32 { 1 }
333 fn foo() -> <|><|> {}
339 fn <|>foo<|>() -> i32 { 1 }
345 fn foo<|><|>foo() { }
351 fn foo /* <|><|> */ () {}
376 fn -> &str { "Hello<|><|>" }
382 fn -> &str { // "Hello<|><|>"
388 fn -> &str { r#"Hello<|><|>"#
394 #[derive(<|>Copy<|>)]