git.lizzy.rs Git - rust.git/commitdiff
Merge #8641
author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>
Sat, 24 Apr 2021 06:09:21 +0000 (06:09 +0000)
committer GitHub <noreply@github.com>
Sat, 24 Apr 2021 06:09:21 +0000 (06:09 +0000)
8641: minor r=matklad a=matklad

bors r+
🤖

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
15 files changed:
crates/ide_assists/src/ast_transform.rs
crates/ide_assists/src/handlers/auto_import.rs
crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs
crates/ide_assists/src/handlers/reorder_fields.rs
crates/ide_assists/src/handlers/reorder_impl.rs
crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs
crates/ide_assists/src/utils.rs
crates/ide_completion/src/completions/flyimport.rs
crates/ide_completion/src/item.rs
crates/ide_completion/src/lib.rs
crates/ide_db/src/helpers/insert_use.rs
crates/ide_db/src/helpers/insert_use/tests.rs
crates/syntax/src/algo.rs
crates/syntax/src/ast/make.rs
crates/syntax/src/ted.rs

crates/ide_assists/src/ast_transform.rs
index 4a3ed7783efcc6c1e2069cd3531678cf6136f0bf..e5ae718c9fd148292bb2b62ad9a0a244e2e90a11 100644 (file)
@@ -3,20 +3,27 @@
 use ide_db::helpers::mod_path_to_ast;
 use rustc_hash::FxHashMap;
 use syntax::{
-    algo::SyntaxRewriter,
     ast::{self, AstNode},
-    SyntaxNode,
+    ted, SyntaxNode,
 };
 
-pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: N) -> N {
-    SyntaxRewriter::from_fn(|element| match element {
-        syntax::SyntaxElement::Node(n) => {
-            let replacement = transformer.get_substitution(&n, transformer)?;
-            Some(replacement.into())
+pub fn apply<'a, N: AstNode>(transformer: &dyn AstTransform<'a>, node: &N) {
+    let mut skip_to = None;
+    for event in node.syntax().preorder() {
+        match event {
+            syntax::WalkEvent::Enter(node) if skip_to.is_none() => {
+                skip_to = transformer.get_substitution(&node, transformer).zip(Some(node));
+            }
+            syntax::WalkEvent::Enter(_) => (),
+            syntax::WalkEvent::Leave(node) => match &skip_to {
+                Some((replacement, skip_target)) if *skip_target == node => {
+                    ted::replace(node, replacement.clone_for_update());
+                    skip_to.take();
+                }
+                _ => (),
+            },
         }
-        _ => None,
-    })
-    .rewrite_ast(&node)
+    }
 }
 
 /// `AstTransform` helps with applying bulk transformations to syntax nodes.
@@ -191,11 +198,9 @@ fn get_substitution(
                 let found_path = from.find_use_path(self.source_scope.db.upcast(), def)?;
                 let mut path = mod_path_to_ast(&found_path);
 
-                let type_args = p
-                    .segment()
-                    .and_then(|s| s.generic_arg_list())
-                    .map(|arg_list| apply(recur, arg_list));
+                let type_args = p.segment().and_then(|s| s.generic_arg_list());
                 if let Some(type_args) = type_args {
+                    apply(recur, &type_args);
                     let last_segment = path.segment().unwrap();
                     path = path.with_segment(last_segment.with_generic_args(type_args))
                 }
crates/ide_assists/src/handlers/auto_import.rs
index 49aa70f74e77cec079a89f1e439aeb7ba25941b0..a454a2af3291508c49708e352ae3f97b38d30ff7 100644 (file)
@@ -93,7 +93,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
 
     let range = ctx.sema.original_range(&syntax_under_caret).range;
     let group_label = group_label(import_assets.import_candidate());
-    let scope = ImportScope::find_insert_use_container(&syntax_under_caret, &ctx.sema)?;
+    let scope = ImportScope::find_insert_use_container_with_macros(&syntax_under_caret, &ctx.sema)?;
     for import in proposed_imports {
         acc.add_group(
             &group_label,
@@ -101,9 +101,11 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
             format!("Import `{}`", import.import_path),
             range,
             |builder| {
-                let rewriter =
-                    insert_use(&scope, mod_path_to_ast(&import.import_path), ctx.config.insert_use);
-                builder.rewrite(rewriter);
+                let scope = match scope.clone() {
+                    ImportScope::File(it) => ImportScope::File(builder.make_ast_mut(it)),
+                    ImportScope::Module(it) => ImportScope::Module(builder.make_ast_mut(it)),
+                };
+                insert_use(&scope, mod_path_to_ast(&import.import_path), ctx.config.insert_use);
             },
         );
     }
crates/ide_assists/src/handlers/extract_struct_from_enum_variant.rs
index a8d6355bdd2ff6f84b43e7245005e76c61dad17b..66f274fa78c1398a4bf3c23d0c517502f0d38dad 100644 (file)
@@ -5,7 +5,7 @@
 use ide_db::{
     defs::Definition,
     helpers::{
-        insert_use::{insert_use, ImportScope},
+        insert_use::{insert_use, ImportScope, InsertUseConfig},
         mod_path_to_ast,
     },
     search::FileReference,
@@ -13,9 +13,9 @@
 };
 use rustc_hash::FxHashSet;
 use syntax::{
-    algo::{find_node_at_offset, SyntaxRewriter},
-    ast::{self, edit::IndentLevel, make, AstNode, NameOwner, VisibilityOwner},
-    SourceFile, SyntaxElement, SyntaxNode, T,
+    algo::find_node_at_offset,
+    ast::{self, make, AstNode, NameOwner, VisibilityOwner},
+    ted, SyntaxNode, T,
 };
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -62,40 +62,50 @@ pub(crate) fn extract_struct_from_enum_variant(
             let mut visited_modules_set = FxHashSet::default();
             let current_module = enum_hir.module(ctx.db());
             visited_modules_set.insert(current_module);
-            let mut def_rewriter = None;
+            // record file references of the file the def resides in, we only want to swap to the edited file in the builder once
+            let mut def_file_references = None;
             for (file_id, references) in usages {
-                let mut rewriter = SyntaxRewriter::default();
-                let source_file = ctx.sema.parse(file_id);
-                for reference in references {
-                    update_reference(
-                        ctx,
-                        &mut rewriter,
-                        reference,
-                        &source_file,
-                        &enum_module_def,
-                        &variant_hir_name,
-                        &mut visited_modules_set,
-                    );
-                }
                 if file_id == ctx.frange.file_id {
-                    def_rewriter = Some(rewriter);
+                    def_file_references = Some(references);
                     continue;
                 }
                 builder.edit_file(file_id);
-                builder.rewrite(rewriter);
+                let source_file = builder.make_ast_mut(ctx.sema.parse(file_id));
+                let processed = process_references(
+                    ctx,
+                    &mut visited_modules_set,
+                    source_file.syntax(),
+                    &enum_module_def,
+                    &variant_hir_name,
+                    references,
+                );
+                processed.into_iter().for_each(|(path, node, import)| {
+                    apply_references(ctx.config.insert_use, path, node, import)
+                });
             }
-            let mut rewriter = def_rewriter.unwrap_or_default();
-            update_variant(&mut rewriter, &variant);
-            extract_struct_def(
-                &mut rewriter,
-                &enum_ast,
-                variant_name.clone(),
-                &field_list,
-                &variant.parent_enum().syntax().clone().into(),
-                enum_ast.visibility(),
-            );
             builder.edit_file(ctx.frange.file_id);
-            builder.rewrite(rewriter);
+            let source_file = builder.make_ast_mut(ctx.sema.parse(ctx.frange.file_id));
+            let variant = builder.make_ast_mut(variant.clone());
+            if let Some(references) = def_file_references {
+                let processed = process_references(
+                    ctx,
+                    &mut visited_modules_set,
+                    source_file.syntax(),
+                    &enum_module_def,
+                    &variant_hir_name,
+                    references,
+                );
+                processed.into_iter().for_each(|(path, node, import)| {
+                    apply_references(ctx.config.insert_use, path, node, import)
+                });
+            }
+
+            let def = create_struct_def(variant_name.clone(), &field_list, enum_ast.visibility());
+            let start_offset = &variant.parent_enum().syntax().clone();
+            ted::insert_raw(ted::Position::before(start_offset), def.syntax());
+            ted::insert_raw(ted::Position::before(start_offset), &make::tokens::blank_line());
+
+            update_variant(&variant);
         },
     )
 }
@@ -136,34 +146,11 @@ fn existing_definition(db: &RootDatabase, variant_name: &ast::Name, variant: &Va
         .any(|(name, _)| name.to_string() == variant_name.to_string())
 }
 
-fn insert_import(
-    ctx: &AssistContext,
-    rewriter: &mut SyntaxRewriter,
-    scope_node: &SyntaxNode,
-    module: &Module,
-    enum_module_def: &ModuleDef,
-    variant_hir_name: &Name,
-) -> Option<()> {
-    let db = ctx.db();
-    let mod_path =
-        module.find_use_path_prefixed(db, *enum_module_def, ctx.config.insert_use.prefix_kind);
-    if let Some(mut mod_path) = mod_path {
-        mod_path.pop_segment();
-        mod_path.push_segment(variant_hir_name.clone());
-        let scope = ImportScope::find_insert_use_container(scope_node, &ctx.sema)?;
-        *rewriter += insert_use(&scope, mod_path_to_ast(&mod_path), ctx.config.insert_use);
-    }
-    Some(())
-}
-
-fn extract_struct_def(
-    rewriter: &mut SyntaxRewriter,
-    enum_: &ast::Enum,
+fn create_struct_def(
     variant_name: ast::Name,
     field_list: &Either<ast::RecordFieldList, ast::TupleFieldList>,
-    start_offset: &SyntaxElement,
     visibility: Option<ast::Visibility>,
-) -> Option<()> {
+) -> ast::Struct {
     let pub_vis = Some(make::visibility_pub());
     let field_list = match field_list {
         Either::Left(field_list) => {
@@ -180,65 +167,90 @@ fn extract_struct_def(
         .into(),
     };
 
-    rewriter.insert_before(
-        start_offset,
-        make::struct_(visibility, variant_name, None, field_list).syntax(),
-    );
-    rewriter.insert_before(start_offset, &make::tokens::blank_line());
-
-    if let indent_level @ 1..=usize::MAX = IndentLevel::from_node(enum_.syntax()).0 as usize {
-        rewriter
-            .insert_before(start_offset, &make::tokens::whitespace(&" ".repeat(4 * indent_level)));
-    }
-    Some(())
+    make::struct_(visibility, variant_name, None, field_list).clone_for_update()
 }
 
-fn update_variant(rewriter: &mut SyntaxRewriter, variant: &ast::Variant) -> Option<()> {
+fn update_variant(variant: &ast::Variant) -> Option<()> {
     let name = variant.name()?;
     let tuple_field = make::tuple_field(None, make::ty(&name.text()));
     let replacement = make::variant(
         name,
         Some(ast::FieldList::TupleFieldList(make::tuple_field_list(iter::once(tuple_field)))),
-    );
-    rewriter.replace(variant.syntax(), replacement.syntax());
+    )
+    .clone_for_update();
+    ted::replace(variant.syntax(), replacement.syntax());
     Some(())
 }
 
-fn update_reference(
+fn apply_references(
+    insert_use_cfg: InsertUseConfig,
+    segment: ast::PathSegment,
+    node: SyntaxNode,
+    import: Option<(ImportScope, hir::ModPath)>,
+) {
+    if let Some((scope, path)) = import {
+        insert_use(&scope, mod_path_to_ast(&path), insert_use_cfg);
+    }
+    ted::insert_raw(
+        ted::Position::before(segment.syntax()),
+        make::path_from_text(&format!("{}", segment)).clone_for_update().syntax(),
+    );
+    ted::insert_raw(ted::Position::before(segment.syntax()), make::token(T!['(']));
+    ted::insert_raw(ted::Position::after(&node), make::token(T![')']));
+}
+
+fn process_references(
     ctx: &AssistContext,
-    rewriter: &mut SyntaxRewriter,
-    reference: FileReference,
-    source_file: &SourceFile,
+    visited_modules: &mut FxHashSet<Module>,
+    source_file: &SyntaxNode,
     enum_module_def: &ModuleDef,
     variant_hir_name: &Name,
-    visited_modules_set: &mut FxHashSet<Module>,
-) -> Option<()> {
+    refs: Vec<FileReference>,
+) -> Vec<(ast::PathSegment, SyntaxNode, Option<(ImportScope, hir::ModPath)>)> {
+    // we have to recollect here eagerly as we are about to edit the tree, so we need to calculate the changes
+    // and corresponding nodes up front
+    refs.into_iter()
+        .flat_map(|reference| {
+            let (segment, scope_node, module) =
+                reference_to_node(&ctx.sema, source_file, reference)?;
+            if !visited_modules.contains(&module) {
+                let mod_path = module.find_use_path_prefixed(
+                    ctx.sema.db,
+                    *enum_module_def,
+                    ctx.config.insert_use.prefix_kind,
+                );
+                if let Some(mut mod_path) = mod_path {
+                    mod_path.pop_segment();
+                    mod_path.push_segment(variant_hir_name.clone());
+                    let scope = ImportScope::find_insert_use_container(&scope_node)?;
+                    visited_modules.insert(module);
+                    return Some((segment, scope_node, Some((scope, mod_path))));
+                }
+            }
+            Some((segment, scope_node, None))
+        })
+        .collect()
+}
+
+fn reference_to_node(
+    sema: &hir::Semantics<RootDatabase>,
+    source_file: &SyntaxNode,
+    reference: FileReference,
+) -> Option<(ast::PathSegment, SyntaxNode, hir::Module)> {
     let offset = reference.range.start();
-    let (segment, expr) = if let Some(path_expr) =
-        find_node_at_offset::<ast::PathExpr>(source_file.syntax(), offset)
-    {
+    if let Some(path_expr) = find_node_at_offset::<ast::PathExpr>(source_file, offset) {
         // tuple variant
-        (path_expr.path()?.segment()?, path_expr.syntax().parent()?)
-    } else if let Some(record_expr) =
-        find_node_at_offset::<ast::RecordExpr>(source_file.syntax(), offset)
-    {
+        Some((path_expr.path()?.segment()?, path_expr.syntax().parent()?))
+    } else if let Some(record_expr) = find_node_at_offset::<ast::RecordExpr>(source_file, offset) {
         // record variant
-        (record_expr.path()?.segment()?, record_expr.syntax().clone())
+        Some((record_expr.path()?.segment()?, record_expr.syntax().clone()))
     } else {
-        return None;
-    };
-
-    let module = ctx.sema.scope(&expr).module()?;
-    if !visited_modules_set.contains(&module) {
-        if insert_import(ctx, rewriter, &expr, &module, enum_module_def, variant_hir_name).is_some()
-        {
-            visited_modules_set.insert(module);
-        }
+        None
     }
-    rewriter.insert_after(segment.syntax(), &make::token(T!['(']));
-    rewriter.insert_after(segment.syntax(), segment.syntax());
-    rewriter.insert_after(&expr, &make::token(T![')']));
-    Some(())
+    .and_then(|(segment, expr)| {
+        let module = sema.scope(&expr).module()?;
+        Some((segment, expr, module))
+    })
 }
 
 #[cfg(test)]
@@ -345,7 +357,7 @@ fn another_fn() {
 
         pub struct MyField(pub u8, pub u8);
 
-        pub enum MyEnum {
+pub enum MyEnum {
             MyField(MyField),
         }
     }
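
The reference rewriting in extract_struct_from_enum_variant now happens in two phases: process_references eagerly collects the path segments and nodes to touch (plus any import to add) before the tree is edited, and apply_references then splices the changes in with ted::insert_raw. The net effect on a use site is to wrap the variant's payload in the new struct, e.g. Enum::Variant { x } becomes Enum::Variant(Variant { x }). A sketch of just the splicing step, with segment and node as computed by process_references and assumed to live in a mutable tree:

    use syntax::{ast, ted, AstNode, SyntaxNode, T};
    use syntax::ast::make;

    // Wrap a variant's payload in the freshly extracted struct: insert a copy of
    // the final path segment plus parentheses around the original expression.
    fn wrap_reference(segment: ast::PathSegment, node: SyntaxNode) {
        ted::insert_raw(
            ted::Position::before(segment.syntax()),
            make::path_from_text(&segment.to_string()).clone_for_update().syntax(),
        );
        ted::insert_raw(ted::Position::before(segment.syntax()), make::token(T!['(']));
        ted::insert_raw(ted::Position::after(&node), make::token(T![')']));
    }
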
crates/ide_assists/src/handlers/reorder_fields.rs
index 1a95135ca1dca8982bf19eff04f0f96c022f0eea..e90bbdbcf2678ca87f818ab772bda0dc7dc1028c 100644 (file)
@@ -83,11 +83,9 @@ fn replace<T: AstNode + PartialEq>(
     fields: impl Iterator<Item = T>,
     sorted_fields: impl IntoIterator<Item = T>,
 ) {
-    fields.zip(sorted_fields).filter(|(field, sorted)| field != sorted).for_each(
-        |(field, sorted_field)| {
-            ted::replace(field.syntax(), sorted_field.syntax().clone_for_update());
-        },
-    );
+    fields.zip(sorted_fields).for_each(|(field, sorted_field)| {
+        ted::replace(field.syntax(), sorted_field.syntax().clone_for_update())
+    });
 }
 
 fn compute_fields_ranks(path: &ast::Path, ctx: &AssistContext) -> Option<FxHashMap<String, usize>> {
crates/ide_assists/src/handlers/reorder_impl.rs
index f976e73adc30b9d95c545104e7ec10e7068a8969..72d8892481c87b77306b0a4398b97b9499ada0cd 100644 (file)
@@ -4,9 +4,8 @@
 use hir::{PathResolution, Semantics};
 use ide_db::RootDatabase;
 use syntax::{
-    algo,
     ast::{self, NameOwner},
-    AstNode,
+    ted, AstNode,
 };
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
@@ -75,13 +74,16 @@ pub(crate) fn reorder_impl(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
     }
 
     let target = items.syntax().text_range();
-    acc.add(AssistId("reorder_impl", AssistKind::RefactorRewrite), "Sort methods", target, |edit| {
-        let mut rewriter = algo::SyntaxRewriter::default();
-        for (old, new) in methods.iter().zip(&sorted) {
-            rewriter.replace(old.syntax(), new.syntax());
-        }
-        edit.rewrite(rewriter);
-    })
+    acc.add(
+        AssistId("reorder_impl", AssistKind::RefactorRewrite),
+        "Sort methods",
+        target,
+        |builder| {
+            methods.into_iter().zip(sorted).for_each(|(old, new)| {
+                ted::replace(builder.make_ast_mut(old).syntax(), new.clone_for_update().syntax())
+            });
+        },
+    )
 }
 
 fn compute_method_ranks(path: &ast::Path, ctx: &AssistContext) -> Option<FxHashMap<String, usize>> {
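
reorder_fields and reorder_impl now share the same shape: pair each original node with its sorted counterpart and replace it in place, with the originals obtained as mutable copies (reorder_impl calls builder.make_ast_mut above). Reduced to a generic sketch with a hypothetical helper name, nodes assumed to be mutable copies:

    use syntax::{ted, AstNode};

    // Replace each node with its sorted counterpart, in place.
    fn replace_in_order<N: AstNode>(
        old: impl IntoIterator<Item = N>,
        sorted: impl IntoIterator<Item = N>,
    ) {
        old.into_iter().zip(sorted).for_each(|(old, new)| {
            ted::replace(old.syntax(), new.syntax().clone_for_update())
        });
    }
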
crates/ide_assists/src/handlers/replace_qualified_name_with_use.rs
index 36d2e0331ec5fd0867d971f1e8262e688619f858..99ba798606bbb9aff8b3c7e3e602857017747925 100644 (file)
@@ -1,5 +1,5 @@
 use ide_db::helpers::insert_use::{insert_use, ImportScope};
-use syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SyntaxNode};
+use syntax::{ast, match_ast, ted, AstNode, SyntaxNode};
 
 use crate::{AssistContext, AssistId, AssistKind, Assists};
 
@@ -31,7 +31,7 @@ pub(crate) fn replace_qualified_name_with_use(
     }
 
     let target = path.syntax().text_range();
-    let scope = ImportScope::find_insert_use_container(path.syntax(), &ctx.sema)?;
+    let scope = ImportScope::find_insert_use_container_with_macros(path.syntax(), &ctx.sema)?;
     let syntax = scope.as_syntax_node();
     acc.add(
         AssistId("replace_qualified_name_with_use", AssistKind::RefactorRewrite),
@@ -40,18 +40,17 @@ pub(crate) fn replace_qualified_name_with_use(
         |builder| {
             // Now that we've brought the name into scope, re-qualify all paths that could be
             // affected (that is, all paths inside the node we added the `use` to).
-            let mut rewriter = SyntaxRewriter::default();
-            shorten_paths(&mut rewriter, syntax.clone(), &path);
+            let syntax = builder.make_mut(syntax.clone());
             if let Some(ref import_scope) = ImportScope::from(syntax.clone()) {
-                rewriter += insert_use(import_scope, path, ctx.config.insert_use);
-                builder.rewrite(rewriter);
+                shorten_paths(&syntax, &path.clone_for_update());
+                insert_use(import_scope, path, ctx.config.insert_use);
             }
         },
     )
 }
 
 /// Adds replacements to `re` that shorten `path` in all descendants of `node`.
-fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path: &ast::Path) {
+fn shorten_paths(node: &SyntaxNode, path: &ast::Path) {
     for child in node.children() {
         match_ast! {
             match child {
@@ -60,34 +59,26 @@ fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path:
                 ast::Use(_it) => continue,
                 // Don't descend into submodules, they don't have the same `use` items in scope.
                 ast::Module(_it) => continue,
-
-                ast::Path(p) => {
-                    match maybe_replace_path(rewriter, p.clone(), path.clone()) {
-                        Some(()) => {},
-                        None => shorten_paths(rewriter, p.syntax().clone(), path),
-                    }
+                ast::Path(p) => if maybe_replace_path(p.clone(), path.clone()).is_none() {
+                    shorten_paths(p.syntax(), path);
                 },
-                _ => shorten_paths(rewriter, child, path),
+                _ => shorten_paths(&child, path),
             }
         }
     }
 }
 
-fn maybe_replace_path(
-    rewriter: &mut SyntaxRewriter<'static>,
-    path: ast::Path,
-    target: ast::Path,
-) -> Option<()> {
+fn maybe_replace_path(path: ast::Path, target: ast::Path) -> Option<()> {
     if !path_eq(path.clone(), target) {
         return None;
     }
 
     // Shorten `path`, leaving only its last segment.
     if let Some(parent) = path.qualifier() {
-        rewriter.delete(parent.syntax());
+        ted::remove(parent.syntax());
     }
     if let Some(double_colon) = path.coloncolon_token() {
-        rewriter.delete(&double_colon);
+        ted::remove(&double_colon);
     }
 
     Some(())
@@ -150,6 +141,7 @@ fn test_replace_add_use_no_anchor() {
     ",
         );
     }
+
     #[test]
     fn test_replace_add_use_no_anchor_with_item_below() {
         check_assist(
crates/ide_assists/src/utils.rs
index d67524937e60b6f85e6634a656139385dda6c053..5a90ad715b16d29f85bce9b5c16bf82e8a5c9c80 100644 (file)
@@ -140,7 +140,8 @@ pub fn add_trait_assoc_items_to_impl(
 
     let items = items
         .into_iter()
-        .map(|it| ast_transform::apply(&*ast_transform, it))
+        .map(|it| it.clone_for_update())
+        .inspect(|it| ast_transform::apply(&*ast_transform, it))
         .map(|it| match it {
             ast::AssocItem::Fn(def) => ast::AssocItem::Fn(add_body(def)),
             ast::AssocItem::TypeAlias(def) => ast::AssocItem::TypeAlias(def.remove_bounds()),
crates/ide_completion/src/completions/flyimport.rs
index 8e211ae1ed41e25758fe5f7f971b90b86809282f..9d5b61562a083618523b8147eb88bec8a758963a 100644 (file)
@@ -132,7 +132,7 @@ pub(crate) fn import_on_the_fly(acc: &mut Completions, ctx: &CompletionContext)
 
     let user_input_lowercased = potential_import_name.to_lowercase();
     let import_assets = import_assets(ctx, potential_import_name)?;
-    let import_scope = ImportScope::find_insert_use_container(
+    let import_scope = ImportScope::find_insert_use_container_with_macros(
         position_for_import(ctx, Some(import_assets.import_candidate()))?,
         &ctx.sema,
     )?;
crates/ide_completion/src/item.rs
index 16991b6880da1a11de12d2b2982cc521f2edd8e6..99edb94992020e2914582d3ac4d52a66c0be5dd4 100644 (file)
@@ -377,11 +377,11 @@ impl ImportEdit {
     pub fn to_text_edit(&self, cfg: InsertUseConfig) -> Option<TextEdit> {
         let _p = profile::span("ImportEdit::to_text_edit");
 
-        let rewriter =
-            insert_use::insert_use(&self.scope, mod_path_to_ast(&self.import.import_path), cfg);
-        let old_ast = rewriter.rewrite_root()?;
+        let new_ast = self.scope.clone_for_update();
+        insert_use::insert_use(&new_ast, mod_path_to_ast(&self.import.import_path), cfg);
         let mut import_insert = TextEdit::builder();
-        algo::diff(&old_ast, &rewriter.rewrite(&old_ast)).into_text_edit(&mut import_insert);
+        algo::diff(self.scope.as_syntax_node(), new_ast.as_syntax_node())
+            .into_text_edit(&mut import_insert);
 
         Some(import_insert.finish())
     }
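
Since insert_use no longer returns a SyntaxRewriter, ImportEdit::to_text_edit above builds its TextEdit by editing a throwaway mutable copy of the scope and diffing it against the original. The same recipe works for any ted-based edit that has to be surfaced as a TextEdit; a sketch, assuming the ide_db insert_use helpers and an already-built ast::Path for the import (the text_edit crate path for TextEdit is assumed):

    use ide_db::helpers::insert_use::{insert_use, ImportScope, InsertUseConfig};
    use syntax::{algo, ast};
    use text_edit::TextEdit;

    // Edit a mutable copy of the scope, then diff old vs. new into a TextEdit.
    fn import_text_edit(scope: &ImportScope, path: ast::Path, cfg: InsertUseConfig) -> TextEdit {
        let new_ast = scope.clone_for_update();
        insert_use(&new_ast, path, cfg);
        let mut builder = TextEdit::builder();
        algo::diff(scope.as_syntax_node(), new_ast.as_syntax_node()).into_text_edit(&mut builder);
        builder.finish()
    }
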
crates/ide_completion/src/lib.rs
index 6f3d5c5c57fb8d4cc3c8a6e71075b2d163f8303d..e32633565400659246b697853ffd3f157ae0653d 100644 (file)
@@ -179,7 +179,7 @@ pub fn resolve_completion_edits(
 ) -> Option<Vec<TextEdit>> {
     let ctx = CompletionContext::new(db, position, config)?;
     let position_for_import = position_for_import(&ctx, None)?;
-    let scope = ImportScope::find_insert_use_container(position_for_import, &ctx.sema)?;
+    let scope = ImportScope::find_insert_use_container_with_macros(position_for_import, &ctx.sema)?;
 
     let current_module = ctx.sema.scope(position_for_import).module()?;
     let current_crate = current_module.krate();
crates/ide_db/src/helpers/insert_use.rs
index be3a22725ab1d6d0bce229f189e2bb695fafebfc..a43504a275817cf7323c751b3fad86abbe151a06 100644 (file)
@@ -4,13 +4,9 @@
 use hir::Semantics;
 use itertools::{EitherOrBoth, Itertools};
 use syntax::{
-    algo::SyntaxRewriter,
-    ast::{
-        self,
-        edit::{AstNodeEdit, IndentLevel},
-        make, AstNode, AttrsOwner, PathSegmentKind, VisibilityOwner,
-    },
-    AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
+    algo,
+    ast::{self, edit::AstNodeEdit, make, AstNode, AttrsOwner, PathSegmentKind, VisibilityOwner},
+    ted, AstToken, Direction, NodeOrToken, SyntaxNode, SyntaxToken,
 };
 
 use crate::RootDatabase;
@@ -42,13 +38,18 @@ pub fn from(syntax: SyntaxNode) -> Option<Self> {
     }
 
     /// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
-    pub fn find_insert_use_container(
+    pub fn find_insert_use_container_with_macros(
         position: &SyntaxNode,
         sema: &Semantics<'_, RootDatabase>,
     ) -> Option<Self> {
         sema.ancestors_with_macros(position.clone()).find_map(Self::from)
     }
 
+    /// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
+    pub fn find_insert_use_container(position: &SyntaxNode) -> Option<Self> {
+        std::iter::successors(Some(position.clone()), SyntaxNode::parent).find_map(Self::from)
+    }
+
     pub fn as_syntax_node(&self) -> &SyntaxNode {
         match self {
             ImportScope::File(file) => file.syntax(),
@@ -56,127 +57,32 @@ pub fn as_syntax_node(&self) -> &SyntaxNode {
         }
     }
 
-    fn indent_level(&self) -> IndentLevel {
-        match self {
-            ImportScope::File(file) => file.indent_level(),
-            ImportScope::Module(item_list) => item_list.indent_level() + 1,
-        }
-    }
-
-    fn first_insert_pos(&self) -> (InsertPosition<SyntaxElement>, AddBlankLine) {
+    pub fn clone_for_update(&self) -> Self {
         match self {
-            ImportScope::File(_) => (InsertPosition::First, AddBlankLine::AfterTwice),
-            // don't insert the imports before the item list's opening curly brace
-            ImportScope::Module(item_list) => item_list
-                .l_curly_token()
-                .map(|b| (InsertPosition::After(b.into()), AddBlankLine::Around))
-                .unwrap_or((InsertPosition::First, AddBlankLine::AfterTwice)),
+            ImportScope::File(file) => ImportScope::File(file.clone_for_update()),
+            ImportScope::Module(item_list) => ImportScope::Module(item_list.clone_for_update()),
         }
     }
-
-    fn insert_pos_after_last_inner_element(&self) -> (InsertPosition<SyntaxElement>, AddBlankLine) {
-        self.as_syntax_node()
-            .children_with_tokens()
-            .filter(|child| match child {
-                NodeOrToken::Node(node) => is_inner_attribute(node.clone()),
-                NodeOrToken::Token(token) => is_inner_comment(token.clone()),
-            })
-            .last()
-            .map(|last_inner_element| {
-                (InsertPosition::After(last_inner_element), AddBlankLine::BeforeTwice)
-            })
-            .unwrap_or_else(|| self.first_insert_pos())
-    }
-}
-
-fn is_inner_attribute(node: SyntaxNode) -> bool {
-    ast::Attr::cast(node).map(|attr| attr.kind()) == Some(ast::AttrKind::Inner)
-}
-
-fn is_inner_comment(token: SyntaxToken) -> bool {
-    ast::Comment::cast(token).and_then(|comment| comment.kind().doc)
-        == Some(ast::CommentPlacement::Inner)
 }
 
 /// Insert an import path into the given file/node. A `merge` value of none indicates that no import merging is allowed to occur.
-pub fn insert_use<'a>(
-    scope: &ImportScope,
-    path: ast::Path,
-    cfg: InsertUseConfig,
-) -> SyntaxRewriter<'a> {
+pub fn insert_use<'a>(scope: &ImportScope, path: ast::Path, cfg: InsertUseConfig) {
     let _p = profile::span("insert_use");
-    let mut rewriter = SyntaxRewriter::default();
-    let use_item = make::use_(None, make::use_tree(path.clone(), None, None, false));
+    let use_item =
+        make::use_(None, make::use_tree(path.clone(), None, None, false)).clone_for_update();
     // merge into existing imports if possible
     if let Some(mb) = cfg.merge {
         for existing_use in scope.as_syntax_node().children().filter_map(ast::Use::cast) {
             if let Some(merged) = try_merge_imports(&existing_use, &use_item, mb) {
-                rewriter.replace(existing_use.syntax(), merged.syntax());
-                return rewriter;
+                ted::replace(existing_use.syntax(), merged.syntax());
+                return;
             }
         }
     }
 
     // either we weren't allowed to merge or there is no import that fits the merge conditions
     // so look for the place we have to insert to
-    let (insert_position, add_blank) = find_insert_position(scope, path, cfg.group);
-
-    let indent = if let ident_level @ 1..=usize::MAX = scope.indent_level().0 as usize {
-        Some(make::tokens::whitespace(&" ".repeat(4 * ident_level)).into())
-    } else {
-        None
-    };
-
-    let to_insert: Vec<SyntaxElement> = {
-        let mut buf = Vec::new();
-
-        match add_blank {
-            AddBlankLine::Before | AddBlankLine::Around => {
-                buf.push(make::tokens::single_newline().into())
-            }
-            AddBlankLine::BeforeTwice => buf.push(make::tokens::blank_line().into()),
-            _ => (),
-        }
-
-        if add_blank.has_before() {
-            if let Some(indent) = indent.clone() {
-                cov_mark::hit!(insert_use_indent_before);
-                buf.push(indent);
-            }
-        }
-
-        buf.push(use_item.syntax().clone().into());
-
-        match add_blank {
-            AddBlankLine::After | AddBlankLine::Around => {
-                buf.push(make::tokens::single_newline().into())
-            }
-            AddBlankLine::AfterTwice => buf.push(make::tokens::blank_line().into()),
-            _ => (),
-        }
-
-        // only add indentation *after* our stuff if there's another node directly after it
-        if add_blank.has_after() && matches!(insert_position, InsertPosition::Before(_)) {
-            if let Some(indent) = indent {
-                cov_mark::hit!(insert_use_indent_after);
-                buf.push(indent);
-            }
-        } else if add_blank.has_after() && matches!(insert_position, InsertPosition::After(_)) {
-            cov_mark::hit!(insert_use_no_indent_after);
-        }
-
-        buf
-    };
-
-    match insert_position {
-        InsertPosition::First => {
-            rewriter.insert_many_as_first_children(scope.as_syntax_node(), to_insert)
-        }
-        InsertPosition::Last => return rewriter, // actually unreachable
-        InsertPosition::Before(anchor) => rewriter.insert_many_before(&anchor, to_insert),
-        InsertPosition::After(anchor) => rewriter.insert_many_after(&anchor, to_insert),
-    }
-    rewriter
+    insert_use_(scope, path, cfg.group, use_item);
 }
 
 fn eq_visibility(vis0: Option<ast::Visibility>, vis1: Option<ast::Visibility>) -> bool {
@@ -235,7 +141,7 @@ pub fn try_merge_trees(
     } else {
         (lhs.split_prefix(&lhs_prefix), rhs.split_prefix(&rhs_prefix))
     };
-    recursive_merge(&lhs, &rhs, merge).map(|it| it.clone_for_update())
+    recursive_merge(&lhs, &rhs, merge)
 }
 
 /// Recursively "zips" together lhs and rhs.
@@ -334,7 +240,12 @@ fn recursive_merge(
             }
         }
     }
-    Some(lhs.with_use_tree_list(make::use_tree_list(use_trees)))
+
+    Some(if let Some(old) = lhs.use_tree_list() {
+        lhs.replace_descendant(old, make::use_tree_list(use_trees)).clone_for_update()
+    } else {
+        lhs.clone()
+    })
 }
 
 /// Traverses both paths until they differ, returning the common prefix of both.
@@ -520,32 +431,15 @@ fn new(path: &ast::Path) -> ImportGroup {
     }
 }
 
-#[derive(PartialEq, Eq)]
-enum AddBlankLine {
-    Before,
-    BeforeTwice,
-    Around,
-    After,
-    AfterTwice,
-}
-
-impl AddBlankLine {
-    fn has_before(&self) -> bool {
-        matches!(self, AddBlankLine::Before | AddBlankLine::BeforeTwice | AddBlankLine::Around)
-    }
-    fn has_after(&self) -> bool {
-        matches!(self, AddBlankLine::After | AddBlankLine::AfterTwice | AddBlankLine::Around)
-    }
-}
-
-fn find_insert_position(
+fn insert_use_(
     scope: &ImportScope,
     insert_path: ast::Path,
     group_imports: bool,
-) -> (InsertPosition<SyntaxElement>, AddBlankLine) {
+    use_item: ast::Use,
+) {
+    let scope_syntax = scope.as_syntax_node();
     let group = ImportGroup::new(&insert_path);
-    let path_node_iter = scope
-        .as_syntax_node()
+    let path_node_iter = scope_syntax
         .children()
         .filter_map(|node| ast::Use::cast(node.clone()).zip(Some(node)))
         .flat_map(|(use_, node)| {
@@ -557,9 +451,14 @@ fn find_insert_position(
 
     if !group_imports {
         if let Some((_, _, node)) = path_node_iter.last() {
-            return (InsertPosition::After(node.into()), AddBlankLine::Before);
+            cov_mark::hit!(insert_no_grouping_last);
+            ted::insert(ted::Position::after(node), use_item.syntax());
+        } else {
+            cov_mark::hit!(insert_no_grouping_last2);
+            ted::insert(ted::Position::first_child_of(scope_syntax), make::tokens::blank_line());
+            ted::insert(ted::Position::first_child_of(scope_syntax), use_item.syntax());
         }
-        return (InsertPosition::First, AddBlankLine::AfterTwice);
+        return;
     }
 
     // Iterator that discards anything thats not in the required grouping
@@ -572,43 +471,91 @@ fn find_insert_position(
     // track the last element we iterated over, if this is still None after the iteration then that means we never iterated in the first place
     let mut last = None;
     // find the element that would come directly after our new import
-    let post_insert = group_iter.inspect(|(.., node)| last = Some(node.clone())).find(
-        |&(ref path, has_tl, _)| {
+    let post_insert: Option<(_, _, SyntaxNode)> = group_iter
+        .inspect(|(.., node)| last = Some(node.clone()))
+        .find(|&(ref path, has_tl, _)| {
             use_tree_path_cmp(&insert_path, false, path, has_tl) != Ordering::Greater
-        },
-    );
+        });
 
-    match post_insert {
+    if let Some((.., node)) = post_insert {
+        cov_mark::hit!(insert_group);
         // insert our import before that element
-        Some((.., node)) => (InsertPosition::Before(node.into()), AddBlankLine::After),
+        return ted::insert(ted::Position::before(node), use_item.syntax());
+    }
+    if let Some(node) = last {
+        cov_mark::hit!(insert_group_last);
         // there is no element after our new import, so append it to the end of the group
-        None => match last {
-            Some(node) => (InsertPosition::After(node.into()), AddBlankLine::Before),
-            // the group we were looking for actually doesnt exist, so insert
+        return ted::insert(ted::Position::after(node), use_item.syntax());
+    }
+
+    // the group we were looking for actually doesn't exist, so insert
+
+    let mut last = None;
+    // find the group that comes after where we want to insert
+    let post_group = path_node_iter
+        .inspect(|(.., node)| last = Some(node.clone()))
+        .find(|(p, ..)| ImportGroup::new(p) > group);
+    if let Some((.., node)) = post_group {
+        cov_mark::hit!(insert_group_new_group);
+        ted::insert(ted::Position::before(&node), use_item.syntax());
+        if let Some(node) = algo::non_trivia_sibling(node.into(), Direction::Prev) {
+            ted::insert(ted::Position::after(node), make::tokens::single_newline());
+        }
+        return;
+    }
+    // there is no such group, so append after the last one
+    if let Some(node) = last {
+        cov_mark::hit!(insert_group_no_group);
+        ted::insert(ted::Position::after(&node), use_item.syntax());
+        ted::insert(ted::Position::after(node), make::tokens::single_newline());
+        return;
+    }
+    // there are no imports in this file at all
+    if let Some(last_inner_element) = scope_syntax
+        .children_with_tokens()
+        .filter(|child| match child {
+            NodeOrToken::Node(node) => is_inner_attribute(node.clone()),
+            NodeOrToken::Token(token) => is_inner_comment(token.clone()),
+        })
+        .last()
+    {
+        cov_mark::hit!(insert_group_empty_inner_attr);
+        ted::insert(ted::Position::after(&last_inner_element), use_item.syntax());
+        ted::insert(ted::Position::after(last_inner_element), make::tokens::single_newline());
+        return;
+    }
+    match scope {
+        ImportScope::File(_) => {
+            cov_mark::hit!(insert_group_empty_file);
+            ted::insert(ted::Position::first_child_of(scope_syntax), make::tokens::blank_line());
+            ted::insert(ted::Position::first_child_of(scope_syntax), use_item.syntax())
+        }
+        // don't insert the imports before the item list's opening curly brace
+        ImportScope::Module(item_list) => match item_list.l_curly_token() {
+            Some(b) => {
+                cov_mark::hit!(insert_group_empty_module);
+                ted::insert(ted::Position::after(&b), make::tokens::single_newline());
+                ted::insert(ted::Position::after(&b), use_item.syntax());
+            }
             None => {
-                // similar concept here to the `last` from above
-                let mut last = None;
-                // find the group that comes after where we want to insert
-                let post_group = path_node_iter
-                    .inspect(|(.., node)| last = Some(node.clone()))
-                    .find(|(p, ..)| ImportGroup::new(p) > group);
-                match post_group {
-                    Some((.., node)) => {
-                        (InsertPosition::Before(node.into()), AddBlankLine::AfterTwice)
-                    }
-                    // there is no such group, so append after the last one
-                    None => match last {
-                        Some(node) => {
-                            (InsertPosition::After(node.into()), AddBlankLine::BeforeTwice)
-                        }
-                        // there are no imports in this file at all
-                        None => scope.insert_pos_after_last_inner_element(),
-                    },
-                }
+                // This should never happen: broken module syntax node
+                ted::insert(
+                    ted::Position::first_child_of(scope_syntax),
+                    make::tokens::blank_line(),
+                );
+                ted::insert(ted::Position::first_child_of(scope_syntax), use_item.syntax());
             }
         },
     }
 }
 
+fn is_inner_attribute(node: SyntaxNode) -> bool {
+    ast::Attr::cast(node).map(|attr| attr.kind()) == Some(ast::AttrKind::Inner)
+}
+
+fn is_inner_comment(token: SyntaxToken) -> bool {
+    ast::Comment::cast(token).and_then(|comment| comment.kind().doc)
+        == Some(ast::CommentPlacement::Inner)
+}
 #[cfg(test)]
 mod tests;
crates/ide_db/src/helpers/insert_use/tests.rs
index 3d151e629d29f9ded330fe85d46f24175ecf26c2..048c213e229ef76d0509c3232e2e2fc2f1ae7ebe 100644 (file)
@@ -5,6 +5,7 @@
 
 #[test]
 fn insert_not_group() {
+    cov_mark::check!(insert_no_grouping_last);
     check(
         "use external_crate2::bar::A",
         r"
@@ -26,6 +27,21 @@ fn insert_not_group() {
     );
 }
 
+#[test]
+fn insert_not_group_empty() {
+    cov_mark::check!(insert_no_grouping_last2);
+    check(
+        "use external_crate2::bar::A",
+        r"",
+        r"use external_crate2::bar::A;
+
+",
+        None,
+        false,
+        false,
+    );
+}
+
 #[test]
 fn insert_existing() {
     check_full("std::fs", "use std::fs;", "use std::fs;")
@@ -51,21 +67,21 @@ fn insert_start() {
 
 #[test]
 fn insert_start_indent() {
-    cov_mark::check!(insert_use_indent_after);
     check_none(
         "std::bar::AA",
         r"
     use std::bar::B;
-    use std::bar::D;",
+    use std::bar::C;",
         r"
     use std::bar::AA;
     use std::bar::B;
-    use std::bar::D;",
-    )
+    use std::bar::C;",
+    );
 }
 
 #[test]
 fn insert_middle() {
+    cov_mark::check!(insert_group);
     check_none(
         "std::bar::EE",
         r"
@@ -102,6 +118,7 @@ fn insert_middle_indent() {
 
 #[test]
 fn insert_end() {
+    cov_mark::check!(insert_group_last);
     check_none(
         "std::bar::ZZ",
         r"
@@ -120,7 +137,6 @@ fn insert_end() {
 
 #[test]
 fn insert_end_indent() {
-    cov_mark::check!(insert_use_indent_before);
     check_none(
         "std::bar::ZZ",
         r"
@@ -201,6 +217,7 @@ fn insert_first_matching_group() {
 
 #[test]
 fn insert_missing_group_std() {
+    cov_mark::check!(insert_group_new_group);
     check_none(
         "std::fmt",
         r"
@@ -216,6 +233,7 @@ fn insert_missing_group_std() {
 
 #[test]
 fn insert_missing_group_self() {
+    cov_mark::check!(insert_group_no_group);
     check_none(
         "self::fmt",
         r"
@@ -242,6 +260,7 @@ fn main() {}",
 
 #[test]
 fn insert_empty_file() {
+    cov_mark::check!(insert_group_empty_file);
     // empty files will get two trailing newlines
     // this is due to the test case insert_no_imports above
     check_full(
@@ -255,7 +274,7 @@ fn insert_empty_file() {
 
 #[test]
 fn insert_empty_module() {
-    cov_mark::check!(insert_use_no_indent_after);
+    cov_mark::check!(insert_group_empty_module);
     check(
         "foo::bar",
         "mod x {}",
@@ -270,6 +289,7 @@ fn insert_empty_module() {
 
 #[test]
 fn insert_after_inner_attr() {
+    cov_mark::check!(insert_group_empty_inner_attr);
     check_full(
         "foo::bar",
         r"#![allow(unused_imports)]",
@@ -615,7 +635,7 @@ fn check(
     if module {
         syntax = syntax.descendants().find_map(ast::Module::cast).unwrap().syntax().clone();
     }
-    let file = super::ImportScope::from(syntax).unwrap();
+    let file = super::ImportScope::from(syntax.clone_for_update()).unwrap();
     let path = ast::SourceFile::parse(&format!("use {};", path))
         .tree()
         .syntax()
@@ -623,12 +643,8 @@ fn check(
         .find_map(ast::Path::cast)
         .unwrap();
 
-    let rewriter = insert_use(
-        &file,
-        path,
-        InsertUseConfig { merge: mb, prefix_kind: PrefixKind::Plain, group },
-    );
-    let result = rewriter.rewrite(file.as_syntax_node()).to_string();
+    insert_use(&file, path, InsertUseConfig { merge: mb, prefix_kind: PrefixKind::Plain, group });
+    let result = file.as_syntax_node().to_string();
     assert_eq_text!(ra_fixture_after, &result);
 }
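
The updated test helper above also doubles as the clearest picture of the new insert_use calling convention: parse, clone_for_update, wrap in an ImportScope, mutate, and read the text back. A compact sketch of the same flow, with a hypothetical function name and the config constructed by the caller as in the tests:

    use ide_db::helpers::insert_use::{insert_use, ImportScope, InsertUseConfig};
    use syntax::{ast, AstNode, SourceFile};

    // Insert `use <import>;` into `file` and return the edited source text.
    fn add_import(file: &str, import: &str, cfg: InsertUseConfig) -> String {
        let syntax = SourceFile::parse(file).tree().syntax().clone_for_update();
        let scope = ImportScope::from(syntax).expect("not a valid import scope");
        let path = SourceFile::parse(&format!("use {};", import))
            .tree()
            .syntax()
            .descendants()
            .find_map(ast::Path::cast)
            .unwrap();
        insert_use(&scope, path, cfg);
        scope.as_syntax_node().to_string()
    }
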
 
crates/syntax/src/algo.rs
index a153a9e1c38b79d0f66c6b4e1e3590385758d74a..c9229c4e07dabfa7e943740bdf3e18355c1bef22 100644 (file)
@@ -342,10 +342,10 @@ enum InsertPos {
 
 #[derive(Default)]
 pub struct SyntaxRewriter<'a> {
-    f: Option<Box<dyn Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a>>,
     //FIXME: add debug_assertions that all elements are in fact from the same file.
     replacements: FxHashMap<SyntaxElement, Replacement>,
     insertions: IndexMap<InsertPos, Vec<SyntaxElement>>,
+    _pd: std::marker::PhantomData<&'a ()>,
 }
 
 impl fmt::Debug for SyntaxRewriter<'_> {
@@ -357,14 +357,7 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
     }
 }
 
-impl<'a> SyntaxRewriter<'a> {
-    pub fn from_fn(f: impl Fn(&SyntaxElement) -> Option<SyntaxElement> + 'a) -> SyntaxRewriter<'a> {
-        SyntaxRewriter {
-            f: Some(Box::new(f)),
-            replacements: FxHashMap::default(),
-            insertions: IndexMap::default(),
-        }
-    }
+impl SyntaxRewriter<'_> {
     pub fn delete<T: Clone + Into<SyntaxElement>>(&mut self, what: &T) {
         let what = what.clone().into();
         let replacement = Replacement::Delete;
@@ -470,7 +463,7 @@ pub fn replace_ast<T: AstNode>(&mut self, what: &T, with: &T) {
     pub fn rewrite(&self, node: &SyntaxNode) -> SyntaxNode {
         let _p = profile::span("rewrite");
 
-        if self.f.is_none() && self.replacements.is_empty() && self.insertions.is_empty() {
+        if self.replacements.is_empty() && self.insertions.is_empty() {
             return node.clone();
         }
         let green = self.rewrite_children(node);
@@ -495,7 +488,6 @@ fn element_to_node_or_parent(element: &SyntaxElement) -> Option<SyntaxNode> {
             }
         }
 
-        assert!(self.f.is_none());
         self.replacements
             .keys()
             .filter_map(element_to_node_or_parent)
@@ -510,10 +502,6 @@ fn element_to_node_or_parent(element: &SyntaxElement) -> Option<SyntaxNode> {
     }
 
     fn replacement(&self, element: &SyntaxElement) -> Option<Replacement> {
-        if let Some(f) = &self.f {
-            assert!(self.replacements.is_empty());
-            return f(element).map(Replacement::Single);
-        }
         self.replacements.get(element).cloned()
     }
 
@@ -574,7 +562,6 @@ fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, row
 
 impl ops::AddAssign for SyntaxRewriter<'_> {
     fn add_assign(&mut self, rhs: SyntaxRewriter) {
-        assert!(rhs.f.is_none());
         self.replacements.extend(rhs.replacements);
         for (pos, insertions) in rhs.insertions.into_iter() {
             match self.insertions.entry(pos) {
crates/syntax/src/ast/make.rs
index 222b7e212b375f1b368231c918d3756f1ff6fef8..42da0960615c4dadee55b34248ce56b775f9457f 100644 (file)
@@ -632,6 +632,7 @@ pub fn blank_line() -> SyntaxToken {
         SOURCE_FILE
             .tree()
             .syntax()
+            .clone_for_update()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
             .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
crates/syntax/src/ted.rs
index 450f2e447ab7ee5b86d8d296560c183bcae931ab..91a06101f5984548d1558b94e1b17adc6185067f 100644 (file)
@@ -7,7 +7,7 @@
 use parser::T;
 
 use crate::{
-    ast::{edit::IndentLevel, make},
+    ast::{self, edit::IndentLevel, make, AstNode},
     SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
 };
 
@@ -147,6 +147,16 @@ pub fn append_child_raw(node: &(impl Into<SyntaxNode> + Clone), child: impl Elem
 fn ws_before(position: &Position, new: &SyntaxElement) -> Option<SyntaxToken> {
     let prev = match &position.repr {
         PositionRepr::FirstChild(_) => return None,
+        PositionRepr::After(it) if it.kind() == SyntaxKind::L_CURLY => {
+            if new.kind() == SyntaxKind::USE {
+                if let Some(item_list) = it.parent().and_then(ast::ItemList::cast) {
+                    let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into());
+                    indent.0 += 1;
+                    return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+                }
+            }
+            it
+        }
         PositionRepr::After(it) => it,
     };
     ws_between(prev, new)
@@ -173,7 +183,10 @@ fn ws_between(left: &SyntaxElement, right: &SyntaxElement) -> Option<SyntaxToken
     }
 
     if right.kind() == SyntaxKind::USE {
-        let indent = IndentLevel::from_element(left);
+        let mut indent = IndentLevel::from_element(left);
+        if left.kind() == SyntaxKind::USE {
+            indent.0 = IndentLevel::from_element(right).0.max(indent.0);
+        }
         return Some(make::tokens::whitespace(&format!("\n{}", indent)));
     }
     Some(make::tokens::single_space())
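
The ws_before/ws_between tweaks in ted.rs are what let the rewritten insert_use drop its AddBlankLine bookkeeping: as far as these hunks show, ted::insert (unlike ted::insert_raw) runs the whitespace heuristics, so a use item inserted after an ItemList's opening brace or after another use item picks up a newline and matching indentation on its own. A minimal illustration of the distinction, assuming both nodes already live in mutable (clone_for_update) trees:

    use syntax::{ast, ted, AstNode};

    // `insert` consults the ws_before/ws_after heuristics patched above, so the new
    // `use` lands on its own line with matching indentation; `insert_raw` splices the
    // node in verbatim and leaves whitespace to the caller, as apply_references and
    // create_struct_def do earlier in this commit.
    fn append_use(existing: &ast::Use, new_use: &ast::Use) {
        ted::insert(ted::Position::after(existing.syntax()), new_use.syntax());
    }
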