Some((version, url)) => {
(version, CrateOrigin::CratesIo { repo: Some(url.to_owned()), name: None })
}
- _ => panic!("Bad crates.io parameter: {}", data),
+ _ => panic!("Bad crates.io parameter: {data}"),
},
- _ => panic!("Bad string for crate origin: {}", b),
+ _ => panic!("Bad string for crate origin: {b}"),
};
(a.to_owned(), origin, Some(version.to_string()))
} else {
introduce_new_source_root: f.introduce_new_source_root.map(|kind| match &*kind {
"local" => SourceRootKind::Local,
"library" => SourceRootKind::Library,
- invalid => panic!("invalid source root kind '{}'", invalid),
+ invalid => panic!("invalid source root kind '{invalid}'"),
}),
target_data_layout: f.target_data_layout,
}
impl fmt::Display for CyclicDependenciesError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let render = |(id, name): &(CrateId, Option<CrateDisplayName>)| match name {
- Some(it) => format!("{}({:?})", it, id),
- None => format!("{:?}", id),
+ Some(it) => format!("{it}({id:?})"),
+ None => format!("{id:?}"),
};
let path = self.path.iter().rev().map(render).collect::<Vec<String>>().join(" -> ");
write!(
}
fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
- let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id));
+ let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
let text = db.file_text(file_id);
SourceFile::parse(&text)
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
CfgAtom::Flag(name) => name.fmt(f),
- CfgAtom::KeyValue { key, value } => write!(f, "{} = {:?}", key, value),
+ CfgAtom::KeyValue { key, value } => write!(f, "{key} = {value:?}"),
}
}
}
.iter()
.map(|atom| match atom {
CfgAtom::Flag(it) => it.to_string(),
- CfgAtom::KeyValue { key, value } => format!("{}={}", key, value),
+ CfgAtom::KeyValue { key, value } => format!("{key}={value}"),
})
.collect::<Vec<_>>();
items.sort();
atom.fmt(f)?;
}
let is_are = if self.enabled.len() == 1 { "is" } else { "are" };
- write!(f, " {} enabled", is_are)?;
+ write!(f, " {is_are} enabled")?;
if !self.disabled.is_empty() {
f.write_str(" and ")?;
atom.fmt(f)?;
}
let is_are = if self.disabled.len() == 1 { "is" } else { "are" };
- write!(f, " {} disabled", is_are)?;
+ write!(f, " {is_are} disabled")?;
}
Ok(())
impl fmt::Display for FlycheckConfig {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
- FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {}", command),
+ FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {command}"),
FlycheckConfig::CustomCommand { command, args, .. } => {
- write!(f, "{} {}", command, args.join(" "))
+ write!(f, "{command} {}", args.join(" "))
}
}
}
);
match output {
Ok(_) => Ok((read_at_least_one_message, error)),
- Err(e) => Err(io::Error::new(e.kind(), format!("{:?}: {}", e, error))),
+ Err(e) => Err(io::Error::new(e.kind(), format!("{e:?}: {error}"))),
}
}
}
self.source
.get(ast_idx)
.map(|it| InFile::new(file_id, it))
- .unwrap_or_else(|| panic!("cannot find attr at index {:?}", id))
+ .unwrap_or_else(|| panic!("cannot find attr at index {id:?}"))
}
}
Some(name) => name.to_string(),
None => "_".to_string(),
};
- format!("const {} = ", name)
+ format!("const {name} = ")
}
DefWithBodyId::VariantId(it) => {
needs_semi = false;
Some(name) => name.to_string(),
None => "_".to_string(),
};
- format!("{}", name)
+ format!("{name}")
}
};
fn check_found_path_(ra_fixture: &str, path: &str, prefix_kind: Option<PrefixKind>) {
let (db, pos) = TestDB::with_position(ra_fixture);
let module = db.module_at_position(pos);
- let parsed_path_file = syntax::SourceFile::parse(&format!("use {};", path));
+ let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
let ast_path =
parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap();
let found_path =
find_path_inner(&db, ItemInNs::Types(resolved), module, prefix_kind, false);
- assert_eq!(found_path, Some(mod_path), "{:?}", prefix_kind);
+ assert_eq!(found_path, Some(mod_path), "{prefix_kind:?}");
}
fn check_found_path(
ItemInNs::Values(_) => "v",
ItemInNs::Macros(_) => "m",
};
- format!("- {} ({})", info.path, ns)
+ format!("- {} ({ns})", info.path)
})
.collect();
krate: CrateId,
query: Query,
) -> FxHashSet<ItemInNs> {
- let _p = profile::span("search_dependencies").detail(|| format!("{:?}", query));
+ let _p = profile::span("search_dependencies").detail(|| format!("{query:?}"));
let graph = db.crate_graph();
let import_maps: Vec<_> =
None
}
})?;
- return Some(format!("{}::{}", dependency_imports.path_of(trait_)?, assoc_item_name));
+ return Some(format!("{}::{assoc_item_name}", dependency_imports.path_of(trait_)?));
}
None
}
let map = db.import_map(krate);
- Some(format!("{}:\n{:?}\n", name, map))
+ Some(format!("{name}:\n{map:?}\n"))
})
.sorted()
.collect::<String>();
impl ItemTree {
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
- let _p = profile::span("file_item_tree_query").detail(|| format!("{:?}", file_id));
+ let _p = profile::span("file_item_tree_query").detail(|| format!("{file_id:?}"));
let syntax = match db.parse_or_expand(file_id) {
Some(node) => node,
None => return Default::default(),
ctx.lower_macro_stmts(stmts)
},
_ => {
- panic!("cannot create item tree from {:?} {}", syntax, syntax);
+ panic!("cannot create item tree from {syntax:?} {syntax}");
},
}
};
if tree {
let tree = format!("{:#?}", parse.syntax_node())
.split_inclusive('\n')
- .map(|line| format!("// {}", line))
+ .map(|line| format!("// {line}"))
.collect::<String>();
format_to!(expn_text, "\n{}", tree)
}
for (name, child) in
map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0))
{
- let path = format!("{}::{}", path, name);
+ let path = format!("{path}::{name}");
buf.push('\n');
go(buf, map, &path, *child);
}
None => true,
Some(old_vis) => {
let max_vis = old_vis.max(vis, &self.def_map).unwrap_or_else(|| {
- panic!("`Tr as _` imports with unrelated visibilities {:?} and {:?} (trait {:?})", old_vis, vis, tr);
+ panic!("`Tr as _` imports with unrelated visibilities {old_vis:?} and {vis:?} (trait {tr:?})");
});
if max_vis == old_vis {
candidate_files.push(self.dir_path.join_attr(attr_path, self.root_non_dir_owner))
}
None if file_id.is_include_macro(db.upcast()) => {
- candidate_files.push(format!("{}.rs", name));
- candidate_files.push(format!("{}/mod.rs", name));
+ candidate_files.push(format!("{name}.rs"));
+ candidate_files.push(format!("{name}/mod.rs"));
}
None => {
- candidate_files.push(format!("{}{}.rs", self.dir_path.0, name));
- candidate_files.push(format!("{}{}/mod.rs", self.dir_path.0, name));
+ candidate_files.push(format!("{}{name}.rs", self.dir_path.0));
+ candidate_files.push(format!("{}{name}/mod.rs", self.dir_path.0));
}
};
let (dir_path, root_non_dir_owner) = if is_mod_rs || attr_path.is_some() {
(DirPath::empty(), false)
} else {
- (DirPath::new(format!("{}/", name)), true)
+ (DirPath::new(format!("{name}/")), true)
};
if let Some(mod_dir) = self.child(dir_path, root_non_dir_owner) {
return Ok((file_id, is_mod_rs, mod_dir));
} else {
attr
};
- let res = format!("{}{}", base, attr);
+ let res = format!("{base}{attr}");
res
}
}
) -> ResolvePathResult {
let graph = db.crate_graph();
let _cx = stdx::panic_context::enter(format!(
- "DefMap {:?} crate_name={:?} block={:?} path={}",
- self.krate, graph[self.krate].display_name, self.block, path
+ "DefMap {:?} crate_name={:?} block={:?} path={path}",
+ self.krate, graph[self.krate].display_name, self.block
));
let mut segments = path.segments().iter().enumerate();
let events = db.log_executed(|| {
db.crate_def_map(krate);
});
- assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
}
db.set_file_text(pos.file_id, Arc::new(ra_fixture_change.to_string()));
let events = db.log_executed(|| {
db.crate_def_map(krate);
});
- assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}")
}
}
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 1);
});
- assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
}
db.set_file_text(pos.file_id, Arc::new("m!(Y);".to_string()));
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 1);
});
- assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events)
+ assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}")
}
}
pub(crate) fn print_generic_arg(arg: &GenericArg, buf: &mut dyn Write) -> fmt::Result {
match arg {
GenericArg::Type(ty) => print_type_ref(ty, buf),
- GenericArg::Const(c) => write!(buf, "{}", c),
+ GenericArg::Const(c) => write!(buf, "{c}"),
GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name),
}
}
Mutability::Shared => "*const",
Mutability::Mut => "*mut",
};
- write!(buf, "{} ", mtbl)?;
+ write!(buf, "{mtbl} ")?;
print_type_ref(pointee, buf)?;
}
TypeRef::Reference(pointee, lt, mtbl) => {
if let Some(lt) = lt {
write!(buf, "{} ", lt.name)?;
}
- write!(buf, "{}", mtbl)?;
+ write!(buf, "{mtbl}")?;
print_type_ref(pointee, buf)?;
}
TypeRef::Array(elem, len) => {
write!(buf, "[")?;
print_type_ref(elem, buf)?;
- write!(buf, "; {}]", len)?;
+ write!(buf, "; {len}]")?;
}
TypeRef::Slice(elem) => {
write!(buf, "[")?;
// be reported at the definition site (when we construct a def map).
Err(err) => {
return ExpandResult::only_err(ExpandError::Other(
- format!("invalid macro definition: {}", err).into(),
+ format!("invalid macro definition: {err}").into(),
))
}
};
Ok(Ok(db.intern_macro_call(loc)))
} else {
- panic!("called `expand_eager_macro` on non-eager macro def {:?}", def);
+ panic!("called `expand_eager_macro` on non-eager macro def {def:?}");
}
}
fixups.append,
);
- let actual = format!("{}\n", tt);
+ let actual = format!("{tt}\n");
expect.indent(false);
expect.assert_eq(&actual);
let quoted = quote!(#a);
assert_eq!(quoted.to_string(), "hello");
- let t = format!("{:?}", quoted);
+ let t = format!("{quoted:?}");
assert_eq!(t, "SUBTREE $\n IDENT hello 4294967295");
}
match (a.data(Interner), e) {
(chalk_ir::GenericArgData::Ty(_), ParamKind::Type)
| (chalk_ir::GenericArgData::Const(_), ParamKind::Const(_)) => (),
- _ => panic!("Mismatched kinds: {:?}, {:?}, {:?}", a, self.vec, self.param_kinds),
+ _ => panic!("Mismatched kinds: {a:?}, {:?}, {:?}", self.vec, self.param_kinds),
}
}
}
ComputedExpr::Literal(l) => match l {
Literal::Int(x, _) => {
if *x >= 10 {
- write!(f, "{} ({:#X})", x, x)
+ write!(f, "{x} ({x:#X})")
} else {
x.fmt(f)
}
}
Literal::Uint(x, _) => {
if *x >= 10 {
- write!(f, "{} ({:#X})", x, x)
+ write!(f, "{x} ({x:#X})")
} else {
x.fmt(f)
}
match r {
ComputedExpr::Literal(Literal::Int(r, _)) => assert_eq!(r, answer),
ComputedExpr::Literal(Literal::Uint(r, _)) => assert_eq!(r, answer as u128),
- x => panic!("Expected number but found {:?}", x),
+ x => panic!("Expected number but found {x:?}"),
}
}
assert_eq!(name, "E::A");
assert_eq!(val, 1);
}
- x => panic!("Expected enum but found {:?}", x),
+ x => panic!("Expected enum but found {x:?}"),
}
}
}
subpattern.hir_fmt(f)
}
- PatKind::LiteralBool { value } => write!(f, "{}", value),
+ PatKind::LiteralBool { value } => write!(f, "{value}"),
PatKind::Or { pats } => f.write_joined(pats.iter(), " | "),
}
}
hir_def::AdtId::UnionId(id) => id.into(),
}
}
- _ => panic!("bad constructor {:?} for adt {:?}", self, adt),
+ _ => panic!("bad constructor {self:?} for adt {adt:?}"),
}
}
let mut first = true;
for e in iter {
if !first {
- write!(self, "{}", sep)?;
+ write!(self, "{sep}")?;
}
first = false;
// Abbreviate multiple omitted types with a single ellipsis.
if self.should_truncate() {
- return write!(self, "{}", TYPE_HINT_TRUNCATION);
+ return write!(self, "{TYPE_HINT_TRUNCATION}");
}
e.hir_fmt(self)?;
impl HirDisplay for ProjectionTy {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
if f.should_truncate() {
- return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ return write!(f, "{TYPE_HINT_TRUNCATION}");
}
let trait_ref = self.trait_ref(f.db);
impl HirDisplay for OpaqueTy {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
if f.should_truncate() {
- return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ return write!(f, "{TYPE_HINT_TRUNCATION}");
}
self.substitution.at(Interner, 0).hir_fmt(f)
impl HirDisplay for Ty {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
if f.should_truncate() {
- return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ return write!(f, "{TYPE_HINT_TRUNCATION}");
}
match self.kind(Interner) {
hir_def::AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
hir_def::AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
};
- write!(f, "{}", name)?;
+ write!(f, "{name}")?;
}
DisplayTarget::SourceCode { module_id } => {
if let Some(path) = find_path::find_path(
module_id,
false,
) {
- write!(f, "{}", path)?;
+ write!(f, "{path}")?;
} else {
return Err(HirDisplayError::DisplaySourceCodeError(
DisplaySourceCodeError::PathNotFound,
if sig.params().is_empty() {
write!(f, "||")?;
} else if f.should_truncate() {
- write!(f, "|{}|", TYPE_HINT_TRUNCATION)?;
+ write!(f, "|{TYPE_HINT_TRUNCATION}|")?;
} else {
write!(f, "|")?;
f.write_joined(sig.params(), ", ")?;
default_sized: SizedByDefault,
f: &mut HirFormatter<'_>,
) -> Result<(), HirDisplayError> {
- write!(f, "{}", prefix)?;
+ write!(f, "{prefix}")?;
if !predicates.is_empty()
|| predicates.is_empty() && matches!(default_sized, SizedByDefault::Sized { .. })
{
use_as: bool,
) -> Result<(), HirDisplayError> {
if f.should_truncate() {
- return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ return write!(f, "{TYPE_HINT_TRUNCATION}");
}
tr.self_type_parameter(Interner).hir_fmt(f)?;
impl HirDisplay for WhereClause {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
if f.should_truncate() {
- return write!(f, "{}", TYPE_HINT_TRUNCATION);
+ return write!(f, "{TYPE_HINT_TRUNCATION}");
}
match self {
hir_def::type_ref::Mutability::Shared => "*const ",
hir_def::type_ref::Mutability::Mut => "*mut ",
};
- write!(f, "{}", mutability)?;
+ write!(f, "{mutability}")?;
inner.hir_fmt(f)?;
}
TypeRef::Reference(inner, lifetime, mutability) => {
if let Some(lifetime) = lifetime {
write!(f, "{} ", lifetime.name)?;
}
- write!(f, "{}", mutability)?;
+ write!(f, "{mutability}")?;
inner.hir_fmt(f)?;
}
TypeRef::Array(inner, len) => {
write!(f, "[")?;
inner.hir_fmt(f)?;
- write!(f, "; {}]", len)?;
+ write!(f, "; {len}]")?;
}
TypeRef::Slice(inner) => {
write!(f, "[")?;
for index in 0..function_parameters.len() {
let (param_name, param_type) = &function_parameters[index];
if let Some(name) = param_name {
- write!(f, "{}: ", name)?;
+ write!(f, "{name}: ")?;
}
param_type.hir_fmt(f)?;
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self {
hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f),
- hir_def::path::GenericArg::Const(c) => write!(f, "{}", c),
+ hir_def::path::GenericArg::Const(c) => write!(f, "{c}"),
hir_def::path::GenericArg::Lifetime(lifetime) => write!(f, "{}", lifetime.name),
}
}
fn debug_goal(goal: &Goal<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
let goal_data = goal.data(Interner);
- Some(write!(fmt, "{:?}", goal_data))
+ Some(write!(fmt, "{goal_data:?}"))
}
fn debug_goals(
let mut error_replacer = ErrorReplacer { vars: 0 };
let value = match t.clone().try_fold_with(&mut error_replacer, DebruijnIndex::INNERMOST) {
Ok(t) => t,
- Err(_) => panic!("Encountered unbound or inference vars in {:?}", t),
+ Err(_) => panic!("Encountered unbound or inference vars in {t:?}"),
};
let kinds = (0..error_replacer.vars).map(|_| {
chalk_ir::CanonicalVarKind::new(
.collect(),
);
} else {
- panic!("unexpected annotation: {}", expected);
+ panic!("unexpected annotation: {expected}");
}
had_annotations = true;
}
expected,
adjustments
.iter()
- .map(|Adjustment { kind, .. }| format!("{:?}", kind))
+ .map(|Adjustment { kind, .. }| format!("{kind:?}"))
.collect::<Vec<_>>()
);
} else {
- panic!("expected {:?} adjustments, found none", expected);
+ panic!("expected {expected:?} adjustments, found none");
}
}
}
db.infer(def);
});
});
- assert!(format!("{:?}", events).contains("infer"))
+ assert!(format!("{events:?}").contains("infer"))
}
let new_text = "
db.infer(def);
});
});
- assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
+ assert!(!format!("{events:?}").contains("infer"), "{events:#?}")
}
}
//^^^^^^^^^^^^^^^^^ RegisterBlock
}
"#;
- let fixture = format!("{}\n//- /foo.rs\n{}", fixture, data);
+ let fixture = format!("{fixture}\n//- /foo.rs\n{data}");
{
let _b = bench("include macro");
let trait_ref = projection_ty.trait_ref(self.0);
let trait_params = trait_ref.substitution.as_slice(Interner);
let self_ty = trait_ref.self_type_parameter(Interner);
- write!(fmt, "<{:?} as {}", self_ty, trait_name)?;
+ write!(fmt, "<{self_ty:?} as {trait_name}")?;
if trait_params.len() > 1 {
write!(
fmt,
"<{}>",
- trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
+ trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))),
)?;
}
write!(fmt, ">::{}", type_alias_data.name)?;
write!(
fmt,
"<{}>",
- proj_params.iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
+ proj_params.iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))),
)?;
}
}
};
match def {
- CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name),
+ CallableDefId::FunctionId(_) => write!(fmt, "{{fn {name}}}"),
CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
- write!(fmt, "{{ctor {}}}", name)
+ write!(fmt, "{{ctor {name}}}")
}
}
}
let mut solve = || {
let _ctx = if is_chalk_debug() || is_chalk_print() {
- Some(panic_context::enter(format!("solving {:?}", goal)))
+ Some(panic_context::enter(format!("solving {goal:?}")))
} else {
None
};
let modpath = {
// FIXME: this is not how we should get a mod path here
- let ast_path = ast::SourceFile::parse(&format!("type T = {};", link))
+ let ast_path = ast::SourceFile::parse(&format!("type T = {link};"))
.syntax_node()
.descendants()
.find_map(ast::Path::cast)?;
}
}
match name {
- Some(name) => write!(f, "{}: ", name)?,
+ Some(name) => write!(f, "{name}: ")?,
None => f.write_str("_: ")?,
}
// FIXME: Use resolved `param.ty` or raw `type_ref`?
continue;
}
delim(f)?;
- write!(f, "{}", name)?;
+ write!(f, "{name}")?;
if let Some(default) = &ty.default {
f.write_str(" = ")?;
default.hir_fmt(f)?;
}
TypeOrConstParamData::ConstParamData(c) => {
delim(f)?;
- write!(f, "const {}: ", name)?;
+ write!(f, "const {name}: ")?;
c.ty.hir_fmt(f)?;
}
}
WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
WherePredicateTypeTarget::TypeOrConstParam(id) => {
match ¶ms.type_or_consts[*id].name() {
- Some(name) => write!(f, "{}", name),
+ Some(name) => write!(f, "{name}"),
None => f.write_str("{unnamed}"),
}
}
if idx != 0 {
f.write_str(", ")?;
}
- write!(f, "{}", lifetime)?;
+ write!(f, "{lifetime}")?;
}
f.write_str("> ")?;
write_target(target, f)?;
let data = f.db.const_data(self.id);
f.write_str("const ")?;
match &data.name {
- Some(name) => write!(f, "{}: ", name)?,
+ Some(name) => write!(f, "{name}: ")?,
None => f.write_str("_: ")?,
}
data.type_ref.hir_fmt(f)?;
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
// FIXME: Module doesn't have visibility saved in data.
match self.name(f.db) {
- Some(name) => write!(f, "mod {}", name),
+ Some(name) => write!(f, "mod {name}"),
None if self.is_crate_root(f.db) => match self.krate(f.db).display_name(f.db) {
- Some(name) => write!(f, "extern crate {}", name),
+ Some(name) => write!(f, "extern crate {name}"),
None => f.write_str("extern crate {unknown}"),
},
None => f.write_str("mod {unnamed}"),
.doc_comments_and_attrs()
.nth((*invoc_attr_index) as usize)
.and_then(Either::left)
- .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index));
+ .unwrap_or_else(|| panic!("cannot find attribute #{invoc_attr_index}"));
(
ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
self.cache
.borrow()
.keys()
- .map(|it| format!("{:?}", it))
+ .map(|it| format!("{it:?}"))
.collect::<Vec<_>>()
.join(", ")
)
acc.add_group(
&GroupLabel("Generate delegate methods…".to_owned()),
AssistId("generate_delegate_methods", AssistKind::Generate),
- format!("Generate delegate for `{}.{}()`", field_name, method.name(ctx.db())),
+ format!("Generate delegate for `{field_name}.{}()`", method.name(ctx.db())),
target,
|builder| {
// Create the function
assist_description,
target,
|builder| {
- let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let vis = parent_enum.visibility().map_or(String::new(), |v| format!("{v} "));
let field_type_syntax = field_type.syntax();
) -> String {
let mut buf = String::with_capacity(512);
- let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{} ", v));
+ let vis = info.strukt.visibility().map_or(String::new(), |v| format!("{v} "));
let (ty, body) = if info.mutable {
(
format!("&mut {}", record_field_info.field_ty),
}
FileSystemEdit::MoveDir { src, src_id, dst } => {
// temporary placeholder for MoveDir since we are not using MoveDir in ide assists yet.
- (dst, format!("{:?}\n{:?}", src_id, src))
+ (dst, format!("{src_id:?}\n{src:?}"))
}
};
let sr = db.file_source_root(dst.anchor);
for assist in assists.iter() {
for (idx, section) in assist.sections.iter().enumerate() {
let test_id =
- if idx == 0 { assist.id.clone() } else { format!("{}_{}", &assist.id, idx) };
+ if idx == 0 { assist.id.clone() } else { format!("{}_{idx}", &assist.id) };
let test = format!(
r######"
#[test]
fn hide_hash_comments(text: &str) -> String {
text.split('\n') // want final newline
.filter(|&it| !(it.starts_with("# ") || it == "#"))
- .map(|it| format!("{}\n", it))
+ .map(|it| format!("{it}\n"))
.collect()
}
it
}
})
- .map(|it| format!("{}\n", it))
+ .map(|it| format!("{it}\n"))
.collect()
}
if incomplete_let && snippet.ends_with('}') {
// complete block expression snippets with a trailing semicolon, if inside an incomplete let
cov_mark::hit!(let_semi);
- item.insert_snippet(cap, format!("{};", snippet));
+ item.insert_snippet(cap, format!("{snippet};"));
} else {
item.insert_snippet(cap, snippet);
}
pub(crate) fn complete_cfg(acc: &mut Completions, ctx: &CompletionContext<'_>) {
let add_completion = |item: &str| {
let mut completion = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), item);
- completion.insert_text(format!(r#""{}""#, item));
+ completion.insert_text(format!(r#""{item}""#));
acc.add(completion.build());
};
Some("target_vendor") => KNOWN_VENDOR.iter().copied().for_each(add_completion),
Some("target_endian") => ["little", "big"].into_iter().for_each(add_completion),
Some(name) => ctx.krate.potential_cfg(ctx.db).get_cfg_values(name).cloned().for_each(|s| {
- let insert_text = format!(r#""{}""#, s);
+ let insert_text = format!(r#""{s}""#);
let mut item = CompletionItem::new(SymbolKind::BuiltinAttr, ctx.source_range(), s);
item.insert_text(insert_text);
continue;
}
let label = match qual {
- Some(qual) if !is_qualified => format!("{}::{}", qual, name),
+ Some(qual) if !is_qualified => format!("{qual}::{name}"),
_ => name.to_owned(),
};
let mut item = CompletionItem::new(SymbolKind::Attribute, ctx.source_range(), label);
"#;
let completions = completion_list(fixture);
- assert!(completions.is_empty(), "Completions weren't empty: {}", completions);
+ assert!(completions.is_empty(), "Completions weren't empty: {completions}");
}
#[test]
"#;
let completions = completion_list(fixture);
- assert!(completions.is_empty(), "Completions weren't empty: {}", completions);
+ assert!(completions.is_empty(), "Completions weren't empty: {completions}");
}
#[test]
"#;
let completions = completion_list(fixture);
- assert!(completions.is_empty(), "Completions weren't empty: {}", completions)
+ assert!(completions.is_empty(), "Completions weren't empty: {completions}")
}
}
matches!(prev_token_kind, SyntaxKind::COMMA | SyntaxKind::L_PAREN | SyntaxKind::PIPE);
let leading = if has_leading_comma { "" } else { ", " };
- Some((move |label: &_| (format!("{}{}{}", leading, label, trailing)), param.text_range()))
+ Some((move |label: &_| (format!("{leading}{label}{trailing}")), param.text_range()))
}
};
let mut item = CompletionItem::new(completion_kind, replacement_range, label);
- item.lookup_by(format!("fn {}", fn_name))
+ item.lookup_by(format!("fn {fn_name}"))
.set_documentation(func.docs(ctx.db))
.set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() });
let function_decl = function_declaration(&transformed_fn, source.file_id.is_macro());
match ctx.config.snippet_cap {
Some(cap) => {
- let snippet = format!("{} {{\n $0\n}}", function_decl);
+ let snippet = format!("{function_decl} {{\n $0\n}}");
item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet));
}
None => {
- let header = format!("{} {{", function_decl);
+ let header = format!("{function_decl} {{");
item.text_edit(TextEdit::replace(replacement_range, header));
}
};
) {
let alias_name = type_alias.name(ctx.db).unescaped().to_smol_str();
- let label = format!("type {} =", alias_name);
+ let label = format!("type {alias_name} =");
let mut item = CompletionItem::new(SymbolKind::TypeAlias, replacement_range, label);
- item.lookup_by(format!("type {}", alias_name))
+ item.lookup_by(format!("type {alias_name}"))
.set_documentation(type_alias.docs(ctx.db))
.set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() });
match ctx.config.snippet_cap {
Some(cap) => {
- let snippet = format!("{}$0;", decl);
+ let snippet = format!("{decl}$0;");
item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet));
}
None => {
};
let label = make_const_compl_syntax(&transformed_const, source.file_id.is_macro());
- let replacement = format!("{} ", label);
+ let replacement = format!("{label} ");
let mut item = CompletionItem::new(SymbolKind::Const, replacement_range, label);
- item.lookup_by(format!("const {}", const_name))
+ item.lookup_by(format!("const {const_name}"))
.set_documentation(const_.docs(ctx.db))
.set_relevance(CompletionRelevance {
is_item_from_trait: true,
match ctx.config.snippet_cap {
Some(cap) => item.snippet_edit(
cap,
- TextEdit::replace(replacement_range, format!("{}$0;", replacement)),
+ TextEdit::replace(replacement_range, format!("{replacement}$0;")),
),
None => item.text_edit(TextEdit::replace(replacement_range, replacement)),
};
let mut item = postfix_snippet(
"drop",
"fn drop(&mut self)",
- &format!("drop($0{})", receiver_text),
+ &format!("drop($0{receiver_text})"),
);
item.set_documentation(drop_fn.docs(ctx.db));
item.add_to(acc);
postfix_snippet(
"ifl",
"if let Ok {}",
- &format!("if let Ok($1) = {} {{\n $0\n}}", receiver_text),
+ &format!("if let Ok($1) = {receiver_text} {{\n $0\n}}"),
)
.add_to(acc);
postfix_snippet(
"while",
"while let Ok {}",
- &format!("while let Ok($1) = {} {{\n $0\n}}", receiver_text),
+ &format!("while let Ok($1) = {receiver_text} {{\n $0\n}}"),
)
.add_to(acc);
}
postfix_snippet(
"ifl",
"if let Some {}",
- &format!("if let Some($1) = {} {{\n $0\n}}", receiver_text),
+ &format!("if let Some($1) = {receiver_text} {{\n $0\n}}"),
)
.add_to(acc);
postfix_snippet(
"while",
"while let Some {}",
- &format!("while let Some($1) = {} {{\n $0\n}}", receiver_text),
+ &format!("while let Some($1) = {receiver_text} {{\n $0\n}}"),
)
.add_to(acc);
}
}
} else if receiver_ty.is_bool() || receiver_ty.is_unknown() {
- postfix_snippet("if", "if expr {}", &format!("if {} {{\n $0\n}}", receiver_text))
+ postfix_snippet("if", "if expr {}", &format!("if {receiver_text} {{\n $0\n}}"))
.add_to(acc);
- postfix_snippet(
- "while",
- "while expr {}",
- &format!("while {} {{\n $0\n}}", receiver_text),
- )
- .add_to(acc);
- postfix_snippet("not", "!expr", &format!("!{}", receiver_text)).add_to(acc);
+ postfix_snippet("while", "while expr {}", &format!("while {receiver_text} {{\n $0\n}}"))
+ .add_to(acc);
+ postfix_snippet("not", "!expr", &format!("!{receiver_text}")).add_to(acc);
} else if let Some(trait_) = ctx.famous_defs().core_iter_IntoIterator() {
if receiver_ty.impls_trait(ctx.db, trait_, &[]) {
postfix_snippet(
"for",
"for ele in expr {}",
- &format!("for ele in {} {{\n $0\n}}", receiver_text),
+ &format!("for ele in {receiver_text} {{\n $0\n}}"),
)
.add_to(acc);
}
}
- postfix_snippet("ref", "&expr", &format!("&{}", receiver_text)).add_to(acc);
- postfix_snippet("refm", "&mut expr", &format!("&mut {}", receiver_text)).add_to(acc);
+ postfix_snippet("ref", "&expr", &format!("&{receiver_text}")).add_to(acc);
+ postfix_snippet("refm", "&mut expr", &format!("&mut {receiver_text}")).add_to(acc);
// The rest of the postfix completions create an expression that moves an argument,
// so it's better to consider references now to avoid breaking the compilation
postfix_snippet(
"match",
"match expr {}",
- &format!("match {} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}", receiver_text),
+ &format!("match {receiver_text} {{\n Ok(${{1:_}}) => {{$2}},\n Err(${{3:_}}) => {{$0}},\n}}"),
)
.add_to(acc);
}
postfix_snippet(
"match",
"match expr {}",
- &format!("match {} {{\n ${{1:_}} => {{$0}},\n}}", receiver_text),
+ &format!("match {receiver_text} {{\n ${{1:_}} => {{$0}},\n}}"),
)
.add_to(acc);
}
}
- postfix_snippet("box", "Box::new(expr)", &format!("Box::new({})", receiver_text)).add_to(acc);
- postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({})", receiver_text)).add_to(acc); // fixme
- postfix_snippet("dbgr", "dbg!(&expr)", &format!("dbg!(&{})", receiver_text)).add_to(acc);
- postfix_snippet("call", "function(expr)", &format!("${{1}}({})", receiver_text)).add_to(acc);
+ postfix_snippet("box", "Box::new(expr)", &format!("Box::new({receiver_text})")).add_to(acc);
+ postfix_snippet("dbg", "dbg!(expr)", &format!("dbg!({receiver_text})")).add_to(acc); // fixme
+ postfix_snippet("dbgr", "dbg!(&expr)", &format!("dbg!(&{receiver_text})")).add_to(acc);
+ postfix_snippet("call", "function(expr)", &format!("${{1}}({receiver_text})")).add_to(acc);
if let Some(parent) = dot_receiver.syntax().parent().and_then(|p| p.parent()) {
if matches!(parent.kind(), STMT_LIST | EXPR_STMT) {
- postfix_snippet("let", "let", &format!("let $0 = {};", receiver_text)).add_to(acc);
- postfix_snippet("letm", "let mut", &format!("let mut $0 = {};", receiver_text))
+ postfix_snippet("let", "let", &format!("let $0 = {receiver_text};")).add_to(acc);
+ postfix_snippet("letm", "let mut", &format!("let mut $0 = {receiver_text};"))
.add_to(acc);
}
}
let body = snippet.postfix_snippet(receiver_text);
let mut builder =
postfix_snippet(trigger, snippet.description.as_deref().unwrap_or_default(), &body);
- builder.documentation(Documentation::new(format!("```rust\n{}\n```", body)));
+ builder.documentation(Documentation::new(format!("```rust\n{body}\n```")));
for import in imports.into_iter() {
builder.add_import(import);
}
if let Ok((out, exprs)) = parse_format_exprs(receiver_text.text()) {
let exprs = with_placeholders(exprs);
for (label, macro_name) in KINDS {
- let snippet = format!(r#"{}({}, {})"#, macro_name, out, exprs.join(", "));
+ let snippet = format!(r#"{macro_name}({out}, {})"#, exprs.join(", "));
postfix_snippet(label, macro_name, &snippet).add_to(acc);
}
for (kind, input, output) in test_vector {
let (parsed_string, exprs) = parse_format_exprs(input).unwrap();
let exprs = with_placeholders(exprs);
- let snippet = format!(r#"{}("{}", {})"#, kind, parsed_string, exprs.join(", "));
+ let snippet = format!(r#"{kind}("{parsed_string}", {})"#, exprs.join(", "));
assert_eq!(&snippet, output);
}
}
};
let body = snip.snippet();
let mut builder = snippet(ctx, cap, trigger, &body);
- builder.documentation(Documentation::new(format!("```rust\n{}\n```", body)));
+ builder.documentation(Documentation::new(format!("```rust\n{body}\n```")));
for import in imports.into_iter() {
builder.add_import(import);
}
let name =
completion_context.expected_name.map_or_else(|| "?".to_owned(), |name| name.to_string());
- expect.assert_eq(&format!("ty: {}, name: {}", ty, name));
+ expect.assert_eq(&format!("ty: {ty}, name: {name}"));
}
#[test]
// snippets can have multiple imports, but normal completions only have up to one
if let Some(original_path) = import_edit.original_path.as_ref() {
lookup = lookup.or_else(|| Some(label.clone()));
- label = SmolStr::from(format!("{} (use {})", label, original_path));
+ label = SmolStr::from(format!("{label} (use {original_path})"));
}
} else if let Some(trait_name) = self.trait_name {
- label = SmolStr::from(format!("{} (as {})", label, trait_name));
+ label = SmolStr::from(format!("{label} (as {trait_name})"));
}
let text_edit = match self.text_edit {
}
fn field_with_receiver(receiver: Option<&hir::Name>, field_name: &str) -> SmolStr {
- receiver
- .map_or_else(|| field_name.into(), |receiver| format!("{}.{}", receiver, field_name).into())
+ receiver.map_or_else(|| field_name.into(), |receiver| format!("{receiver}.{field_name}").into())
}
pub(crate) fn render_tuple_field(
item.lookup_by(name.clone())
.label(SmolStr::from_iter([&name, "<…>"]))
.trigger_call_info()
- .insert_snippet(cap, format!("{}<$0>", local_name));
+ .insert_snippet(cap, format!("{local_name}<$0>"));
}
}
}
let tag = it.kind().tag();
let relevance = display_relevance(it.relevance());
- items.push(format!("{} {} {}\n", tag, it.label(), relevance));
+ items.push(format!("{tag} {} {relevance}\n", it.label()));
if let Some((mutability, _offset, relevance)) = it.ref_match() {
let label = format!("&{}{}", mutability.as_keyword_for_ref(), it.label());
let relevance = display_relevance(relevance);
- items.push(format!("{} {} {}\n", tag, label, relevance));
+ items.push(format!("{tag} {label} {relevance}\n"));
}
items
.filter_map(|(cond, desc)| if cond { Some(desc) } else { None })
.join("+");
- format!("[{}]", relevance_factors)
+ format!("[{relevance_factors}]")
}
}
let (call, escaped_call) = match &func_kind {
FuncKind::Method(_, Some(receiver)) => (
format!("{}.{}", receiver.unescaped(), name.unescaped()).into(),
- format!("{}.{}", receiver, name).into(),
+ format!("{receiver}.{name}").into(),
),
_ => (name.unescaped().to_smol_str(), name.to_smol_str()),
};
cov_mark::hit!(inserts_parens_for_function_calls);
let (snippet, label_suffix) = if self_param.is_none() && params.is_empty() {
- (format!("{}()$0", escaped_name), "()")
+ (format!("{escaped_name}()$0"), "()")
} else {
builder.trigger_call_info();
let snippet = if let Some(CallableSnippets::FillArguments) = ctx.config.callable {
let smol_str = n.to_smol_str();
let text = smol_str.as_str().trim_start_matches('_');
let ref_ = ref_of_param(ctx, text, param.ty());
- f(&format_args!("${{{}:{}{}}}", index + offset, ref_, text))
+ f(&format_args!("${{{}:{ref_}{text}}}", index + offset))
}
None => {
let name = match param.ty().as_adt() {
.map(|s| to_lower_snake_case(s.as_str()))
.unwrap_or_else(|| "_".to_string()),
};
- f(&format_args!("${{{}:{}}}", index + offset, name))
+ f(&format_args!("${{{}:{name}}}", index + offset))
}
}
});
)
}
None => {
- format!("{}({})$0", escaped_name, function_params_snippet)
+ format!("{escaped_name}({function_params_snippet})$0")
}
}
} else {
cov_mark::hit!(suppress_arg_snippets);
- format!("{}($0)", escaped_name)
+ format!("{escaped_name}($0)")
};
(snippet, "(…)")
match ctx.snippet_cap() {
Some(cap) if needs_bang && !has_call_parens => {
- let snippet = format!("{}!{}$0{}", escaped_name, bra, ket);
+ let snippet = format!("{escaped_name}!{bra}$0{ket}");
let lookup = banged_name(&name);
item.insert_snippet(cap, snippet).lookup_by(lookup);
}
});
RenderedLiteral {
- literal: format!("{} {{ {} }}", path, completions),
- detail: format!("{} {{ {} }}", path, types),
+ literal: format!("{path} {{ {completions} }}"),
+ detail: format!("{path} {{ {types} }}"),
}
}
path: &str,
) -> RenderedLiteral {
if snippet_cap.is_none() {
- return RenderedLiteral { literal: format!("{}", path), detail: format!("{}", path) };
+ return RenderedLiteral { literal: format!("{path}"), detail: format!("{path}") };
}
let completions = fields.iter().enumerate().format_with(", ", |(idx, _), f| {
if snippet_cap.is_some() {
let types = fields.iter().format_with(", ", |field, f| f(&field.ty(db).display(db)));
RenderedLiteral {
- literal: format!("{}({})", path, completions),
- detail: format!("{}({})", path, types),
+ literal: format!("{path}({completions})"),
+ detail: format!("{path}({types})"),
}
}
) -> Option<(Box<[GreenNode]>, String, Option<Box<str>>)> {
let mut imports = Vec::with_capacity(requires.len());
for path in requires.iter() {
- let use_path = ast::SourceFile::parse(&format!("use {};", path))
+ let use_path = ast::SourceFile::parse(&format!("use {path};"))
.syntax_node()
.descendants()
.find_map(ast::Path::cast)?;
.into_iter()
.map(|it| {
let tag = it.kind().tag();
- let var_name = format!("{} {}", tag, it.label());
+ let var_name = format!("{tag} {}", it.label());
let mut buf = var_name;
if let Some(detail) = it.detail() {
let width = label_width.saturating_sub(monospace_width(it.label()));
.iter()
.filter(|it| it.lookup() == what)
.collect_tuple()
- .unwrap_or_else(|| panic!("can't find {:?} completion in {:#?}", what, completions));
+ .unwrap_or_else(|| panic!("can't find {what:?} completion in {completions:#?}"));
let mut actual = db.file_text(position.file_id).to_string();
let mut combined_edit = completion.text_edit().to_owned();
use crate::tests::{check_edit, completion_list, BASE_ITEMS_FIXTURE};
fn check(ra_fixture: &str, expect: Expect) {
- let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}{ra_fixture}"));
expect.assert_eq(&actual)
}
use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
fn check(ra_fixture: &str, expect: Expect) {
- let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}{ra_fixture}"));
expect.assert_eq(&actual)
}
use crate::tests::{check_edit, completion_list, BASE_ITEMS_FIXTURE};
fn check(ra_fixture: &str, expect: Expect) {
- let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}{ra_fixture}"));
expect.assert_eq(&actual)
}
}
fn check(ra_fixture: &str, expect: Expect) {
- let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}\n{ra_fixture}"));
expect.assert_eq(&actual)
}
use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
fn check(ra_fixture: &str, expect: Expect) {
- let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}\n{ra_fixture}"));
expect.assert_eq(&actual)
}
use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
fn check(ra_fixture: &str, expect: Expect) {
- let actual = completion_list(&format!("{}\n{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ let actual = completion_list(&format!("{BASE_ITEMS_FIXTURE}\n{ra_fixture}"));
expect.assert_eq(&actual)
}
"RefactorExtract" => Ok(AssistKind::RefactorExtract),
"RefactorInline" => Ok(AssistKind::RefactorInline),
"RefactorRewrite" => Ok(AssistKind::RefactorRewrite),
- unknown => Err(format!("Unknown AssistKind: '{}'", unknown)),
+ unknown => Err(format!("Unknown AssistKind: '{unknown}'")),
}
}
}
let expected_import_end = if item_as_assoc(db, original_item).is_some() {
unresolved_qualifier.to_string()
} else {
- format!("{}::{}", unresolved_qualifier, item_name(db, original_item)?)
+ format!("{unresolved_qualifier}::{}", item_name(db, original_item)?)
};
if !import_path_string.contains(unresolved_first_segment)
|| !import_path_string.ends_with(&expected_import_end)
.and_then(|it| ImportScope::find_insert_use_container(&it, sema))
.or_else(|| ImportScope::from(syntax))
.unwrap();
- let path = ast::SourceFile::parse(&format!("use {};", path))
+ let path = ast::SourceFile::parse(&format!("use {path};"))
.tree()
.syntax()
.descendants()
// Module exists in a named file
if !is_mod_rs {
- let path = format!("{}.rs", new_name);
+ let path = format!("{new_name}.rs");
let dst = AnchoredPathBuf { anchor, path };
source_change.push_file_system_edit(FileSystemEdit::MoveFile { src: anchor, dst })
}
// - Module has submodules defined in separate files
let dir_paths = match (is_mod_rs, has_detached_child, module.name(sema.db)) {
// Go up one level since the anchor is inside the dir we're trying to rename
- (true, _, Some(mod_name)) => {
- Some((format!("../{}", mod_name), format!("../{}", new_name)))
- }
+ (true, _, Some(mod_name)) => Some((format!("../{mod_name}"), format!("../{new_name}"))),
// The anchor is on the same level as target dir
(false, true, Some(mod_name)) => Some((mod_name.to_string(), new_name.to_string())),
_ => None,
// FIXME: instead of splitting the shorthand, recursively trigger a rename of the
// other name https://github.com/rust-lang/rust-analyzer/issues/6547
- edit.insert(ident_pat.syntax().text_range().start(), format!("{}: ", new_name));
+ edit.insert(ident_pat.syntax().text_range().start(), format!("{new_name}: "));
return true;
}
}
// Foo { field } -> Foo { new_name: field }
// ^ insert `new_name: `
let offset = name_ref.syntax().text_range().start();
- edit.insert(offset, format!("{}: ", new_name));
+ edit.insert(offset, format!("{new_name}: "));
return true;
}
(None, Some(_)) if matches!(def, Definition::Local(_)) => {
// Foo { field } -> Foo { field: new_name }
// ^ insert `: new_name`
let offset = name_ref.syntax().text_range().end();
- edit.insert(offset, format!(": {}", new_name));
+ edit.insert(offset, format!(": {new_name}"));
return true;
}
_ => (),
}
pub fn crate_symbols(db: &RootDatabase, krate: Crate, query: Query) -> Vec<FileSymbol> {
- let _p = profile::span("crate_symbols").detail(|| format!("{:?}", query));
+ let _p = profile::span("crate_symbols").detail(|| format!("{query:?}"));
let modules = krate.modules(db);
let indices: Vec<_> = modules
fn check(input: &str, expect: &Expect) {
let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_string(), vec![]));
let outcome_repr = if !exprs.is_empty() {
- format!("{}; {}", output, with_placeholders(exprs).join(", "))
+ format!("{output}; {}", with_placeholders(exprs).join(", "))
} else {
output
};
buf.push_str(r#"pub const CLIPPY_LINT_GROUPS: &[LintGroup] = &["#);
for (id, children) in clippy_groups {
- let children = children.iter().map(|id| format!("clippy::{}", id)).collect::<Vec<_>>();
+ let children = children.iter().map(|id| format!("clippy::{id}")).collect::<Vec<_>>();
if !children.is_empty() {
- let lint_ident = format!("clippy::{}", id);
+ let lint_ident = format!("clippy::{id}");
let description = format!("lint group for: {}", children.iter().join(", "));
push_lint_group(buf, &lint_ident, &description, &children);
}
push_lint_completion(buf, label, description);
- let children = format!("&[{}]", children.iter().map(|it| format!("\"{}\"", it)).join(", "));
+ let children = format!("&[{}]", children.iter().map(|it| format!("\"{it}\"")).join(", "));
format_to!(
buf,
r###"
d: &hir::MismatchedArgCount,
) -> Diagnostic {
let s = if d.expected == 1 { "" } else { "s" };
- let message = format!("expected {} argument{}, found {}", d.expected, s, d.found);
+ let message = format!("expected {} argument{s}, found {}", d.expected, d.found);
Diagnostic::new("mismatched-arg-count", message, invalid_args_range(ctx, d))
}
let mut new_field = new_field.to_string();
if usage_file_id != def_file_id {
- new_field = format!("pub(crate) {}", new_field);
+ new_field = format!("pub(crate) {new_field}");
}
- new_field = format!("\n{}{}", indent, new_field);
+ new_field = format!("\n{indent}{new_field}");
let needs_comma = !last_field_syntax.to_string().ends_with(',');
if needs_comma {
- new_field = format!(",{}", new_field);
+ new_field = format!(",{new_field}");
}
let source_change = SourceChange::from_text_edit(
}
let mut builder = TextEdit::builder();
- builder.insert(expr.syntax().text_range().start(), format!("{}(", variant_name));
+ builder.insert(expr.syntax().text_range().start(), format!("{variant_name}("));
builder.insert(expr.syntax().text_range().end(), ")".to_string());
let source_change =
SourceChange::from_text_edit(d.expr.file_id.original_file(ctx.sema.db), builder.finish());
- let name = format!("Wrap in {}", variant_name);
+ let name = format!("Wrap in {variant_name}");
acc.push(fix("wrap_in_constructor", &name, source_change, expr_range));
Some(())
}
// `submod/bla.rs` -> `submod.rs`
let parent_mod = (|| {
let (name, _) = parent.name_and_extension()?;
- parent.parent()?.join(&format!("{}.rs", name))
+ parent.parent()?.join(&format!("{name}.rs"))
})();
paths.extend(parent_mod);
paths
matches!(item, ast::Item::Module(m) if m.item_list().is_none())
}
- let mod_decl = format!("mod {};", new_mod_name);
- let pub_mod_decl = format!("pub mod {};", new_mod_name);
+ let mod_decl = format!("mod {new_mod_name};");
+ let pub_mod_decl = format!("pub mod {new_mod_name};");
let ast: ast::SourceFile = db.parse(parent_file_id).tree();
Some(last) => {
cov_mark::hit!(unlinked_file_append_to_existing_mods);
let offset = last.syntax().text_range().end();
- mod_decl_builder.insert(offset, format!("\n{}", mod_decl));
- pub_mod_decl_builder.insert(offset, format!("\n{}", pub_mod_decl));
+ mod_decl_builder.insert(offset, format!("\n{mod_decl}"));
+ pub_mod_decl_builder.insert(offset, format!("\n{pub_mod_decl}"));
}
None => {
// Prepend before the first item in the file.
Some(item) => {
cov_mark::hit!(unlinked_file_prepend_before_first_item);
let offset = item.syntax().text_range().start();
- mod_decl_builder.insert(offset, format!("{}\n\n", mod_decl));
- pub_mod_decl_builder.insert(offset, format!("{}\n\n", pub_mod_decl));
+ mod_decl_builder.insert(offset, format!("{mod_decl}\n\n"));
+ pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n\n"));
}
None => {
// No items in the file, so just append at the end.
cov_mark::hit!(unlinked_file_empty_file);
let offset = ast.syntax().text_range().end();
- mod_decl_builder.insert(offset, format!("{}\n", mod_decl));
- pub_mod_decl_builder.insert(offset, format!("{}\n", pub_mod_decl));
+ mod_decl_builder.insert(offset, format!("{mod_decl}\n"));
+ pub_mod_decl_builder.insert(offset, format!("{pub_mod_decl}\n"));
}
}
}
Some(vec![
fix(
"add_mod_declaration",
- &format!("Insert `{}`", mod_decl),
+ &format!("Insert `{mod_decl}`"),
SourceChange::from_text_edit(parent_file_id, mod_decl_builder.finish()),
trigger_range,
),
fix(
"add_pub_mod_declaration",
- &format!("Insert `{}`", pub_mod_decl),
+ &format!("Insert `{pub_mod_decl}`"),
SourceChange::from_text_edit(parent_file_id, pub_mod_decl_builder.finish()),
trigger_range,
),
let bang = if d.is_bang { "!" } else { "" };
Diagnostic::new(
"unresolved-macro-call",
- format!("unresolved macro `{}{}`", d.path, bang),
+ format!("unresolved macro `{}{bang}`", d.path),
display_range,
)
.experimental()
"unresolved-module",
match &*d.candidates {
[] => "unresolved module".to_string(),
- [candidate] => format!("unresolved module, can't find module file: {}", candidate),
+ [candidate] => format!("unresolved module, can't find module file: {candidate}"),
[candidates @ .., last] => {
format!(
"unresolved module, can't find module file: {}, or {}",
};
let message = match &d.macro_name {
- Some(name) => format!("proc macro `{}` not expanded", name),
+ Some(name) => format!("proc macro `{name}` not expanded"),
None => "proc macro not expanded".to_string(),
};
let severity = if config_enabled { Severity::Error } else { Severity::WeakWarning };
// [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
res.extend(
parse.errors().iter().take(128).map(|err| {
- Diagnostic::new("syntax-error", format!("Syntax Error: {}", err), err.range())
+ Diagnostic::new("syntax-error", format!("Syntax Error: {err}"), err.range())
}),
);
)
.pop()
.unwrap();
- assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {:?}", diagnostic);
+ assert!(diagnostic.fixes.is_none(), "got a fix when none was expected: {diagnostic:?}");
}
pub(crate) fn check_expect(ra_fixture: &str, expect: Expect) {
for block in comment_blocks {
let id = block.id;
if let Err(msg) = is_valid_diagnostic_name(&id) {
- panic!("invalid diagnostic name: {:?}:\n {}", id, msg)
+ panic!("invalid diagnostic name: {id:?}:\n {msg}")
}
let doc = block.contents.join("\n");
let location = sourcegen::Location { file: path.clone(), line: block.line };
impl Placeholder {
fn new(name: SmolStr, constraints: Vec<Constraint>) -> Self {
Self {
- stand_in_name: format!("__placeholder_{}", name),
+ stand_in_name: format!("__placeholder_{name}"),
constraints,
ident: Var(name.to_string()),
}
snippet
);
for (index, d) in debug_info.iter().enumerate() {
- println!("Node #{}\n{:#?}\n", index, d);
+ println!("Node #{index}\n{d:#?}\n");
}
}
let matches = match_finder.matches().flattened().matches;
if !matches.is_empty() {
print_match_debug_info(&match_finder, position.file_id, &matches[0].matched_text());
- panic!("Got {} matches when we expected none: {:#?}", matches.len(), matches);
+ panic!("Got {} matches when we expected none: {matches:#?}", matches.len());
}
}
})?
}
};
- Url::parse(&base).ok()?.join(&format!("{}/", display_name)).ok()
+ Url::parse(&base).ok()?.join(&format!("{display_name}/")).ok()
}
/// Get the filename and extension generated for a symbol by rustdoc.
Some(kw) => {
format!("keyword.{}.html", kw.trim_matches('"'))
}
- None => format!("{}/index.html", name),
+ None => format!("{name}/index.html"),
},
None => String::from("index.html"),
},
fn check(link: &str, expected: Expect) {
let (l, a) = parse_intra_doc_link(link);
- let a = a.map_or_else(String::new, |a| format!(" ({:?})", a));
- expected.assert_eq(&format!("{}{}", l, a));
+ let a = a.map_or_else(String::new, |a| format!(" ({a:?})"));
+ expected.assert_eq(&format!("{l}{a}"));
}
#[test]
.into_iter()
.map(|(_, link, ns)| {
let def = resolve_doc_path_for_def(sema.db, cursor_def, &link, ns)
- .unwrap_or_else(|| panic!("Failed to resolve {}", link));
+ .unwrap_or_else(|| panic!("Failed to resolve {link}"));
let nav_target = def.try_to_nav(sema.db).unwrap();
let range =
FileRange { file_id: nav_target.file_id, range: nav_target.focus_or_full_range() };
let (analysis, position) = fixture::position(ra_fixture);
let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
- assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {:?}", navs)
+ assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {navs:?}")
}
#[test]
.filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config))
.reduce(|mut acc: HoverResult, HoverResult { markup, actions }| {
acc.actions.extend(actions);
- acc.markup = Markup::from(format!("{}\n---\n{}", acc.markup, markup));
+ acc.markup = Markup::from(format!("{}\n---\n{markup}", acc.markup));
acc
})
})
Definition::Variant(it) => label_value_and_docs(db, it, |&it| {
if !it.parent_enum(db).is_data_carrying(db) {
match it.eval(db) {
- Ok(x) => Some(format!("{}", x)),
- Err(_) => it.value(db).map(|x| format!("{:?}", x)),
+ Ok(x) => Some(format!("{x}")),
+ Err(_) => it.value(db).map(|x| format!("{x:?}")),
}
} else {
None
Definition::Const(it) => label_value_and_docs(db, it, |it| {
let body = it.eval(db);
match body {
- Ok(x) => Some(format!("{}", x)),
+ Ok(x) => Some(format!("{x}")),
Err(_) => {
let source = it.source(db)?;
let mut body = source.value.body()?.syntax().clone();
fn render_builtin_attr(db: &RootDatabase, attr: hir::BuiltinAttr) -> Option<Markup> {
let name = attr.name(db);
- let desc = format!("#[{}]", name);
+ let desc = format!("#[{name}]");
let AttributeTemplate { word, list, name_value_str } = match attr.template(db) {
Some(template) => template,
V: Display,
{
let label = if let Some(value) = value_extractor(&def) {
- format!("{} // {}", def.display(db), value)
+ format!("{} // {value}", def.display(db))
} else {
def.display(db).to_string()
};
V: Display,
{
let label = if let Some(value) = value_extractor(&def) {
- format!("{} = {}", def.display(db), value)
+ format!("{} = {value}", def.display(db))
} else {
def.display(db).to_string()
};
} else {
""
};
- format!("{}{}{}: {}", let_kw, is_mut, name, ty)
+ format!("{let_kw}{is_mut}{name}: {ty}")
}
- Either::Right(_) => format!("{}self: {}", is_mut, ty),
+ Either::Right(_) => format!("{is_mut}self: {ty}"),
};
markup(None, desc, None)
}
let content = analysis.db.file_text(position.file_id);
let hovered_element = &content[hover.range];
- let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
expect.assert_eq(&actual)
}
let content = analysis.db.file_text(position.file_id);
let hovered_element = &content[hover.range];
- let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
expect.assert_eq(&actual)
}
let content = analysis.db.file_text(position.file_id);
let hovered_element = &content[hover.range];
- let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ let actual = format!("*{hovered_element}*\n{}\n", hover.info.markup);
expect.assert_eq(&actual)
}
.collect::<Vec<_>>();
expected.sort_by_key(|(range, _)| range.start());
- assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual);
+ assert_eq!(expected, actual, "\nExpected:\n{expected:#?}\n\nActual:\n{actual:#?}");
}
#[track_caller]
let ctor_name = match qual_seg.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
match qual_seg.generic_arg_list().map(|it| it.generic_args()) {
- Some(generics) => format!("{}<{}>", name_ref, generics.format(", ")),
+ Some(generics) => format!("{name_ref}<{}>", generics.format(", ")),
None => name_ref.to_string(),
}
}
.unwrap();
let actual =
inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::<Vec<_>>();
- assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual);
+ assert_eq!(expected, actual, "\nExpected:\n{expected:#?}\n\nActual:\n{actual:#?}");
}
#[test]
self.text.as_str()
}
pub fn fenced_block(contents: &impl fmt::Display) -> Markup {
- format!("```rust\n{}\n```", contents).into()
+ format!("```rust\n{contents}\n```").into()
}
}
fn no_moniker(ra_fixture: &str) {
let (analysis, position) = fixture::position(ra_fixture);
if let Some(x) = analysis.moniker(position).unwrap() {
- assert_eq!(x.info.len(), 0, "Moniker founded but no moniker expected: {:?}", x);
+ assert_eq!(x.info.len(), 0, "Moniker found but no moniker expected: {x:?}");
}
}
self.full_range
);
if let Some(focus_range) = self.focus_range {
- buf.push_str(&format!(" {:?}", focus_range))
+ buf.push_str(&format!(" {focus_range:?}"))
}
if let Some(container_name) = &self.container_name {
- buf.push_str(&format!(" {}", container_name))
+ buf.push_str(&format!(" {container_name}"))
}
buf
}
let (analysis, position) = fixture::position(ra_fixture_before);
let rename_result = analysis
.rename(position, new_name)
- .unwrap_or_else(|err| panic!("Rename to '{}' was cancelled: {}", new_name, err));
+ .unwrap_or_else(|err| panic!("Rename to '{new_name}' was cancelled: {err}"));
match rename_result {
Ok(source_change) => {
let mut text_edit_builder = TextEdit::builder();
.collect::<String>();
assert_eq!(error_message.trim(), err.to_string());
} else {
- panic!("Rename to '{}' failed unexpectedly: {}", new_name, err)
+ panic!("Rename to '{new_name}' failed unexpectedly: {err}")
}
}
};
let (analysis, position) = fixture::position(ra_fixture);
let result = analysis
.prepare_rename(position)
- .unwrap_or_else(|err| panic!("PrepareRename was cancelled: {}", err));
+ .unwrap_or_else(|err| panic!("PrepareRename was cancelled: {err}"));
match result {
Ok(RangeInfo { range, info: () }) => {
let source = analysis.file_text(position.file_id).unwrap();
- expect.assert_eq(&format!("{:?}: {}", range, &source[range]))
+ expect.assert_eq(&format!("{range:?}: {}", &source[range]))
}
Err(RenameError(err)) => expect.assert_eq(&err),
};
// test package::module::testname
pub fn label(&self, target: Option<String>) -> String {
match &self.kind {
- RunnableKind::Test { test_id, .. } => format!("test {}", test_id),
- RunnableKind::TestMod { path } => format!("test-mod {}", path),
- RunnableKind::Bench { test_id } => format!("bench {}", test_id),
- RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id),
+ RunnableKind::Test { test_id, .. } => format!("test {test_id}"),
+ RunnableKind::TestMod { path } => format!("test-mod {path}"),
+ RunnableKind::Bench { test_id } => format!("bench {test_id}"),
+ RunnableKind::DocTest { test_id, .. } => format!("doctest {test_id}"),
RunnableKind::Bin => {
- target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t))
+ target.map_or_else(|| "run binary".to_string(), |t| format!("run {t}"))
}
}
}
} else {
String::new()
};
- let mut test_id = format!("{}{}", adt_name, params);
+ let mut test_id = format!("{adt_name}{params}");
test_id.retain(|c| c != ' ');
let test_id = TestId::Path(test_id);
for (range, _) in f.tokens {
let x = FileRange { file_id: f.file_id, range };
if !range_set.contains(&x) {
- panic!("additional range {:?}", x);
+ panic!("additional range {x:?}");
}
range_set.remove(&x);
}
}
if !range_set.is_empty() {
- panic!("unfound ranges {:?}", range_set);
+ panic!("unfound ranges {range_set:?}");
}
}
continue;
}
if !range_set.contains(&x) {
- panic!("additional definition {:?}", x);
+ panic!("additional definition {x:?}");
}
range_set.remove(&x);
}
}
if !range_set.is_empty() {
- panic!("unfound definitions {:?}", range_set);
+ panic!("unfound definitions {range_set:?}");
}
}
let crate_graph = db.crate_graph();
for krate in crates {
let display_crate = |krate: CrateId| match &crate_graph[krate].display_name {
- Some(it) => format!("{}({:?})", it, krate),
- None => format!("{:?}", krate),
+ Some(it) => format!("{it}({krate:?})"),
+ None => format!("{krate:?}"),
};
format_to!(buf, "Crate: {}\n", display_crate(krate));
let deps = crate_graph[krate]
let class = r.highlight.to_string().replace('.', " ");
let color = match (rainbow, r.binding_hash) {
(true, Some(hash)) => {
- format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash))
+ format!(" data-binding-hash=\"{hash}\" style=\"color: {};\"", rainbowify(hash))
}
_ => "".into(),
};
}
};
- format!("{:#?}", node)
+ format!("{node:#?}")
} else {
format!("{:#?}", parse.tree().syntax())
}
fn type_char(char_typed: char, ra_fixture_before: &str, ra_fixture_after: &str) {
let actual = do_type_char(char_typed, ra_fixture_before)
- .unwrap_or_else(|| panic!("typing `{}` did nothing", char_typed));
+ .unwrap_or_else(|| panic!("typing `{char_typed}` did nothing"));
assert_eq_text!(ra_fixture_after, &actual);
}
}
let indent = node_indent(file, comment.syntax())?;
- let inserted = format!("\n{}{} $0", indent, prefix);
+ let inserted = format!("\n{indent}{prefix} $0");
let delete = if remove_trailing_whitespace {
let trimmed_len = comment.text().trim_end().len() as u32;
let trailing_whitespace_len = comment.text().len() as u32 - trimmed_len;
let indent = IndentLevel::from_node(block.syntax());
let mut edit = TextEdit::insert(position.offset, format!("\n{}$0", indent + 1));
- edit.union(TextEdit::insert(contents.text_range().end(), format!("\n{}", indent))).ok()?;
+ edit.union(TextEdit::insert(contents.text_range().end(), format!("\n{indent}"))).ok()?;
Some(edit)
}
let indent = IndentLevel::from_node(list.syntax());
let mut edit = TextEdit::insert(position.offset, format!("\n{}$0", indent + 1));
- edit.union(TextEdit::insert(
- list.r_curly_token()?.text_range().start(),
- format!("\n{}", indent),
- ))
- .ok()?;
+ edit.union(TextEdit::insert(list.r_curly_token()?.text_range().start(), format!("\n{indent}")))
+ .ok()?;
Some(edit)
}
.compare_exchange_weak(old_max, other, Ordering::Relaxed, Ordering::Relaxed)
.is_ok()
{
- eprintln!("new max: {}", other);
+ eprintln!("new max: {other}");
}
}
}
try_cnt += 1;
if try_cnt > 100 {
- panic!("invocaton fixture {} cannot be generated.\n", name);
+ panic!("invocation fixture {name} cannot be generated.\n");
}
}
}
}
None => (),
- Some(kind) => panic!("Unhandled kind {:?}", kind),
+ Some(kind) => panic!("Unhandled kind {kind:?}"),
},
Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()),
Op::Repeat { tokens, kind, separator } => {
let char = match token.to_char(conv) {
Some(c) => c,
None => {
- panic!("Token from lexer must be single char: token = {:#?}", token);
+ panic!("Token from lexer must be single char: token = {token:#?}");
}
};
tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })
let spacing = match annotation.as_str() {
"Alone" => Spacing::Alone,
"Joint" => Spacing::Joint,
- a => panic!("unknown annotation: {}", a),
+ a => panic!("unknown annotation: {a}"),
};
(token, spacing)
})
cursor = cursor.bump();
}
- assert!(annotations.is_empty(), "unchecked annotations: {:?}", annotations);
+ assert!(annotations.is_empty(), "unchecked annotations: {annotations:?}");
}
#[test]
},
tt::Leaf::Punct(punct) => {
let kind = SyntaxKind::from_char(punct.char)
- .unwrap_or_else(|| panic!("{:#?} is not a valid punct", punct));
+ .unwrap_or_else(|| panic!("{punct:#?} is not a valid punct"));
res.push(kind);
if punct.spacing == tt::Spacing::Joint {
res.was_joint();
if self.eat(kind) {
return true;
}
- self.error(format!("expected {:?}", kind));
+ self.error(format!("expected {kind:?}"));
false
}
let text = lexed.text(i);
let error = lexed.error(i);
- let error = error.map(|err| format!(" error: {}", err)).unwrap_or_default();
- writeln!(res, "{:?} {:?}{}", kind, text, error).unwrap();
+ let error = error.map(|err| format!(" error: {err}")).unwrap_or_default();
+ writeln!(res, "{kind:?} {text:?}{error}").unwrap();
}
res
}
fn parse_ok() {
for case in TestCase::list("parser/ok") {
let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
- assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual);
+ assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display());
expect_file![case.rast].assert_eq(&actual);
}
}
fn parse_inline_ok() {
for case in TestCase::list("parser/inline/ok") {
let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
- assert!(!errors, "errors in an OK file {}:\n{}", case.rs.display(), actual);
+ assert!(!errors, "errors in an OK file {}:\n{actual}", case.rs.display());
expect_file![case.rast].assert_eq(&actual);
}
}
fn parse_err() {
for case in TestCase::list("parser/err") {
let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
- assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual);
+ assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display());
expect_file![case.rast].assert_eq(&actual)
}
}
fn parse_inline_err() {
for case in TestCase::list("parser/inline/err") {
let (actual, errors) = parse(TopEntryPoint::SourceFile, &case.text);
- assert!(errors, "no errors in an ERR file {}:\n{}", case.rs.display(), actual);
+ assert!(errors, "no errors in an ERR file {}:\n{actual}", case.rs.display());
expect_file![case.rast].assert_eq(&actual)
}
}
crate::StrStep::Token { kind, text } => {
assert!(depth > 0);
len += text.len();
- write!(buf, "{}", indent).unwrap();
- write!(buf, "{:?} {:?}\n", kind, text).unwrap();
+ write!(buf, "{indent}").unwrap();
+ writeln!(buf, "{kind:?} {text:?}").unwrap();
}
crate::StrStep::Enter { kind } => {
assert!(depth > 0 || len == 0);
depth += 1;
- write!(buf, "{}", indent).unwrap();
- write!(buf, "{:?}\n", kind).unwrap();
+ write!(buf, "{indent}").unwrap();
+ writeln!(buf, "{kind:?}").unwrap();
indent.push_str(" ");
}
crate::StrStep::Exit => {
}
crate::StrStep::Error { msg, pos } => {
assert!(depth > 0);
- errors.push(format!("error {}: {}\n", pos, msg))
+ errors.push(format!("error {pos}: {msg}\n"))
}
});
assert_eq!(
for (token, msg) in lexed.errors() {
let pos = lexed.text_start(token);
- errors.push(format!("error {}: {}\n", pos, msg));
+ errors.push(format!("error {pos}: {msg}\n"));
}
let has_errors = !errors.is_empty();
let mut res = Vec::new();
let read_dir = fs::read_dir(&dir)
- .unwrap_or_else(|err| panic!("can't `read_dir` {}: {}", dir.display(), err));
+ .unwrap_or_else(|err| panic!("can't `read_dir` {}: {err}", dir.display()));
for file in read_dir {
let file = file.unwrap();
let path = file.path();
// ok is never actually read, but it needs to be specified to create a Test in existing_tests
let existing = existing_tests(&tests_dir, true);
for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
- panic!("Test is deleted: {}", t);
+ panic!("Test is deleted: {t}");
}
let mut new_idx = existing.len() + 1;
let path = match existing.get(name) {
Some((path, _test)) => path.clone(),
None => {
- let file_name = format!("{:04}_{}.rs", new_idx, name);
+ let file_name = format!("{new_idx:04}_{name}.rs");
new_idx += 1;
tests_dir.join(file_name)
}
let text = fs::read_to_string(&path).unwrap();
let test = Test { name: name.clone(), text, ok };
if let Some(old) = res.insert(name, (path, test)) {
- println!("Duplicate test: {:?}", old);
+ println!("Duplicate test: {old:?}");
}
}
res
let info = version::read_dylib_info(&path)?;
if info.version.0 < 1 || info.version.1 < 47 {
- let msg = format!("proc-macro {} built by {:#?} is not supported by rust-analyzer, please update your Rust version.", path.display(), info);
+ let msg = format!("proc-macro {} built by {info:#?} is not supported by rust-analyzer, please update your Rust version.", path.display());
return Err(io::Error::new(io::ErrorKind::InvalidData, msg));
}
1 => Some(tt::DelimiterKind::Parenthesis),
2 => Some(tt::DelimiterKind::Brace),
3 => Some(tt::DelimiterKind::Bracket),
- other => panic!("bad kind {}", other),
+ other => panic!("bad kind {other}"),
};
SubtreeRepr { id: TokenId(id), kind, tt: [lo, len] }
}
let spacing = match spacing {
0 => tt::Spacing::Alone,
1 => tt::Spacing::Joint,
- other => panic!("bad spacing {}", other),
+ other => panic!("bad spacing {other}"),
};
PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing }
}
})
.into()
}
- other => panic!("bad tag: {}", other),
+ other => panic!("bad tag: {other}"),
}
})
.collect(),
_ => {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
- format!("unsupported metadata version {}", version),
+ format!("unsupported metadata version {version}"),
));
}
}
/// example if it is infinity or NaN this function will panic.
pub fn f32_unsuffixed(n: f32) -> Literal {
if !n.is_finite() {
- panic!("Invalid float literal {}", n);
+ panic!("Invalid float literal {n}");
}
let mut repr = n.to_string();
if !repr.contains('.') {
/// example if it is infinity or NaN this function will panic.
pub fn f32_suffixed(n: f32) -> Literal {
if !n.is_finite() {
- panic!("Invalid float literal {}", n);
+ panic!("Invalid float literal {n}");
}
Literal(bridge::client::Literal::f32(&n.to_string()))
}
/// example if it is infinity or NaN this function will panic.
pub fn f64_unsuffixed(n: f64) -> Literal {
if !n.is_finite() {
- panic!("Invalid float literal {}", n);
+ panic!("Invalid float literal {n}");
}
let mut repr = n.to_string();
if !repr.contains('.') {
/// example if it is infinity or NaN this function will panic.
pub fn f64_suffixed(n: f64) -> Literal {
if !n.is_finite() {
- panic!("Invalid float literal {}", n);
+ panic!("Invalid float literal {n}");
}
Literal(bridge::client::Literal::f64(&n.to_string()))
}
fn f32(&mut self, n: &str) -> Self::Literal {
let n: f32 = n.parse().unwrap();
- let text = format!("{}f32", n);
+ let text = format!("{n}f32");
Literal { text: text.into(), id: tt::TokenId::unspecified() }
}
fn f64(&mut self, n: &str) -> Self::Literal {
let n: f64 = n.parse().unwrap();
- let text = format!("{}f64", n);
+ let text = format!("{n}f64");
Literal { text: text.into(), id: tt::TokenId::unspecified() }
}
for ch in string.chars() {
escaped.extend(ch.escape_debug());
}
- Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+ Literal { text: format!("\"{escaped}\"").into(), id: tt::TokenId::unspecified() }
}
fn character(&mut self, ch: char) -> Self::Literal {
- Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+ Literal { text: format!("'{ch}'").into(), id: tt::TokenId::unspecified() }
}
fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
.map(Into::<char>::into)
.collect::<String>();
- Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+ Literal { text: format!("b\"{string}\"").into(), id: tt::TokenId::unspecified() }
}
fn span(&mut self, literal: &Self::Literal) -> Self::Span {
fn f32(&mut self, n: &str) -> Self::Literal {
let n: f32 = n.parse().unwrap();
- let text = format!("{}f32", n);
+ let text = format!("{n}f32");
Literal { text: text.into(), id: tt::TokenId::unspecified() }
}
fn f64(&mut self, n: &str) -> Self::Literal {
let n: f64 = n.parse().unwrap();
- let text = format!("{}f64", n);
+ let text = format!("{n}f64");
Literal { text: text.into(), id: tt::TokenId::unspecified() }
}
for ch in string.chars() {
escaped.extend(ch.escape_debug());
}
- Literal { text: format!("\"{}\"", escaped).into(), id: tt::TokenId::unspecified() }
+ Literal { text: format!("\"{escaped}\"").into(), id: tt::TokenId::unspecified() }
}
fn character(&mut self, ch: char) -> Self::Literal {
- Literal { text: format!("'{}'", ch).into(), id: tt::TokenId::unspecified() }
+ Literal { text: format!("'{ch}'").into(), id: tt::TokenId::unspecified() }
}
fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
.map(Into::<char>::into)
.collect::<String>();
- Literal { text: format!("b\"{}\"", string).into(), id: tt::TokenId::unspecified() }
+ Literal { text: format!("b\"{string}\"").into(), id: tt::TokenId::unspecified() }
}
fn span(&mut self, literal: &Self::Literal) -> Self::Span {
pub fn expand(&mut self, task: ExpandMacro) -> Result<FlatTree, PanicMessage> {
let expander = self.expander(task.lib.as_ref()).map_err(|err| {
debug_assert!(false, "should list macros before asking to expand");
- PanicMessage(format!("failed to load macro: {}", err))
+ PanicMessage(format!("failed to load macro: {err}"))
})?;
let prev_env = EnvSnapshot::new();
Some(dir) => {
let prev_working_dir = std::env::current_dir().ok();
if let Err(err) = std::env::set_current_dir(&dir) {
- eprintln!("Failed to set the current working dir to {}. Error: {:?}", dir, err)
+ eprintln!("Failed to set the current working dir to {dir}. Error: {err:?}")
}
prev_working_dir
}
}
fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> {
- let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| {
- format!("Failed to get file metadata for {}: {}", path.display(), err)
- })?;
+ let time = fs::metadata(path)
+ .and_then(|it| it.modified())
+ .map_err(|err| format!("Failed to get file metadata for {}: {err}", path.display()))?;
Ok(match self.expanders.entry((path.to_path_buf(), time)) {
- Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| {
- format!("Cannot create expander for {}: {}", path.display(), err)
- })?),
+ Entry::Vacant(v) => {
+ v.insert(dylib::Expander::new(path).map_err(|err| {
+ format!("Cannot create expander for {}: {err}", path.display())
+ })?)
+ }
Entry::Occupied(e) => e.into_mut(),
})
}
.arg("--target-dir")
.arg(&target_dir);
- println!("Running {:?}", cmd);
+ println!("Running {cmd:?}");
let output = cmd.output().unwrap();
if !output.status.success() {
for message in Message::parse_stream(output.stdout.as_slice()) {
if let Message::CompilerArtifact(artifact) = message.unwrap() {
if artifact.target.kind.contains(&"proc-macro".to_string()) {
- let repr = format!("{} {}", name, version);
+ let repr = format!("{name} {version}");
if artifact.package_id.repr.starts_with(&repr) {
artifact_path = Some(PathBuf::from(&artifact.filenames[0]));
}
self.heartbeat(frame.heartbeats);
let avg_span = duration / (frame.heartbeats + 1);
if avg_span > self.filter.heartbeat_longer_than {
- eprintln!("Too few heartbeats {} ({}/{:?})?", label, frame.heartbeats, duration);
+ eprintln!("Too few heartbeats {label} ({}/{duration:?})?", frame.heartbeats);
}
}
out: &mut impl Write,
) {
let current_indent = " ".repeat(level as usize);
- let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {}", it)).unwrap_or_default();
+ let detail = tree[curr].detail.as_ref().map(|it| format!(" @ {it}")).unwrap_or_default();
writeln!(
out,
"{}{} - {}{}",
}
for (child_msg, (duration, count)) in &short_children {
- writeln!(out, " {}{} - {} ({} calls)", current_indent, ms(*duration), child_msg, count)
+ writeln!(out, " {current_indent}{} - {child_msg} ({count} calls)", ms(*duration))
.expect("printing profiling info");
}
let unaccounted = tree[curr].duration - accounted_for;
if tree.children(curr).next().is_some() && unaccounted > longer_than {
- writeln!(out, " {}{} - ???", current_indent, ms(unaccounted))
+ writeln!(out, " {current_indent}{} - ???", ms(unaccounted))
.expect("printing profiling info");
}
}
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.0.as_millis() {
0 => f.write_str(" 0 "),
- n => write!(f, "{:5}ms", n),
+ n => write!(f, "{n:5}ms"),
}
}
}
suffix = "mb";
}
}
- f.pad(&format!("{}{}", value, suffix))
+ f.pad(&format!("{value}{suffix}"))
}
}
if *PERF_ENABLED {
let mut counter = perf_event::Builder::new()
.build()
- .map_err(|err| eprintln!("Failed to create perf counter: {}", err))
+ .map_err(|err| eprintln!("Failed to create perf counter: {err}"))
.ok();
if let Some(counter) = &mut counter {
if let Err(err) = counter.enable() {
- eprintln!("Failed to start perf counter: {}", err)
+ eprintln!("Failed to start perf counter: {err}")
}
}
counter
#[cfg(target_os = "linux")]
let instructions = self.counter.as_mut().and_then(|it| {
- it.read().map_err(|err| eprintln!("Failed to read perf counter: {}", err)).ok()
+ it.read().map_err(|err| eprintln!("Failed to read perf counter: {err}")).ok()
});
#[cfg(not(target_os = "linux"))]
let instructions = None;
instructions /= 1000;
prefix = "g";
}
- write!(f, ", {}{}instr", instructions, prefix)?;
+ write!(f, ", {instructions}{prefix}instr")?;
}
if let Some(memory) = self.memory {
- write!(f, ", {}", memory)?;
+ write!(f, ", {memory}")?;
}
Ok(())
}
match message {
Message::BuildScriptExecuted(mut message) => {
with_output_for(&message.package_id.repr, &mut |name, data| {
- progress(format!("running build-script: {}", name));
+ progress(format!("running build-script: {name}"));
let cfgs = {
let mut acc = Vec::new();
for cfg in &message.cfgs {
}
Message::CompilerArtifact(message) => {
with_output_for(&message.package_id.repr, &mut |name, data| {
- progress(format!("building proc-macros: {}", name));
+ progress(format!("building proc-macros: {name}"));
if message.target.kind.iter().any(|k| k == "proc-macro") {
// Skip rmeta file
if let Some(filename) =
let res = match s.split_once('=') {
Some((key, value)) => {
if !(value.starts_with('"') && value.ends_with('"')) {
- return Err(format!("Invalid cfg ({:?}), value should be in quotes", s));
+ return Err(format!("Invalid cfg ({s:?}), value should be in quotes"));
}
let key = key.to_string();
let value = value[1..value.len() - 1].to_string();
}
fn utf8_stdout(mut cmd: Command) -> Result<String> {
- let output = cmd.output().with_context(|| format!("{:?} failed", cmd))?;
+ let output = cmd.output().with_context(|| format!("{cmd:?} failed"))?;
if !output.status.success() {
match String::from_utf8(output.stderr) {
Ok(stderr) if !stderr.is_empty() => {
D: de::Deserializer<'de>,
{
let name = String::deserialize(de)?;
- CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {:?}", err)))
+ CrateName::new(&name).map_err(|err| de::Error::custom(format!("invalid crate name: {err:?}")))
}
for path in SYSROOT_CRATES.trim().lines() {
let name = path.split('/').last().unwrap();
- let root = [format!("{}/src/lib.rs", path), format!("lib{}/lib.rs", path)]
+ let root = [format!("{path}/src/lib.rs"), format!("lib{path}/lib.rs")]
.into_iter()
.map(|it| sysroot.src_root.join(it))
.filter_map(|it| ManifestPath::try_from(it).ok())
}
fn check_crate_graph(crate_graph: CrateGraph, expect: Expect) {
- let mut crate_graph = format!("{:#?}", crate_graph);
+ let mut crate_graph = format!("{crate_graph:#?}");
replace_root(&mut crate_graph, false);
expect.assert_eq(&crate_graph);
}
Registry::default()
.with(
self.filter
- .add_directive(format!("chalk_solve={}", val).parse()?)
- .add_directive(format!("chalk_ir={}", val).parse()?)
- .add_directive(format!("chalk_recursive={}", val).parse()?),
+ .add_directive(format!("chalk_solve={val}").parse()?)
+ .add_directive(format!("chalk_ir={val}").parse()?)
+ .add_directive(format!("chalk_recursive={val}").parse()?),
)
.with(ra_fmt_layer)
.with(chalk_layer)
Some(log) => log.target(),
None => event.metadata().target(),
};
- write!(writer, "[{} {}] ", level, target)?;
+ write!(writer, "[{level} {target}] ")?;
// Write spans and fields of each span
ctx.visit_spans(|span| {
let fields = &ext.get::<FormattedFields<N>>().expect("will never be `None`");
if !fields.is_empty() {
- write!(writer, "{{{}}}", fields)?;
+ write!(writer, "{{{fields}}}")?;
}
write!(writer, ": ")?;
let code = match rustc_wrapper::run_rustc_skipping_cargo_checking(rustc, args.collect()) {
Ok(rustc_wrapper::ExitCode(code)) => code.unwrap_or(102),
Err(err) => {
- eprintln!("{}", err);
+ eprintln!("{err}");
101
}
};
let flags = flags::RustAnalyzer::from_env_or_exit();
if let Err(err) = try_main(flags) {
tracing::error!("Unexpected error: {}", err);
- eprintln!("{}", err);
+ eprintln!("{err}");
process::exit(101);
}
}
if std::env::var("RA_METRICS").is_err() {
return;
}
- println!("METRIC:{}:{}:{}", metric, value, unit)
+ println!("METRIC:{metric}:{value}:{unit}")
}
fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
for (name, bytes) in mem {
// NOTE: Not a debug print, so avoid going through the `eprintln` defined above.
- eprintln!("{:>8} {}", bytes, name);
+ eprintln!("{bytes:>8} {name}");
}
}
load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
let db = host.raw_database();
eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed());
- eprint!(" (metadata {}", metadata_time);
+ eprint!(" (metadata {metadata_time}");
if let Some(build_scripts_time) = build_scripts_time {
- eprint!("; build {}", build_scripts_time);
+ eprint!("; build {build_scripts_time}");
}
eprintln!(")");
shuffle(&mut rng, &mut visit_queue);
}
- eprint!(" crates: {}", num_crates);
+ eprint!(" crates: {num_crates}");
let mut num_decls = 0;
let mut funcs = Vec::new();
while let Some(module) = visit_queue.pop() {
}
}
}
- eprintln!(", mods: {}, decls: {}, fns: {}", visited_modules.len(), num_decls, funcs.len());
+ eprintln!(", mods: {}, decls: {num_decls}, fns: {}", visited_modules.len(), funcs.len());
eprintln!("{:<20} {}", "Item Collection:", analysis_sw.elapsed());
if self.randomize {
}
let total_span = analysis_sw.elapsed();
- eprintln!("{:<20} {}", "Total:", total_span);
+ eprintln!("{:<20} {total_span}", "Total:");
report_metric("total time", total_span.time.as_millis() as u64, "ms");
if let Some(instructions) = total_span.instructions {
report_metric("total instructions", instructions, "#instr");
total_macro_file_size += syntax_len(val.syntax_node())
}
}
- eprintln!("source files: {}, macro files: {}", total_file_size, total_macro_file_size);
+ eprintln!("source files: {total_file_size}, macro files: {total_macro_file_size}");
}
if self.memory_usage && verbosity.is_verbose() {
continue;
}
}
- let mut msg = format!("processing: {}", full_name);
+ let mut msg = format!("processing: {full_name}");
if verbosity.is_verbose() {
if let Some(src) = f.source(db) {
let original_file = src.file_id.original_file(db);
end.col,
));
} else {
- bar.println(format!("{}: Unknown type", name,));
+ bar.println(format!("{name}: Unknown type"));
}
}
true
let text_range = original_range.range;
let (start, end) =
(line_index.line_col(text_range.start()), line_index.line_col(text_range.end()));
- format!("{},{}:{},{}:{}", path, start.line + 1, start.col, end.line + 1, end.col)
+ format!("{path},{}:{},{}:{}", start.line + 1, start.col, end.line + 1, end.col)
}
fn expr_syntax_range(
if !visited_files.contains(&file_id) {
let crate_name =
module.krate().display_name(db).as_deref().unwrap_or("unknown").to_string();
- println!("processing crate: {}, module: {}", crate_name, _vfs.file_path(file_id));
+ println!("processing crate: {crate_name}, module: {}", _vfs.file_path(file_id));
for diagnostic in analysis
.diagnostics(
&DiagnosticsConfig::test_sample(),
found_error = true;
}
- println!("{:?}", diagnostic);
+ println!("{diagnostic:?}");
}
visited_files.insert(file_id);
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"csv" => Ok(Self::Csv),
- _ => Err(format!("unknown output format `{}`", s)),
+ _ => Err(format!("unknown output format `{s}`")),
}
}
}
pub fn run(self) -> anyhow::Result<()> {
let (analysis, file_id) = Analysis::from_single_file(read_stdin()?);
let html = analysis.highlight_as_html(file_id, self.rainbow).unwrap();
- println!("{}", html);
+ println!("{html}");
Ok(())
}
}
// FIXME: support file in addition to stdout here
fn emit(&self, data: &str) {
- println!("{}", data);
+ println!("{data}");
}
fn get_token_id(&mut self, id: TokenId) -> Id {
return;
}
let percent = (self.curr * 100.0) as u32;
- let text = format!("{}/{} {:3>}% {}", self.pos, self.len, percent, self.msg);
+ let text = format!("{}/{} {percent:3>}% {}", self.pos, self.len, self.msg);
self.update_text(&text);
}
// Fill all last text to space and return the cursor
let spaces = " ".repeat(self.text.len());
let backspaces = "\x08".repeat(self.text.len());
- print!("{}{}{}", backspaces, spaces, backspaces);
+ print!("{backspaces}{spaces}{backspaces}");
let _ = io::stdout().flush();
self.text = String::new();
let now = Instant::now();
let cargo_config = CargoConfig::default();
- let no_progress = &|s| (eprintln!("rust-analyzer: Loading {}", s));
+ let no_progress = &|s| (eprintln!("rust-analyzer: Loading {s}"));
let load_cargo_config = LoadCargoConfig {
load_out_dirs_from_check: true,
with_proc_macro: true,
fn new_descriptor(name: Name, suffix: scip_types::descriptor::Suffix) -> scip_types::Descriptor {
let mut name = name.to_string();
if name.contains("'") {
- name = format!("`{}`", name);
+ name = format!("`{name}`");
}
new_descriptor_str(name.as_str(), suffix)
}
if expected == "" {
- assert!(found_symbol.is_none(), "must have no symbols {:?}", found_symbol);
+ assert!(found_symbol.is_none(), "must have no symbols {found_symbol:?}");
return;
}
- assert!(found_symbol.is_some(), "must have one symbol {:?}", found_symbol);
+ assert!(found_symbol.is_some(), "must have one symbol {found_symbol:?}");
let res = found_symbol.unwrap();
let formatted = format_symbol(res);
assert_eq!(formatted, expected);
let sr = db.source_root(root);
for file_id in sr.iter() {
for debug_info in match_finder.debug_where_text_equal(file_id, debug_snippet) {
- println!("{:#?}", debug_info);
+ println!("{debug_info:#?}");
}
}
}
let (analysis, file_id) = Analysis::from_single_file(text);
let structure = analysis.file_structure(file_id).unwrap();
for s in structure {
- println!("{:?}", s);
+ println!("{s:?}");
}
Ok(())
}
fn key(f: &str) -> &str {
f.splitn(2, '_').next().unwrap()
}
- assert!(key(f1) <= key(f2), "wrong field order: {:?} {:?}", f1, f2);
+ assert!(key(f1) <= key(f2), "wrong field order: {f1:?} {f2:?}");
}
let map = fields
.iter()
.map(|(field, ty, doc, default)| {
let name = field.replace('_', ".");
- let name = format!("rust-analyzer.{}", name);
+ let name = format!("rust-analyzer.{name}");
let props = field_props(field, ty, doc, default);
(name, props)
})
},
],
},
- _ => panic!("missing entry for {}: {}", ty, default),
+ _ => panic!("missing entry for {ty}: {default}"),
}
map.into()
name, name, default, doc
)
} else {
- format!("[[{}]]{} (default: `{}`)::\n+\n--\n{}--\n", name, name, default, doc)
+ format!("[[{name}]]{name} (default: `{default}`)::\n+\n--\n{doc}--\n")
}
})
.collect::<String>()
}
fn doc_comment_to_string(doc: &[&str]) -> String {
- doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{}\n", it)).collect()
+ doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{it}\n")).collect()
}
#[cfg(test)]
#[test]
fn generate_package_json_config() {
let s = Config::json_schema();
- let schema = format!("{:#}", s);
+ let schema = format!("{s:#}");
let mut schema = schema
.trim_start_matches('{')
.trim_end_matches('}')
.iter()
.find_map(|(from, to)| file_name.strip_prefix(from).map(|file_name| (to, file_name)))
{
- Some((to, file_name)) => workspace_root.join(format!("{}{}", to, file_name)),
+ Some((to, file_name)) => workspace_root.join(format!("{to}{file_name}")),
None => workspace_root.join(file_name),
}
}
if !suggested_replacements.is_empty() {
message.push_str(": ");
let suggestions =
- suggested_replacements.iter().map(|suggestion| format!("`{}`", suggestion)).join(", ");
+ suggested_replacements.iter().map(|suggestion| format!("`{suggestion}`")).join(", ");
message.push_str(&suggestions);
}
&& chars.next().is_none()
})
.and_then(|code| {
- lsp_types::Url::parse(&format!("https://doc.rust-lang.org/error-index.html#{}", code))
+ lsp_types::Url::parse(&format!("https://doc.rust-lang.org/error-index.html#{code}"))
.ok()
.map(|href| lsp_types::CodeDescription { href })
})
match res {
Ok(params) => {
let panic_context =
- format!("\nversion: {}\nrequest: {} {:#?}", version(), R::METHOD, params);
+ format!("\nversion: {}\nrequest: {} {params:#?}", version(), R::METHOD);
Some((req, params, panic_context))
}
Err(err) => {
pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> {
let path = from_proto::vfs_path(url)?;
- let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?;
+ let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {path}"))?;
Ok(res)
}
Some(spec) => {
for cmd in ["check", "test"] {
res.push(lsp_ext::Runnable {
- label: format!("cargo {} -p {} --all-targets", cmd, spec.package),
+ label: format!("cargo {cmd} -p {} --all-targets", spec.package),
location: None,
kind: lsp_ext::RunnableKind::Cargo,
args: lsp_ext::CargoRunnable {
Ok(parsed_data) => parsed_data,
Err(e) => {
return Err(invalid_params_error(format!(
- "Failed to parse action id string '{}': {}",
- params.id, e
+ "Failed to parse action id string '{}': {e}",
+ params.id
))
.into())
}
let assist_kind: AssistKind = assist_kind_string.parse()?;
let index: usize = match index_string.parse() {
Ok(index) => index,
- Err(e) => return Err(format!("Incorrect index string: {}", e)),
+ Err(e) => return Err(format!("Incorrect index string: {e}")),
};
Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind }))
}
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
- .context(format!("Failed to spawn {:?}", command))?;
+ .context(format!("Failed to spawn {command:?}"))?;
rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
format!(
r#"rustfmt exited with:
Status: {}
- stdout: {}
- stderr: {}"#,
- output.status, captured_stdout, captured_stderr,
+ stdout: {captured_stdout}
+ stderr: {captured_stderr}"#,
+ output.status,
),
)
.into())
let file_id = {
let file = workspace_to_load.join(file);
let path = VfsPath::from(AbsPathBuf::assert(file));
- vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
+ vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
};
{
let file_id = {
let file = workspace_to_load.join(file);
let path = VfsPath::from(AbsPathBuf::assert(file));
- vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
+ vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {path}"))
};
{
pub fn from_json<T: DeserializeOwned>(what: &'static str, json: &serde_json::Value) -> Result<T> {
let res = serde_json::from_value(json.clone())
- .map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?;
+ .map_err(|e| format!("Failed to deserialize {what}: {e}; {json}"))?;
Ok(res)
}
});
let cancellable = Some(cancel_token.is_some());
let token = lsp_types::ProgressToken::String(
- cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{}", title)),
+ cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{title}")),
);
let work_done_progress = match state {
Progress::Begin => {
message = match &report.crates_currently_indexing[..] {
[crate_name] => Some(format!(
- "{}/{} ({})",
- report.crates_done, report.crates_total, crate_name
+ "{}/{} ({crate_name})",
+ report.crates_done, report.crates_total
)),
[crate_name, rest @ ..] => Some(format!(
"{}/{} ({} + {} more)",
self.report_progress(
"Roots Scanned",
state,
- Some(format!("{}/{}", n_done, n_total)),
+ Some(format!("{n_done}/{n_total}")),
Some(Progress::fraction(n_done, n_total)),
None,
)
state,
message,
None,
- Some(format!("rust-analyzer/flycheck/{}", id)),
+ Some(format!("rust-analyzer/flycheck/{id}")),
);
}
}
// by the client. Hex format is used because it is easier to
// visually compare very large values, which the sort text
// tends to be since it is the opposite of the score.
- res.sort_text = Some(format!("{:08x}", sort_score));
+ res.sort_text = Some(format!("{sort_score:08x}"));
}
}
(Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?),
(None, Some((index, code_action_params))) => {
res.data = Some(lsp_ext::CodeActionData {
- id: format!("{}:{}:{}", assist.id.0, assist.id.1.name(), index),
+ id: format!("{}:{}:{index}", assist.id.0, assist.id.1.name()),
code_action_params,
});
}
if count == 1 {
"1 implementation".into()
} else {
- format!("{} implementations", count)
+ format!("{count} implementations")
}
}
if count == 1 {
"1 reference".into()
} else {
- format!("{} references", count)
+ format!("{count} references")
}
}
for runnable in ["consumer", "dependency", "devdependency"] {
server.request::<Runnables>(
RunnablesParams {
- text_document: server.doc_id(&format!("{}/src/lib.rs", runnable)),
+ text_document: server.doc_id(&format!("{runnable}/src/lib.rs")),
position: None,
},
json!([
return;
}
- let librs: String = (0..10).map(|i| format!("mod m{};", i)).collect();
- let libs: String = (0..10).map(|i| format!("//- /src/m{}.rs\nfn foo() {{}}\n\n", i)).collect();
+ let librs: String = (0..10).map(|i| format!("mod m{i};")).collect();
+ let libs: String = (0..10).map(|i| format!("//- /src/m{i}.rs\nfn foo() {{}}\n\n")).collect();
let server = Project::with_fixture(&format!(
r#"
//- /Cargo.toml
for i in 0..10 {
server.notification::<DidOpenTextDocument>(DidOpenTextDocumentParams {
text_document: TextDocumentItem {
- uri: server.doc_id(&format!("src/m{}.rs", i)).uri,
+ uri: server.doc_id(&format!("src/m{i}.rs")).uri,
language_id: "rust".to_string(),
version: 0,
text: "/// Docs\nfn foo() {}".to_string(),
}]),
);
let elapsed = start.elapsed();
- assert!(elapsed.as_millis() < 2000, "typing enter took {:?}", elapsed);
+ assert!(elapsed.as_millis() < 2000, "typing enter took {elapsed:?}");
}
#[test]
let tmp_dir = TestDir::new();
let tmp_dir_path = tmp_dir.path().to_owned();
let tmp_dir_str = tmp_dir_path.to_str().unwrap();
- let base_path = PathBuf::from(format!("file://{}", tmp_dir_str));
+ let base_path = PathBuf::from(format!("file://{tmp_dir_str}"));
let code = r#"
//- /Cargo.toml
for block in comment_blocks {
let id = block.id;
if let Err(msg) = is_valid_feature_name(&id) {
- panic!("invalid feature name: {:?}:\n {}", id, msg)
+ panic!("invalid feature name: {id:?}:\n {msg}")
}
let doc = block.contents.join("\n");
let location = sourcegen::Location { file: path.clone(), line: block.line };
}
for short in ["To", "And"] {
if word == short {
- return Err(format!("Don't capitalize {:?}", word));
+ return Err(format!("Don't capitalize {word:?}"));
}
}
if !word.starts_with(char::is_uppercase) {
- return Err(format!("Capitalize {:?}", word));
+ return Err(format!("Capitalize {word:?}"));
}
}
Ok(())
fn send_request_(&self, r: Request) -> Value {
let id = r.id.clone();
self.client.sender.send(r.clone().into()).unwrap();
- while let Some(msg) = self.recv().unwrap_or_else(|Timeout| panic!("timeout: {:?}", r)) {
+ while let Some(msg) = self.recv().unwrap_or_else(|Timeout| panic!("timeout: {r:?}")) {
match msg {
Message::Request(req) => {
if req.method == "client/registerCapability" {
continue;
}
}
- panic!("unexpected request: {:?}", req)
+ panic!("unexpected request: {req:?}")
}
Message::Notification(_) => (),
Message::Response(res) => {
assert_eq!(res.id, id);
if let Some(err) = res.error {
- panic!("error response: {:#?}", err);
+ panic!("error response: {err:#?}");
}
return res.result.unwrap();
}
}
}
- panic!("no response for {:?}", r);
+ panic!("no response for {r:?}");
}
pub(crate) fn wait_until_workspace_is_loaded(self) -> Server {
self.wait_for_message_cond(1, &|msg: &Message| match msg {
static CNT: AtomicUsize = AtomicUsize::new(0);
for _ in 0..100 {
let cnt = CNT.fetch_add(1, Ordering::Relaxed);
- let path = base.join(format!("{}_{}", pid, cnt));
+ let path = base.join(format!("{pid}_{cnt}"));
if path.is_dir() {
continue;
}
return;
}
remove_dir_all(&self.path).unwrap_or_else(|err| {
- panic!("failed to remove temporary directory {}: {}", self.path.display(), err)
+ panic!("failed to remove temporary directory {}: {err}", self.path.display())
})
}
}
diff.push_str("New Licenses:\n");
for &l in licenses.iter() {
if !expected.contains(&l) {
- diff += &format!(" {}\n", l)
+ diff += &format!(" {l}\n")
}
}
diff.push_str("\nMissing Licenses:\n");
for &l in expected.iter() {
if !licenses.contains(&l) {
- diff += &format!(" {}\n", l)
+ diff += &format!(" {l}\n")
}
}
- panic!("different set of licenses!\n{}", diff);
+ panic!("different set of licenses!\n{diff}");
}
assert_eq!(licenses, expected);
}
"ide-assists/src/tests/generated.rs",
];
if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) {
- panic!("\ndon't `#[ignore]` tests, see:\n\n {}\n\n {}\n", ignore_rule, path.display(),)
+ panic!("\ndon't `#[ignore]` tests, see:\n\n {ignore_rule}\n\n {}\n", path.display(),)
}
let panic_rule =
self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();
if !diff.is_empty() {
- panic!("unpaired marks: {:?}", diff)
+ panic!("unpaired marks: {diff:?}")
}
}
}
pub fn extract(tag: &str, text: &str) -> Vec<CommentBlock> {
assert!(tag.starts_with(char::is_uppercase));
- let tag = format!("{}:", tag);
+ let tag = format!("{tag}:");
// Would be nice if we had `.retain_mut` here!
CommentBlock::extract_untagged(text)
.into_iter()
}
pub fn add_preamble(generator: &'static str, mut text: String) -> String {
- let preamble = format!("//! Generated by `{}`, do not edit by hand.\n\n", generator);
+ let preamble = format!("//! Generated by `{generator}`, do not edit by hand.\n\n");
text.insert_str(0, &preamble);
text
}
if !ctx.is_empty() {
eprintln!("Panic context:");
for frame in ctx.iter() {
- eprintln!("> {}\n", frame);
+ eprintln!("> {frame}\n");
}
}
default_hook(panic_info);
let fmt_syntax = |syn: &SyntaxElement| match syn.kind() {
SyntaxKind::WHITESPACE => format!("{:?}", syn.to_string()),
- _ => format!("{}", syn),
+ _ => format!("{syn}"),
};
let insertions =
.iter()
.sorted_by_key(|(syntax, _)| syntax.text_range().start())
.format_with("\n", |(k, v), f| {
- f(&format!("Line {}: {:?} -> {}", line_number(k), k, fmt_syntax(v)))
+ f(&format!("Line {}: {k:?} -> {}", line_number(k), fmt_syntax(v)))
});
let deletions = diff
for token in tokens {
if let Some(ws) = ast::Whitespace::cast(token) {
if ws.text().contains('\n') {
- let new_ws = make::tokens::whitespace(&format!("{}{}", ws.syntax(), self));
+ let new_ws = make::tokens::whitespace(&format!("{}{self}", ws.syntax()));
ted::replace(ws.syntax(), &new_ws);
}
}
if let Some(ws) = ast::Whitespace::cast(token) {
if ws.text().contains('\n') {
let new_ws = make::tokens::whitespace(
- &ws.syntax().text().replace(&format!("\n{}", self), "\n"),
+ &ws.syntax().text().replace(&format!("\n{self}"), "\n"),
);
ted::replace(ws.syntax(), &new_ws);
}
},
};
let elements: Vec<SyntaxElement<_>> = vec![
- make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(),
+ make::tokens::whitespace(&format!("{whitespace}{indent}")).into(),
item.syntax().clone().into(),
];
ted::insert_all(position, elements);
},
};
let indent = IndentLevel::from_node(self.syntax()) + 1;
- elements.push(make::tokens::whitespace(&format!("\n{}", indent)).into());
+ elements.push(make::tokens::whitespace(&format!("\n{indent}")).into());
elements.push(arm.syntax().clone().into());
if needs_comma(&arm) {
ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA));
let is_multiline = self.syntax().text().contains_char('\n');
let whitespace = if is_multiline {
let indent = IndentLevel::from_node(self.syntax()) + 1;
- make::tokens::whitespace(&format!("\n{}", indent))
+ make::tokens::whitespace(&format!("\n{indent}"))
} else {
make::tokens::single_space()
};
let is_multiline = self.syntax().text().contains_char('\n');
let whitespace = if is_multiline {
let indent = IndentLevel::from_node(self.syntax()) + 1;
- make::tokens::whitespace(&format!("\n{}", indent))
+ make::tokens::whitespace(&format!("\n{indent}"))
} else {
make::tokens::single_space()
};
},
};
let elements: Vec<SyntaxElement<_>> = vec![
- make::tokens::whitespace(&format!("{}{}", "\n", indent)).into(),
+ make::tokens::whitespace(&format!("{}{indent}", "\n")).into(),
variant.syntax().clone().into(),
ast::make::token(T![,]).into(),
];
match l.next_sibling_or_token() {
Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => {
if ws.next_sibling_or_token()?.into_token()? == r {
- ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent)));
+ ted::replace(ws, make::tokens::whitespace(&format!("\n{indent}")));
}
}
Some(ws) if ws.kind() == T!['}'] => {
- ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent)));
+ ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{indent}")));
}
_ => (),
}
fn check_string_value<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
assert_eq!(
- ast::String { syntax: make::tokens::literal(&format!("\"{}\"", lit)) }
- .value()
- .as_deref(),
+ ast::String { syntax: make::tokens::literal(&format!("\"{lit}\"")) }.value().as_deref(),
expected.into()
);
}
expected: impl Into<Option<&'a [u8; N]>>,
) {
assert_eq!(
- ast::ByteString { syntax: make::tokens::literal(&format!("b\"{}\"", lit)) }
+ ast::ByteString { syntax: make::tokens::literal(&format!("b\"{lit}\"")) }
.value()
.as_deref(),
expected.into().map(|value| &value[..])
let delete_len = usize::from_str(lines.next()?).ok()?;
let insert = lines.next()?.to_string();
let text = lines.collect::<Vec<_>>().join("\n");
- let text = format!("{}{}{}", PREFIX, text, SUFFIX);
+ let text = format!("{PREFIX}{text}{SUFFIX}");
text.get(delete_start..delete_start.checked_add(delete_len)?)?; // make sure delete is a valid range
let delete =
TextRange::at(delete_start.try_into().unwrap(), delete_len.try_into().unwrap());
eprint!("reparsed:\n{:#?}", new_parse.tree().syntax());
eprint!("full reparse:\n{:#?}", full_reparse.tree().syntax());
assert_eq!(
- format!("{:?}", a),
- format!("{:?}", b),
+ format!("{a:?}"),
+ format!("{b:?}"),
"different syntax tree produced by the full reparse"
);
}
pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
let s = s.trim();
- let file = ast::SourceFile::parse(&format!("const _: () = {};", s));
+ let file = ast::SourceFile::parse(&format!("const _: () = {s};"));
let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
if expr.syntax().text() != s {
return None;
if let Some(item_list) = prev.parent().and_then(ast::ItemList::cast) {
let mut indent = IndentLevel::from_element(&item_list.syntax().clone().into());
indent.0 += 1;
- return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ return Some(make::tokens::whitespace(&format!("\n{indent}")));
}
}
if let Some(stmt_list) = prev.parent().and_then(ast::StmtList::cast) {
let mut indent = IndentLevel::from_element(&stmt_list.syntax().clone().into());
indent.0 += 1;
- return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ return Some(make::tokens::whitespace(&format!("\n{indent}")));
}
}
if left.kind() == SyntaxKind::USE {
indent.0 = IndentLevel::from_element(right).0.max(indent.0);
}
- return Some(make::tokens::whitespace(&format!("\n{}", indent)));
+ return Some(make::tokens::whitespace(&format!("\n{indent}")));
}
Some(make::tokens::single_space())
}
.into_iter()
.map(|(path, err)| format!("{}: {:?}\n", path.display(), err[0]))
.collect::<String>();
- panic!("Parsing errors:\n{}\n", errors);
+ panic!("Parsing errors:\n{errors}\n");
}
}
/// so this should always be correct.
/// Reads the file at `path` into a `String`, normalizing Windows line
/// endings (`\r\n`) to `\n` so comparisons are stable across platforms.
///
/// # Panics
/// Panics if the file cannot be read (missing, unreadable, or not valid
/// UTF-8); the panic message includes the underlying I/O error.
fn read_text(path: &Path) -> String {
    fs::read_to_string(path)
        // Keep the concrete error instead of discarding it with `|_|`:
        // "file missing" and "invalid UTF-8" should be distinguishable
        // when this assertion-style read fails.
        .unwrap_or_else(|err| panic!("File at {path:?} should be valid: {err}"))
        .replace("\r\n", "\n")
}
/// Appends each line of `contents` to `dest` as a `///` doc-comment line,
/// terminating every line with `\n`.
fn write_doc_comment(contents: &[String], dest: &mut String) {
    contents.iter().for_each(|line| {
        dest.push_str("///");
        dest.push_str(line);
        dest.push('\n');
    });
}
}
/// Naively pluralizes `s` by appending a single `s`.
fn pluralize(s: &str) -> String {
    let mut plural = String::with_capacity(s.len() + 1);
    plural.push_str(s);
    plural.push('s');
    plural
}
impl Field {
let mut name = grammar[*token].name.clone();
if name != "int_number" && name != "string" {
if "[]{}()".contains(&name) {
- name = format!("'{}'", name);
+ name = format!("'{name}'");
}
let field = Field::Token(name);
acc.push(field);
acc.push(field);
return;
}
- panic!("unhandled rule: {:?}", rule)
+ panic!("unhandled rule: {rule:?}")
}
Rule::Labeled { label: l, rule } => {
assert!(label.is_none());
let a = mean_y - b * mean_x;
- self.plot = format!("y_pred = {:.3} + {:.3} * x\n\nx y y_pred\n", a, b);
+ self.plot = format!("y_pred = {a:.3} + {b:.3} * x\n\nx y y_pred\n");
let mut se = 0.0;
let mut max_error = 0.0f64;
&& !line.contains('.')
&& line.chars().all(|it| !it.is_uppercase())
{
- panic!("looks like invalid metadata line: {:?}", line);
+ panic!("looks like invalid metadata line: {line:?}");
}
if let Some(entry) = res.last_mut() {
let components = meta.split_ascii_whitespace().collect::<Vec<_>>();
let path = components[0].to_string();
- assert!(path.starts_with('/'), "fixture path does not start with `/`: {:?}", path);
+ assert!(path.starts_with('/'), "fixture path does not start with `/`: {path:?}");
let mut krate = None;
let mut deps = Vec::new();
let mut introduce_new_source_root = None;
let mut target_data_layout = None;
for component in components[1..].iter() {
- let (key, value) = component
- .split_once(':')
- .unwrap_or_else(|| panic!("invalid meta line: {:?}", meta));
+ let (key, value) =
+ component.split_once(':').unwrap_or_else(|| panic!("invalid meta line: {meta:?}"));
match key {
"crate" => krate = Some(value.to_string()),
"deps" => deps = value.split(',').map(|it| it.to_string()).collect(),
}
"new_source_root" => introduce_new_source_root = Some(value.to_string()),
"target_data_layout" => target_data_layout = Some(value.to_string()),
- _ => panic!("bad component: {:?}", component),
+ _ => panic!("bad component: {component:?}"),
}
}
#[track_caller]
fn assert_valid_flag(&self, flag: &str) {
if !self.valid_flags.iter().any(|it| it == flag) {
- panic!("invalid flag: {:?}, valid flags: {:?}", flag, self.valid_flags);
+ panic!("invalid flag: {flag:?}, valid flags: {:?}", self.valid_flags);
}
}
let line = line.strip_prefix("//- minicore:").unwrap().trim();
for entry in line.split(", ") {
if res.has_flag(entry) {
- panic!("duplicate minicore flag: {:?}", entry);
+ panic!("duplicate minicore flag: {entry:?}");
}
res.activated_flags.push(entry.to_owned());
}
for flag in &self.valid_flags {
if !seen_regions.iter().any(|it| it == flag) {
- panic!("unused minicore flag: {:?}", flag);
+ panic!("unused minicore flag: {flag:?}");
}
}
buf
/// Extracts ranges, marked with `<tag> </tag>` pairs from the `text`
pub fn extract_tags(mut text: &str, tag: &str) -> (Vec<(TextRange, Option<String>)>, String) {
- let open = format!("<{}", tag);
- let close = format!("</{}>", tag);
+ let open = format!("<{tag}");
+ let close = format!("</{tag}>");
let mut ranges = Vec::new();
let mut res = String::new();
let mut stack = Vec::new();
stack.push((from, attr));
} else if text.starts_with(&close) {
text = &text[close.len()..];
- let (from, attr) =
- stack.pop().unwrap_or_else(|| panic!("unmatched </{}>", tag));
+ let (from, attr) = stack.pop().unwrap_or_else(|| panic!("unmatched </{tag}>"));
let to = TextSize::of(&res);
ranges.push((TextRange::new(from, to), attr));
} else {
}
}
}
- assert!(stack.is_empty(), "unmatched <{}>", tag);
+ assert!(stack.is_empty(), "unmatched <{tag}>");
ranges.sort_by_key(|r| (r.0.start(), r.0.end()));
(ranges, res)
}
for chunk in chunks {
let formatted = match chunk {
dissimilar::Chunk::Equal(text) => text.into(),
- dissimilar::Chunk::Delete(text) => format!("\x1b[41m{}\x1b[0m", text),
- dissimilar::Chunk::Insert(text) => format!("\x1b[42m{}\x1b[0m", text),
+ dissimilar::Chunk::Delete(text) => format!("\x1b[41m{text}\x1b[0m"),
+ dissimilar::Chunk::Insert(text) => format!("\x1b[42m{text}\x1b[0m"),
};
buf.push_str(&formatted);
}
let aux = match subtree.delimiter.map(|it| (it.kind, it.id.0)) {
None => "$".to_string(),
- Some((DelimiterKind::Parenthesis, id)) => format!("() {}", id),
- Some((DelimiterKind::Brace, id)) => format!("{{}} {}", id),
- Some((DelimiterKind::Bracket, id)) => format!("[] {}", id),
+ Some((DelimiterKind::Parenthesis, id)) => format!("() {id}"),
+ Some((DelimiterKind::Brace, id)) => format!("{{}} {id}"),
+ Some((DelimiterKind::Bracket, id)) => format!("[] {id}"),
};
if subtree.token_trees.is_empty() {
- write!(f, "{}SUBTREE {}", align, aux)?;
+ write!(f, "{align}SUBTREE {aux}")?;
} else {
- writeln!(f, "{}SUBTREE {}", align, aux)?;
+ writeln!(f, "{align}SUBTREE {aux}")?;
for (idx, child) in subtree.token_trees.iter().enumerate() {
print_debug_token(f, child, level + 1)?;
if idx != subtree.token_trees.len() - 1 {
match tkn {
TokenTree::Leaf(leaf) => match leaf {
- Leaf::Literal(lit) => write!(f, "{}LITERAL {} {}", align, lit.text, lit.id.0)?,
+ Leaf::Literal(lit) => write!(f, "{align}LITERAL {} {}", lit.text, lit.id.0)?,
Leaf::Punct(punct) => write!(
f,
"{}PUNCH {} [{}] {}",
if punct.spacing == Spacing::Alone { "alone" } else { "joint" },
punct.id.0
)?,
- Leaf::Ident(ident) => write!(f, "{}IDENT {} {}", align, ident.text, ident.id.0)?,
+ Leaf::Ident(ident) => write!(f, "{align}IDENT {} {}", ident.text, ident.id.0)?,
},
TokenTree::Subtree(subtree) => {
print_debug_subtree(f, subtree, level)?;
Some(DelimiterKind::Parenthesis) => ("(", ")"),
Some(DelimiterKind::Bracket) => ("[", "]"),
};
- format!("{}{}{}", open, content, close)
+ format!("{open}{content}{close}")
}
}
}
path = &path["../".len()..];
}
path = path.trim_start_matches("./");
- res.0 = format!("{}/{}", res.0, path);
+ res.0 = format!("{}/{path}", res.0);
Some(res)
}
let _params: InitializeParams = serde_json::from_value(params).unwrap();
eprintln!("starting example main loop");
for msg in &connection.receiver {
- eprintln!("got msg: {:?}", msg);
+ eprintln!("got msg: {msg:?}");
match msg {
Message::Request(req) => {
if connection.handle_shutdown(&req)? {
return Ok(());
}
- eprintln!("got request: {:?}", req);
+ eprintln!("got request: {req:?}");
match cast::<GotoDefinition>(req) {
Ok((id, params)) => {
- eprintln!("got gotoDefinition request #{}: {:?}", id, params);
+ eprintln!("got gotoDefinition request #{id}: {params:?}");
let result = Some(GotoDefinitionResponse::Array(Vec::new()));
let result = serde_json::to_value(&result).unwrap();
let resp = Response { id, result: Some(result), error: None };
connection.sender.send(Message::Response(resp))?;
continue;
}
- Err(err @ ExtractError::JsonError { .. }) => panic!("{:?}", err),
+ Err(err @ ExtractError::JsonError { .. }) => panic!("{err:?}"),
Err(ExtractError::MethodMismatch(req)) => req,
};
// ...
}
Message::Response(resp) => {
- eprintln!("got response: {:?}", resp);
+ eprintln!("got response: {resp:?}");
}
Message::Notification(not) => {
- eprintln!("got notification: {:?}", not);
+ eprintln!("got notification: {not:?}");
}
}
}
let resp = Response::new_err(
req.id.clone(),
ErrorCode::ServerNotInitialized as i32,
- format!("expected initialize request, got {:?}", req),
+ format!("expected initialize request, got {req:?}"),
);
self.sender.send(resp.into()).unwrap();
}
match &self.receiver.recv_timeout(std::time::Duration::from_secs(30)) {
Ok(Message::Notification(n)) if n.is_exit() => (),
Ok(msg) => {
- return Err(ProtocolError(format!("unexpected message during shutdown: {:?}", msg)))
- }
- Err(e) => {
- return Err(ProtocolError(format!("unexpected error during shutdown: {}", e)))
+ return Err(ProtocolError(format!("unexpected message during shutdown: {msg:?}")))
}
+ Err(e) => return Err(ProtocolError(format!("unexpected error during shutdown: {e}"))),
}
Ok(true)
}
if let Some(patch_version) = self.client_patch_version {
let version = if stable {
- format!("{}.{}", VERSION_STABLE, patch_version)
+ format!("{VERSION_STABLE}.{patch_version}")
} else {
// A hack to make VS Code prefer nightly over stable.
- format!("{}.{}", VERSION_NIGHTLY, patch_version)
+ format!("{VERSION_NIGHTLY}.{patch_version}")
};
dist_server(sh, &format!("{version}-standalone"), &target)?;
let release_tag = if stable { date_iso(sh)? } else { "nightly".to_string() };
let mut patch = Patch::new(sh, "./package.json")?;
patch
.replace(
- &format!(r#""version": "{}.0-dev""#, VERSION_DEV),
- &format!(r#""version": "{}""#, version),
+ &format!(r#""version": "{VERSION_DEV}.0-dev""#),
+ &format!(r#""version": "{version}""#),
)
- .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{}""#, release_tag))
+ .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{release_tag}""#))
.replace(r#""$generated-start": {},"#, "")
.replace(",\n \"$generated-end\": {}", "")
.replace(r#""enabledApiProposals": [],"#, r#""#);
} else {
(String::new(), None)
};
- let server_path = out_path.join(format!("rust-analyzer{}", exe_suffix));
- let artifact_name = format!("rust-analyzer-{}{}", name, exe_suffix);
+ let server_path = out_path.join(format!("rust-analyzer{exe_suffix}"));
+ let artifact_name = format!("rust-analyzer-{name}{exe_suffix}");
Self { name, server_path, symbols_path, artifact_name }
}
}
self.measure_analysis_stats_path(
sh,
bench,
- &format!("./target/rustc-perf/collector/benchmarks/{}", bench),
+ &format!("./target/rustc-perf/collector/benchmarks/{bench}"),
)
}
fn measure_analysis_stats_path(
let prev_tag = tags.lines().filter(|line| is_release_tag(line)).last().unwrap();
let contents = changelog::get_changelog(sh, changelog_n, &commit, prev_tag, &today)?;
- let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n));
+ let path = changelog_dir.join(format!("{today}-changelog-{changelog_n}.adoc"));
sh.write_file(&path, &contents)?;
Ok(())
let line = line.trim_start();
if let Some(pr_num) = parse_pr_number(&line) {
let accept = "Accept: application/vnd.github.v3+json";
- let authorization = format!("Authorization: token {}", token);
+ let authorization = format!("Authorization: token {token}");
let pr_url = "https://api.github.com/repos/rust-lang/rust-analyzer/issues";
// we don't use an HTTPS client or JSON parser to keep the build times low
PrKind::Other => &mut others,
PrKind::Skip => continue,
};
- writeln!(s, "* pr:{}[] {}", pr_num, l.message.as_deref().unwrap_or(&pr_title)).unwrap();
+ writeln!(s, "* pr:{pr_num}[] {}", l.message.as_deref().unwrap_or(&pr_title)).unwrap();
}
}