// compiler/rustc_ast_lowering/src/asm.rs — lowering of inline assembly from AST to HIR.
1 use crate::{ImplTraitContext, ImplTraitPosition, ParamMode, ResolverAstLoweringExt};
2
3 use super::errors::{
4     AbiSpecifiedMultipleTimes, AttSyntaxOnlyX86, ClobberAbiNotSupported,
5     InlineAsmUnsupportedTarget, InvalidAbiClobberAbi, InvalidAsmTemplateModifierConst,
6     InvalidAsmTemplateModifierRegClass, InvalidAsmTemplateModifierRegClassSub,
7     InvalidAsmTemplateModifierSym, InvalidRegister, InvalidRegisterClass, RegisterClassOnlyClobber,
8     RegisterConflict,
9 };
10 use super::LoweringContext;
11
12 use rustc_ast::ptr::P;
13 use rustc_ast::*;
14 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
15 use rustc_hir as hir;
16 use rustc_hir::def::{DefKind, Res};
17 use rustc_hir::definitions::DefPathData;
18 use rustc_session::parse::feature_err;
19 use rustc_span::{sym, Span};
20 use rustc_target::asm;
21 use std::collections::hash_map::Entry;
22 use std::fmt::Write;
23
impl<'a, 'hir> LoweringContext<'a, 'hir> {
    /// Lowers an AST [`InlineAsm`] (`asm!`/`global_asm!`) to its HIR form.
    ///
    /// This performs several validation passes along the way, each of which
    /// emits a diagnostic on failure but never aborts lowering — where parsing
    /// fails, dummy `Err` registers/register classes are substituted so that a
    /// structurally valid `hir::InlineAsm` is always produced:
    ///
    /// 1. target/feature gating (unsupported target, unstable architectures,
    ///    `att_syntax` on non-x86, the `may_unwind` option);
    /// 2. `clobber_abi` parsing and duplicate detection;
    /// 3. operand lowering (register/register-class parsing, `const`/`sym`
    ///    feature gates, `sym` resolution to either a static or a function);
    /// 4. template-modifier validation against each operand's register class;
    /// 5. conflict detection between explicit register operands;
    /// 6. appending the clobbers implied by `clobber_abi` as late outputs.
    pub(crate) fn lower_inline_asm(
        &mut self,
        sp: Span,
        asm: &InlineAsm,
    ) -> &'hir hir::InlineAsm<'hir> {
        // Rustdoc needs to support asm! from foreign architectures: don't try
        // lowering the register constraints in this case.
        let asm_arch =
            if self.tcx.sess.opts.actually_rustdoc { None } else { self.tcx.sess.asm_arch };
        if asm_arch.is_none() && !self.tcx.sess.opts.actually_rustdoc {
            self.tcx.sess.emit_err(InlineAsmUnsupportedTarget { span: sp });
        }
        if let Some(asm_arch) = asm_arch {
            // Inline assembly is currently only stable for these architectures.
            let is_stable = matches!(
                asm_arch,
                asm::InlineAsmArch::X86
                    | asm::InlineAsmArch::X86_64
                    | asm::InlineAsmArch::Arm
                    | asm::InlineAsmArch::AArch64
                    | asm::InlineAsmArch::RiscV32
                    | asm::InlineAsmArch::RiscV64
            );
            if !is_stable && !self.tcx.features().asm_experimental_arch {
                feature_err(
                    &self.tcx.sess.parse_sess,
                    sym::asm_experimental_arch,
                    sp,
                    "inline assembly is not stable yet on this architecture",
                )
                .emit();
            }
        }
        // `att_syntax` only makes sense on x86; skip the check under rustdoc,
        // which may be documenting code for a foreign architecture.
        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
            && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
            && !self.tcx.sess.opts.actually_rustdoc
        {
            self.tcx.sess.emit_err(AttSyntaxOnlyX86 { span: sp });
        }
        // The `may_unwind` option is gated behind the `asm_unwind` feature.
        if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind {
            feature_err(
                &self.tcx.sess.parse_sess,
                sym::asm_unwind,
                sp,
                "the `may_unwind` option is unstable",
            )
            .emit();
        }

        // Parse each `clobber_abi` name, keying the map by the *resolved* ABI
        // so that distinct names resolving to the same ABI are still caught as
        // duplicates.
        let mut clobber_abis = FxHashMap::default();
        if let Some(asm_arch) = asm_arch {
            for (abi_name, abi_span) in &asm.clobber_abis {
                match asm::InlineAsmClobberAbi::parse(asm_arch, &self.tcx.sess.target, *abi_name) {
                    Ok(abi) => {
                        // If the abi was already in the list, emit an error
                        match clobber_abis.get(&abi) {
                            Some((prev_name, prev_sp)) => {
                                // Multiple different abi names may actually be the same ABI
                                // If the specified ABIs are not the same name, alert the user that they resolve to the same ABI
                                let source_map = self.tcx.sess.source_map();
                                // `Some(())` iff the two snippets differ, which toggles
                                // the "these are equivalent" note in the diagnostic.
                                let equivalent = (source_map.span_to_snippet(*prev_sp)
                                    != source_map.span_to_snippet(*abi_span))
                                .then_some(());

                                self.tcx.sess.emit_err(AbiSpecifiedMultipleTimes {
                                    abi_span: *abi_span,
                                    prev_name: *prev_name,
                                    prev_span: *prev_sp,
                                    equivalent,
                                });
                            }
                            None => {
                                clobber_abis.insert(abi, (*abi_name, *abi_span));
                            }
                        }
                    }
                    // An empty supported-ABI list means this target supports no
                    // clobber ABIs at all.
                    Err(&[]) => {
                        self.tcx.sess.emit_err(ClobberAbiNotSupported { abi_span: *abi_span });
                    }
                    Err(supported_abis) => {
                        // Render the supported ABIs as "`a`, `b`, `c`" for the
                        // diagnostic.
                        let mut abis = format!("`{}`", supported_abis[0]);
                        for m in &supported_abis[1..] {
                            let _ = write!(abis, ", `{}`", m);
                        }
                        self.tcx.sess.emit_err(InvalidAbiClobberAbi {
                            abi_span: *abi_span,
                            supported_abis: abis,
                        });
                    }
                }
            }
        }

        // Lower operands to HIR. We use dummy register classes if an error
        // occurs during lowering because we still need to be able to produce a
        // valid HIR.
        let sess = self.tcx.sess;
        let mut operands: Vec<_> = asm
            .operands
            .iter()
            .map(|(op, op_sp)| {
                // Parse an explicit register or a register-class name; on
                // failure, report it and fall back to the `Err` dummy.
                let lower_reg = |reg| match reg {
                    InlineAsmRegOrRegClass::Reg(reg) => {
                        asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmReg::parse(asm_arch, reg).unwrap_or_else(|error| {
                                sess.emit_err(InvalidRegister { op_span: *op_sp, reg, error });
                                asm::InlineAsmReg::Err
                            })
                        } else {
                            // No known architecture (rustdoc): don't validate.
                            asm::InlineAsmReg::Err
                        })
                    }
                    InlineAsmRegOrRegClass::RegClass(reg_class) => {
                        asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmRegClass::parse(asm_arch, reg_class).unwrap_or_else(
                                |error| {
                                    sess.emit_err(InvalidRegisterClass {
                                        op_span: *op_sp,
                                        reg_class,
                                        error,
                                    });
                                    asm::InlineAsmRegClass::Err
                                },
                            )
                        } else {
                            asm::InlineAsmRegClass::Err
                        })
                    }
                };

                let op = match *op {
                    InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
                        reg: lower_reg(reg),
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
                        reg: lower_reg(reg),
                        late,
                        // `expr` is `None` for a pure clobber like `out("eax") _`.
                        expr: expr.as_ref().map(|expr| self.lower_expr(expr)),
                    },
                    InlineAsmOperand::InOut { reg, late, ref expr } => {
                        hir::InlineAsmOperand::InOut {
                            reg: lower_reg(reg),
                            late,
                            expr: self.lower_expr(expr),
                        }
                    }
                    InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
                        hir::InlineAsmOperand::SplitInOut {
                            reg: lower_reg(reg),
                            late,
                            in_expr: self.lower_expr(in_expr),
                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)),
                        }
                    }
                    InlineAsmOperand::Const { ref anon_const } => {
                        // `const` operands are gated behind `asm_const`.
                        if !self.tcx.features().asm_const {
                            feature_err(
                                &sess.parse_sess,
                                sym::asm_const,
                                *op_sp,
                                "const operands for inline assembly are unstable",
                            )
                            .emit();
                        }
                        hir::InlineAsmOperand::Const {
                            anon_const: self.lower_anon_const(anon_const),
                        }
                    }
                    InlineAsmOperand::Sym { ref sym } => {
                        // `sym` operands are gated behind `asm_sym`.
                        if !self.tcx.features().asm_sym {
                            feature_err(
                                &sess.parse_sess,
                                sym::asm_sym,
                                *op_sp,
                                "sym operands for inline assembly are unstable",
                            )
                            .emit();
                        }

                        // If the path fully resolved to a static item, lower it
                        // to `SymStatic`; otherwise treat it as a function
                        // reference (`SymFn`) below.
                        let static_def_id = self
                            .resolver
                            .get_partial_res(sym.id)
                            .filter(|res| res.unresolved_segments() == 0)
                            .and_then(|res| {
                                if let Res::Def(DefKind::Static(_), def_id) = res.base_res() {
                                    Some(def_id)
                                } else {
                                    None
                                }
                            });

                        if let Some(def_id) = static_def_id {
                            let path = self.lower_qpath(
                                sym.id,
                                &sym.qself,
                                &sym.path,
                                ParamMode::Optional,
                                &mut ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                            );
                            hir::InlineAsmOperand::SymStatic { path, def_id }
                        } else {
                            // Replace the InlineAsmSym AST node with an
                            // Expr using the name node id.
                            let expr = Expr {
                                id: sym.id,
                                kind: ExprKind::Path(sym.qself.clone(), sym.path.clone()),
                                span: *op_sp,
                                attrs: AttrVec::new(),
                                tokens: None,
                            };

                            // Wrap the expression in an AnonConst.
                            let parent_def_id = self.current_hir_id_owner;
                            let node_id = self.next_node_id();
                            self.create_def(parent_def_id, node_id, DefPathData::AnonConst);
                            let anon_const = AnonConst { id: node_id, value: P(expr) };
                            hir::InlineAsmOperand::SymFn {
                                anon_const: self.lower_anon_const(&anon_const),
                            }
                        }
                    }
                };
                (op, self.lower_span(*op_sp))
            })
            .collect();

        // Validate template modifiers against the register classes for the operands
        for p in &asm.template {
            if let InlineAsmTemplatePiece::Placeholder {
                operand_idx,
                modifier: Some(modifier),
                span: placeholder_span,
            } = *p
            {
                let op_sp = asm.operands[operand_idx].1;
                match &operands[operand_idx].0 {
                    hir::InlineAsmOperand::In { reg, .. }
                    | hir::InlineAsmOperand::Out { reg, .. }
                    | hir::InlineAsmOperand::InOut { reg, .. }
                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
                        let class = reg.reg_class();
                        // A dummy class means the register already failed to
                        // parse; an error was emitted then, so don't pile on.
                        if class == asm::InlineAsmRegClass::Err {
                            continue;
                        }
                        // `asm_arch` must be `Some` here: `Err` classes are the
                        // only ones produced when it is `None`, and those were
                        // skipped above.
                        let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
                        if !valid_modifiers.contains(&modifier) {
                            // Tailor the suggestion to whether the class
                            // accepts any modifiers at all.
                            let sub = if !valid_modifiers.is_empty() {
                                let mut mods = format!("`{}`", valid_modifiers[0]);
                                for m in &valid_modifiers[1..] {
                                    let _ = write!(mods, ", `{}`", m);
                                }
                                InvalidAsmTemplateModifierRegClassSub::SupportModifier {
                                    class_name: class.name(),
                                    modifiers: mods,
                                }
                            } else {
                                InvalidAsmTemplateModifierRegClassSub::DoesNotSupportModifier {
                                    class_name: class.name(),
                                }
                            };
                            sess.emit_err(InvalidAsmTemplateModifierRegClass {
                                placeholder_span,
                                op_span: op_sp,
                                sub,
                            });
                        }
                    }
                    // Modifiers are never valid on `const` or `sym` operands.
                    hir::InlineAsmOperand::Const { .. } => {
                        sess.emit_err(InvalidAsmTemplateModifierConst {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::SymFn { .. }
                    | hir::InlineAsmOperand::SymStatic { .. } => {
                        sess.emit_err(InvalidAsmTemplateModifierSym {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                }
            }
        }

        // Maps each register to the index of the first operand that used it as
        // an input (resp. output), for conflict reporting below.
        let mut used_input_regs = FxHashMap::default();
        let mut used_output_regs = FxHashMap::default();

        for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
            if let Some(reg) = op.reg() {
                let reg_class = reg.reg_class();
                // Skip operands whose register failed to parse earlier.
                if reg_class == asm::InlineAsmRegClass::Err {
                    continue;
                }

                // Some register classes can only be used as clobbers. This
                // means that we disallow passing a value in/out of the asm and
                // require that the operand name an explicit register, not a
                // register class.
                if reg_class.is_clobber_only(asm_arch.unwrap()) && !op.is_clobber() {
                    sess.emit_err(RegisterClassOnlyClobber {
                        op_span: op_sp,
                        reg_class_name: reg_class.name(),
                    });
                    continue;
                }

                // Check for conflicts between explicit register operands.
                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
                    // Classify the operand by whether it occupies the register
                    // on the input side, the output side, or both.
                    let (input, output) = match op {
                        hir::InlineAsmOperand::In { .. } => (true, false),

                        // Late output do not conflict with inputs, but normal outputs do
                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),

                        hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),

                        hir::InlineAsmOperand::Const { .. }
                        | hir::InlineAsmOperand::SymFn { .. }
                        | hir::InlineAsmOperand::SymStatic { .. } => {
                            // These operand kinds never carry a register, so
                            // `op.reg()` above cannot have returned `Some`.
                            unreachable!()
                        }
                    };

                    // Flag to output the error only once per operand
                    let mut skip = false;
                    // Walk every register overlapping `reg` (including itself)
                    // and report a conflict if any was already claimed.
                    reg.overlapping_regs(|r| {
                        let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
                                         input| {
                            match used_regs.entry(r) {
                                Entry::Occupied(o) => {
                                    if skip {
                                        return;
                                    }
                                    skip = true;

                                    let idx2 = *o.get();
                                    let &(ref op2, op_sp2) = &operands[idx2];
                                    let Some(asm::InlineAsmRegOrRegClass::Reg(reg2)) = op2.reg() else {
                                        unreachable!();
                                    };

                                    // When an `in` conflicts with a non-late
                                    // `out`, point at the output's span and
                                    // suggest `lateout` in the diagnostic.
                                    let in_out = match (op, op2) {
                                        (
                                            hir::InlineAsmOperand::In { .. },
                                            hir::InlineAsmOperand::Out { late, .. },
                                        )
                                        | (
                                            hir::InlineAsmOperand::Out { late, .. },
                                            hir::InlineAsmOperand::In { .. },
                                        ) => {
                                            // Late outputs never land in the
                                            // input map, so this pairing can
                                            // only involve a non-late output.
                                            assert!(!*late);
                                            let out_op_sp = if input { op_sp2 } else { op_sp };
                                            Some(out_op_sp)
                                        },
                                        _ => None,
                                    };

                                    sess.emit_err(RegisterConflict {
                                        op_span1: op_sp,
                                        op_span2: op_sp2,
                                        reg1_name: reg.name(),
                                        reg2_name: reg2.name(),
                                        in_out
                                    });
                                }
                                Entry::Vacant(v) => {
                                    // Only record the register itself, not its
                                    // overlapping neighbours, as "used".
                                    if r == reg {
                                        v.insert(idx);
                                    }
                                }
                            }
                        };
                        if input {
                            check(&mut used_input_regs, true);
                        }
                        if output {
                            check(&mut used_output_regs, false);
                        }
                    });
                }
            }
        }

        // If a clobber_abi is specified, add the necessary clobbers to the
        // operands list.
        let mut clobbered = FxHashSet::default();
        for (abi, (_, abi_span)) in clobber_abis {
            for &clobber in abi.clobbered_regs() {
                // Don't emit a clobber for a register already clobbered
                if clobbered.contains(&clobber) {
                    continue;
                }

                // Don't emit a clobber if the register (or one overlapping it)
                // is already used as an explicit output.
                let mut output_used = false;
                clobber.overlapping_regs(|reg| {
                    if used_output_regs.contains_key(&reg) {
                        output_used = true;
                    }
                });

                if !output_used {
                    // A clobber is represented as a late output with no
                    // expression, spanned at the `clobber_abi` argument.
                    operands.push((
                        hir::InlineAsmOperand::Out {
                            reg: asm::InlineAsmRegOrRegClass::Reg(clobber),
                            late: true,
                            expr: None,
                        },
                        self.lower_span(abi_span),
                    ));
                    clobbered.insert(clobber);
                }
            }
        }

        // Move everything into the HIR arena and assemble the final node.
        let operands = self.arena.alloc_from_iter(operands);
        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
        let template_strs = self.arena.alloc_from_iter(
            asm.template_strs
                .iter()
                .map(|(sym, snippet, span)| (*sym, *snippet, self.lower_span(*span))),
        );
        let line_spans =
            self.arena.alloc_from_iter(asm.line_spans.iter().map(|span| self.lower_span(*span)));
        let hir_asm =
            hir::InlineAsm { template, template_strs, operands, options: asm.options, line_spans };
        self.arena.alloc(hir_asm)
    }
}