use crate::{ImplTraitContext, ImplTraitPosition, ParamMode, ResolverAstLoweringExt};

use super::errors::{
    AbiSpecifiedMultipleTimes, AttSyntaxOnlyX86, ClobberAbiNotSupported,
    InlineAsmUnsupportedTarget, InvalidAbiClobberAbi, InvalidAsmTemplateModifierConst,
    InvalidAsmTemplateModifierRegClass, InvalidAsmTemplateModifierRegClassSub,
    InvalidAsmTemplateModifierSym, InvalidRegister, InvalidRegisterClass, RegisterClassOnlyClobber,
    RegisterConflict,
};
use super::LoweringContext;

use rustc_ast::ptr::P;
use rustc_ast::*;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::definitions::DefPathData;
use rustc_session::parse::feature_err;
use rustc_span::{sym, Span};
use rustc_target::asm;
use std::collections::hash_map::Entry;
use std::fmt::Write;

impl<'a, 'hir> LoweringContext<'a, 'hir> {
    pub(crate) fn lower_inline_asm(
        &mut self,
        sp: Span,
        asm: &InlineAsm,
    ) -> &'hir hir::InlineAsm<'hir> {
        // Rustdoc needs to support asm! from foreign architectures: don't try
        // lowering the register constraints in this case.
        let asm_arch =
            if self.tcx.sess.opts.actually_rustdoc { None } else { self.tcx.sess.asm_arch };
        if asm_arch.is_none() && !self.tcx.sess.opts.actually_rustdoc {
            self.tcx.sess.emit_err(InlineAsmUnsupportedTarget { span: sp });
        }
        if let Some(asm_arch) = asm_arch {
            // Inline assembly is currently only stable for these architectures.
            let is_stable = matches!(
                asm_arch,
                asm::InlineAsmArch::X86
                    | asm::InlineAsmArch::X86_64
                    | asm::InlineAsmArch::Arm
                    | asm::InlineAsmArch::AArch64
                    | asm::InlineAsmArch::RiscV32
                    | asm::InlineAsmArch::RiscV64
                    | asm::InlineAsmArch::LoongArch64
            );
            if !is_stable && !self.tcx.features().asm_experimental_arch {
                feature_err(
                    &self.tcx.sess.parse_sess,
                    sym::asm_experimental_arch,
                    sp,
                    "inline assembly is not stable yet on this architecture",
                )
                .emit();
            }
        }
        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
            && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
            && !self.tcx.sess.opts.actually_rustdoc
        {
            self.tcx.sess.emit_err(AttSyntaxOnlyX86 { span: sp });
        }
        if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind {
            feature_err(
                &self.tcx.sess.parse_sess,
                sym::asm_unwind,
                sp,
                "the `may_unwind` option is unstable",
            )
            .emit();
        }

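        // Validate the ABIs named in `clobber_abi(...)` and collect them keyed
        // by the parsed ABI, so that two different names which resolve to the
        // same ABI are reported as duplicates rather than silently accepted.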
        let mut clobber_abis = FxIndexMap::default();
        if let Some(asm_arch) = asm_arch {
            for (abi_name, abi_span) in &asm.clobber_abis {
                match asm::InlineAsmClobberAbi::parse(asm_arch, &self.tcx.sess.target, *abi_name) {
                    Ok(abi) => {
                        // If the abi was already in the list, emit an error
                        match clobber_abis.get(&abi) {
                            Some((prev_name, prev_sp)) => {
                                // Multiple different abi names may actually be the same ABI
                                // If the specified ABIs are not the same name, alert the user that they resolve to the same ABI
                                let source_map = self.tcx.sess.source_map();
                                let equivalent = (source_map.span_to_snippet(*prev_sp)
                                    != source_map.span_to_snippet(*abi_span))
                                .then_some(());

                                self.tcx.sess.emit_err(AbiSpecifiedMultipleTimes {
                                    abi_span: *abi_span,
                                    prev_name: *prev_name,
                                    prev_span: *prev_sp,
                                    equivalent,
                                });
                            }
                            None => {
                                clobber_abis.insert(abi, (*abi_name, *abi_span));
                            }
                        }
                    }
                    Err(&[]) => {
                        self.tcx.sess.emit_err(ClobberAbiNotSupported { abi_span: *abi_span });
                    }
                    Err(supported_abis) => {
                        let mut abis = format!("`{}`", supported_abis[0]);
                        for m in &supported_abis[1..] {
                            let _ = write!(abis, ", `{m}`");
                        }
                        self.tcx.sess.emit_err(InvalidAbiClobberAbi {
                            abi_span: *abi_span,
                            supported_abis: abis,
                        });
                    }
                }
            }
        }

        // Lower operands to HIR. We use dummy register classes if an error
        // occurs during lowering because we still need to be able to produce a
        // valid HIR.
        let sess = self.tcx.sess;
        let mut operands: Vec<_> = asm
            .operands
            .iter()
            .map(|(op, op_sp)| {
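                // Parse the register or register class name for the target
                // architecture; on error fall back to the `Err` placeholder so
                // lowering can continue and still produce valid HIR.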
                let lower_reg = |&reg: &_| match reg {
                    InlineAsmRegOrRegClass::Reg(reg) => {
                        asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmReg::parse(asm_arch, reg).unwrap_or_else(|error| {
                                sess.emit_err(InvalidRegister { op_span: *op_sp, reg, error });
                                asm::InlineAsmReg::Err
                            })
                        } else {
                            asm::InlineAsmReg::Err
                        })
                    }
                    InlineAsmRegOrRegClass::RegClass(reg_class) => {
                        asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmRegClass::parse(asm_arch, reg_class).unwrap_or_else(
                                |error| {
                                    sess.emit_err(InvalidRegisterClass {
                                        op_span: *op_sp,
                                        reg_class,
                                        error,
                                    });
                                    asm::InlineAsmRegClass::Err
                                },
                            )
                        } else {
                            asm::InlineAsmRegClass::Err
                        })
                    }
                };

                let op = match op {
                    InlineAsmOperand::In { reg, expr } => hir::InlineAsmOperand::In {
                        reg: lower_reg(reg),
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::Out { reg, late, expr } => hir::InlineAsmOperand::Out {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: expr.as_ref().map(|expr| self.lower_expr(expr)),
                    },
                    InlineAsmOperand::InOut { reg, late, expr } => hir::InlineAsmOperand::InOut {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => {
                        hir::InlineAsmOperand::SplitInOut {
                            reg: lower_reg(reg),
                            late: *late,
                            in_expr: self.lower_expr(in_expr),
                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)),
                        }
                    }
                    InlineAsmOperand::Const { anon_const } => {
                        if !self.tcx.features().asm_const {
                            feature_err(
                                &sess.parse_sess,
                                sym::asm_const,
                                *op_sp,
                                "const operands for inline assembly are unstable",
                            )
                            .emit();
                        }
                        hir::InlineAsmOperand::Const {
                            anon_const: self.lower_anon_const(anon_const),
                        }
                    }
                    InlineAsmOperand::Sym { sym } => {
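                        // A `sym` operand either names a `static`, which keeps
                        // its path and lowers to `SymStatic`, or names a
                        // function, which is wrapped in an anonymous constant
                        // below and lowers to `SymFn`.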
                        let static_def_id = self
                            .resolver
                            .get_partial_res(sym.id)
                            .and_then(|res| res.full_res())
                            .and_then(|res| match res {
                                Res::Def(DefKind::Static(_), def_id) => Some(def_id),
                                _ => None,
                            });

                        if let Some(def_id) = static_def_id {
                            let path = self.lower_qpath(
                                sym.id,
                                &sym.qself,
                                &sym.path,
                                ParamMode::Optional,
                                &ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                            );
                            hir::InlineAsmOperand::SymStatic { path, def_id }
                        } else {
                            // Replace the InlineAsmSym AST node with an
                            // Expr using the name node id.
                            let expr = Expr {
                                id: sym.id,
                                kind: ExprKind::Path(sym.qself.clone(), sym.path.clone()),
                                span: *op_sp,
                                attrs: AttrVec::new(),
                                tokens: None,
                            };

                            // Wrap the expression in an AnonConst.
                            let parent_def_id = self.current_hir_id_owner;
                            let node_id = self.next_node_id();
                            self.create_def(
                                parent_def_id.def_id,
                                node_id,
                                DefPathData::AnonConst,
                                *op_sp,
                            );
                            let anon_const = AnonConst { id: node_id, value: P(expr) };
                            hir::InlineAsmOperand::SymFn {
                                anon_const: self.lower_anon_const(&anon_const),
                            }
                        }
                    }
                };
                (op, self.lower_span(*op_sp))
            })
            .collect();

        // Validate template modifiers against the register classes for the operands
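        // For example, the `e` in a placeholder like `{0:e}` is only accepted
        // if the operand's register class lists it as a valid modifier;
        // otherwise the error below names the modifiers that are supported.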
        for p in &asm.template {
            if let InlineAsmTemplatePiece::Placeholder {
                operand_idx,
                modifier: Some(modifier),
                span: placeholder_span,
            } = *p
            {
                let op_sp = asm.operands[operand_idx].1;
                match &operands[operand_idx].0 {
                    hir::InlineAsmOperand::In { reg, .. }
                    | hir::InlineAsmOperand::Out { reg, .. }
                    | hir::InlineAsmOperand::InOut { reg, .. }
                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
                        let class = reg.reg_class();
                        if class == asm::InlineAsmRegClass::Err {
                            continue;
                        }
                        let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
                        if !valid_modifiers.contains(&modifier) {
                            let sub = if !valid_modifiers.is_empty() {
                                let mut mods = format!("`{}`", valid_modifiers[0]);
                                for m in &valid_modifiers[1..] {
                                    let _ = write!(mods, ", `{m}`");
                                }
                                InvalidAsmTemplateModifierRegClassSub::SupportModifier {
                                    class_name: class.name(),
                                    modifiers: mods,
                                }
                            } else {
                                InvalidAsmTemplateModifierRegClassSub::DoesNotSupportModifier {
                                    class_name: class.name(),
                                }
                            };
                            sess.emit_err(InvalidAsmTemplateModifierRegClass {
                                placeholder_span,
                                op_span: op_sp,
                                sub,
                            });
                        }
                    }
                    hir::InlineAsmOperand::Const { .. } => {
                        sess.emit_err(InvalidAsmTemplateModifierConst {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::SymFn { .. }
                    | hir::InlineAsmOperand::SymStatic { .. } => {
                        sess.emit_err(InvalidAsmTemplateModifierSym {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                }
            }
        }

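        // Check that explicit registers are not used by more than one operand
        // in a conflicting way: two inputs (or two outputs) may not share a
        // register, and an input may only share a register with a late output.
        // e.g. `asm!("", in("eax") a, in("eax") b)` is rejected here.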
        let mut used_input_regs = FxHashMap::default();
        let mut used_output_regs = FxHashMap::default();

        for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
            if let Some(reg) = op.reg() {
                let reg_class = reg.reg_class();
                if reg_class == asm::InlineAsmRegClass::Err {
                    continue;
                }

                // Some register classes can only be used as clobbers. This
                // means that we disallow passing a value in/out of the asm and
                // require that the operand name an explicit register, not a
                // register class.
                if reg_class.is_clobber_only(asm_arch.unwrap()) && !op.is_clobber() {
                    sess.emit_err(RegisterClassOnlyClobber {
                        op_span: op_sp,
                        reg_class_name: reg_class.name(),
                    });
                    continue;
                }

                // Check for conflicts between explicit register operands.
                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
                    let (input, output) = match op {
                        hir::InlineAsmOperand::In { .. } => (true, false),

                        // Late outputs do not conflict with inputs, but normal outputs do
                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),

                        hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),

                        hir::InlineAsmOperand::Const { .. }
                        | hir::InlineAsmOperand::SymFn { .. }
                        | hir::InlineAsmOperand::SymStatic { .. } => {
                            unreachable!()
                        }
                    };

                    // Flag to output the error only once per operand
                    let mut skip = false;
                    reg.overlapping_regs(|r| {
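                        // `check` reports a conflict if `r` was already claimed
                        // by an earlier operand in the given usage map; only the
                        // operand's own register (not merely overlapping ones)
                        // is recorded for later operands to check against.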
                        let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
                                         input| {
                            match used_regs.entry(r) {
                                Entry::Occupied(o) => {
                                    if skip {
                                        return;
                                    }
                                    skip = true;

                                    let idx2 = *o.get();
                                    let (ref op2, op_sp2) = operands[idx2];
                                    let Some(asm::InlineAsmRegOrRegClass::Reg(reg2)) = op2.reg() else {
                                        unreachable!();
                                    };

                                    let in_out = match (op, op2) {
                                        (
                                            hir::InlineAsmOperand::In { .. },
                                            hir::InlineAsmOperand::Out { late, .. },
                                        )
                                        | (
                                            hir::InlineAsmOperand::Out { late, .. },
                                            hir::InlineAsmOperand::In { .. },
                                        ) => {
                                            assert!(!*late);
                                            let out_op_sp = if input { op_sp2 } else { op_sp };
                                            Some(out_op_sp)
                                        },
                                        _ => None,
                                    };

                                    sess.emit_err(RegisterConflict {
                                        op_span1: op_sp,
                                        op_span2: op_sp2,
                                        reg1_name: reg.name(),
                                        reg2_name: reg2.name(),
                                        in_out
                                    });
                                }
                                Entry::Vacant(v) => {
                                    if r == reg {
                                        v.insert(idx);
                                    }
                                }
                            }
                        };
                        if input {
                            check(&mut used_input_regs, true);
                        }
                        if output {
                            check(&mut used_output_regs, false);
                        }
                    });
                }
            }
        }

        // If a clobber_abi is specified, add the necessary clobbers to the
        // operands list.
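        // A clobber is only added for registers that do not overlap an explicit
        // output operand, since those registers are already marked as written
        // by the asm.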
        let mut clobbered = FxHashSet::default();
        for (abi, (_, abi_span)) in clobber_abis {
            for &clobber in abi.clobbered_regs() {
                // Don't emit a clobber for a register already clobbered
                if clobbered.contains(&clobber) {
                    continue;
                }

                let mut output_used = false;
                clobber.overlapping_regs(|reg| {
                    if used_output_regs.contains_key(&reg) {
                        output_used = true;
                    }
                });

                if !output_used {
                    operands.push((
                        hir::InlineAsmOperand::Out {
                            reg: asm::InlineAsmRegOrRegClass::Reg(clobber),
                            late: true,
                            expr: None,
                        },
                        self.lower_span(abi_span),
                    ));
                    clobbered.insert(clobber);
                }
            }
        }

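        // Allocate the lowered template, operands and spans in the arena and
        // assemble the final `hir::InlineAsm`.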
        let operands = self.arena.alloc_from_iter(operands);
        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
        let template_strs = self.arena.alloc_from_iter(
            asm.template_strs
                .iter()
                .map(|(sym, snippet, span)| (*sym, *snippet, self.lower_span(*span))),
        );
        let line_spans =
            self.arena.alloc_from_iter(asm.line_spans.iter().map(|span| self.lower_span(*span)));
        let hir_asm =
            hir::InlineAsm { template, template_strs, operands, options: asm.options, line_spans };
        self.arena.alloc(hir_asm)
    }
}