use super::place::PlaceRef;
use super::{FunctionCx, LocalRef};

use crate::base;
use crate::common::TypeKind;
use crate::glue;
use crate::traits::*;
use crate::MemFlags;

use rustc_middle::mir;
use rustc_middle::mir::interpret::{alloc_range, ConstValue, Pointer, Scalar};
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
use rustc_middle::ty::Ty;
use rustc_target::abi::{self, Abi, Align, Size};

use std::fmt;

/// The representation of a Rust value. The enum variant is in fact
/// uniquely determined by the value's type, but is kept as a
/// safety check.
#[derive(Copy, Clone, Debug)]
pub enum OperandValue<V> {
    /// A reference to the actual operand. The data is guaranteed
    /// to be valid for the operand's lifetime.
    /// The second value, if any, is the extra data (vtable or length)
    /// which indicates that it refers to an unsized rvalue.
    ///
    /// An `OperandValue` has this variant for types which are neither
    /// `Immediate` nor `Pair`s. The backend value in this variant must be a
    /// pointer to the *non*-immediate backend type. That pointee type is the
    /// one returned by [`LayoutTypeMethods::backend_type`].
    Ref(V, Option<V>, Align),
    /// A single LLVM immediate value.
    ///
    /// An `OperandValue` *must* be this variant for any type for which
    /// [`LayoutTypeMethods::is_backend_immediate`] returns `true`.
    /// The backend value in this variant must be the *immediate* backend type,
    /// as returned by [`LayoutTypeMethods::immediate_backend_type`].
    Immediate(V),
    /// A pair of immediate LLVM values. Used by fat pointers too.
    ///
    /// An `OperandValue` *must* be this variant for any type for which
    /// [`LayoutTypeMethods::is_backend_scalar_pair`] returns `true`.
    /// The backend values in this variant must be the *immediate* backend types,
    /// as returned by [`LayoutTypeMethods::scalar_pair_element_backend_type`]
    /// with `immediate: true`.
    Pair(V, V),
    /// A value taking no bytes, and which therefore needs no LLVM value at all.
    ///
    /// If you ever need a `V` to pass to something, get a fresh poison value
    /// from [`ConstMethods::const_poison`].
    ///
    /// An `OperandValue` *must* be this variant for any type for which
    /// `is_zst` on its `Layout` returns `true`.
    ZeroSized,
}
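
// Illustrative mapping from Rust types to variants (a non-normative sketch;
// the actual choice is always made by the layout queries named above):
//
//     u64         -> Immediate(i64 value)
//     &str        -> Pair(data pointer, usize length)
//     [u8; 1024]  -> Ref(pointer to the bytes, None, align 1)
//     ()          -> ZeroSized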

/// An `OperandRef` is an "SSA" reference to a Rust value, along with
/// its type.
///
/// NOTE: unless you know a value's type exactly, you should not
/// generate LLVM opcodes acting on it and instead act via methods,
/// to avoid nasty edge cases. In particular, using `Builder::store`
/// directly is sure to cause problems -- use `OperandRef::store`
/// instead.
#[derive(Copy, Clone)]
pub struct OperandRef<'tcx, V> {
    /// The value.
    pub val: OperandValue<V>,

    /// The layout of the value, based on its Rust type.
    pub layout: TyAndLayout<'tcx>,
}

impl<V: CodegenObject> fmt::Debug for OperandRef<'_, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "OperandRef({:?} @ {:?})", self.val, self.layout)
    }
}

impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
    pub fn zero_sized(layout: TyAndLayout<'tcx>) -> OperandRef<'tcx, V> {
        assert!(layout.is_zst());
        OperandRef { val: OperandValue::ZeroSized, layout }
    }

    pub fn from_const<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        val: ConstValue<'tcx>,
        ty: Ty<'tcx>,
    ) -> Self {
        let layout = bx.layout_of(ty);

        let val = match val {
            ConstValue::Scalar(x) => {
                let Abi::Scalar(scalar) = layout.abi else {
                    bug!("from_const: invalid ByVal layout: {:#?}", layout);
                };
                let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
                OperandValue::Immediate(llval)
            }
            ConstValue::ZeroSized => return OperandRef::zero_sized(layout),
            ConstValue::Slice { data, start, end } => {
                let Abi::ScalarPair(a_scalar, _) = layout.abi else {
                    bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
                };
                let a = Scalar::from_pointer(
                    Pointer::new(bx.tcx().create_memory_alloc(data), Size::from_bytes(start)),
                    &bx.tcx(),
                );
                let a_llval = bx.scalar_to_backend(
                    a,
                    a_scalar,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_llval = bx.const_usize((end - start) as u64);
                OperandValue::Pair(a_llval, b_llval)
            }
            ConstValue::ByRef { alloc, offset } => {
                return Self::from_const_alloc(bx, layout, alloc, offset);
            }
        };

        OperandRef { val, layout }
    }
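
    // A non-normative sketch of what `from_const` yields, assuming the usual
    // layouts: the constant `"ab"` as a `&str` is a `ConstValue::Slice` and
    // becomes `OperandValue::Pair(ptr, 2)`, while `7u32` is a
    // `ConstValue::Scalar` and becomes `OperandValue::Immediate(i32 7)`.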

    fn from_const_alloc<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
        alloc: rustc_middle::mir::interpret::ConstAllocation<'tcx>,
        offset: Size,
    ) -> Self {
        let alloc_align = alloc.inner().align;
        assert_eq!(alloc_align, layout.align.abi);
        let ty = bx.type_ptr_to(bx.cx().backend_type(layout));

        let read_scalar = |start, size, s: abi::Scalar, ty| {
            let val = alloc
                .0
                .read_scalar(
                    bx,
                    alloc_range(start, size),
                    /*read_provenance*/ matches!(s.primitive(), abi::Pointer(_)),
                )
                .unwrap();
            bx.scalar_to_backend(val, s, ty)
        };

        // It may seem like all types with `Scalar` or `ScalarPair` ABI are fair game at this point.
        // However, `MaybeUninit<u64>` is considered a `Scalar` as far as its layout is concerned --
        // and yet cannot be represented by an interpreter `Scalar`, since we have to handle the
        // case where some of the bytes are initialized and others are not. So, below we only treat
        // the value as a `Scalar` (or `ScalarPair`) when the layout says its scalar components are
        // `Initialized`, and fall back to a memory load otherwise.
        match layout.abi {
            Abi::Scalar(s @ abi::Scalar::Initialized { .. }) => {
                let size = s.size(bx);
                assert_eq!(size, layout.size, "abi::Scalar size does not match layout size");
                let val = read_scalar(Size::ZERO, size, s, ty);
                OperandRef { val: OperandValue::Immediate(val), layout }
            }
            Abi::ScalarPair(
                a @ abi::Scalar::Initialized { .. },
                b @ abi::Scalar::Initialized { .. },
            ) => {
                let (a_size, b_size) = (a.size(bx), b.size(bx));
                let b_offset = a_size.align_to(b.align(bx).abi);
                assert!(b_offset.bytes() > 0);
                let a_val = read_scalar(
                    Size::ZERO,
                    a_size,
                    a,
                    bx.scalar_pair_element_backend_type(layout, 0, true),
                );
                let b_val = read_scalar(
                    b_offset,
                    b_size,
                    b,
                    bx.scalar_pair_element_backend_type(layout, 1, true),
                );
                OperandRef { val: OperandValue::Pair(a_val, b_val), layout }
            }
            _ if layout.is_zst() => OperandRef::zero_sized(layout),
            _ => {
                // Neither a scalar nor scalar pair. Load from a place
                let init = bx.const_data_from_alloc(alloc);
                let base_addr = bx.static_addr_of(init, alloc_align, None);

                let llval = bx.const_ptr_byte_offset(base_addr, offset);
                let llval = bx.const_bitcast(llval, ty);
                bx.load_operand(PlaceRef::new_sized(llval, layout))
            }
        }
    }

    /// Asserts that this operand refers to a scalar and returns
    /// its value.
    pub fn immediate(self) -> V {
        match self.val {
            OperandValue::Immediate(s) => s,
            _ => bug!("not immediate: {:?}", self),
        }
    }
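
    // Typical use (sketch): binary operations on scalar operands unwrap the
    // backend value first, e.g. `bx.add(lhs.immediate(), rhs.immediate())`.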

    pub fn deref<Cx: LayoutTypeMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
        if self.layout.ty.is_box() {
            bug!("dereferencing {:?} in codegen", self.layout.ty);
        }

        let projected_ty = self
            .layout
            .ty
            .builtin_deref(true)
            .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self))
            .ty;

        let (llptr, llextra) = match self.val {
            OperandValue::Immediate(llptr) => (llptr, None),
            OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
            OperandValue::Ref(..) => bug!("Deref of by-Ref operand {:?}", self),
            OperandValue::ZeroSized => bug!("Deref of ZST operand {:?}", self),
        };
        let layout = cx.layout_of(projected_ty);
        PlaceRef { llval: llptr, llextra, layout, align: layout.align.abi }
    }
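
    // Illustration (non-normative): dereferencing an `&u32` operand
    // (`Immediate(ptr)`) yields a sized `PlaceRef` for `u32`, while an
    // `&[u32]` operand (`Pair(ptr, len)`) yields a `PlaceRef` whose
    // `llextra` carries the length.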

    /// If this operand is a `Pair`, we return an aggregate with the two values.
    /// For other cases, see `immediate`.
    pub fn immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
    ) -> V {
        if let OperandValue::Pair(a, b) = self.val {
            let llty = bx.cx().backend_type(self.layout);
            debug!("Operand::immediate_or_packed_pair: packing {:?} into {:?}", self, llty);
            // Reconstruct the immediate aggregate.
            let mut llpair = bx.cx().const_poison(llty);
            let imm_a = bx.from_immediate(a);
            let imm_b = bx.from_immediate(b);
            llpair = bx.insert_value(llpair, imm_a, 0);
            llpair = bx.insert_value(llpair, imm_b, 1);
            llpair
        } else {
            self.immediate()
        }
    }
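
    // Note (a sketch, not checked anywhere): this is intended as the inverse
    // of `from_immediate_or_packed_pair` below, e.g. a `(u32, bool)` operand
    // packed into one aggregate here should unpack to an equivalent `Pair`.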

    /// If the type is a pair, we return a `Pair`, otherwise, an `Immediate`.
    pub fn from_immediate_or_packed_pair<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        llval: V,
        layout: TyAndLayout<'tcx>,
    ) -> Self {
        let val = if let Abi::ScalarPair(a, b) = layout.abi {
            debug!("Operand::from_immediate_or_packed_pair: unpacking {:?} @ {:?}", llval, layout);

            // Deconstruct the immediate aggregate.
            let a_llval = bx.extract_value(llval, 0);
            let a_llval = bx.to_immediate_scalar(a_llval, a);
            let b_llval = bx.extract_value(llval, 1);
            let b_llval = bx.to_immediate_scalar(b_llval, b);
            OperandValue::Pair(a_llval, b_llval)
        } else {
            OperandValue::Immediate(llval)
        };
        OperandRef { val, layout }
    }

    pub fn extract_field<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        &self,
        bx: &mut Bx,
        i: usize,
    ) -> Self {
        let field = self.layout.field(bx.cx(), i);
        let offset = self.layout.fields.offset(i);

        let mut val = match (self.val, self.layout.abi) {
            // If the field is ZST, it has no data.
            _ if field.is_zst() => OperandValue::ZeroSized,

            // Newtype of a scalar, scalar pair or vector.
            (OperandValue::Immediate(_) | OperandValue::Pair(..), _)
                if field.size == self.layout.size =>
            {
                assert_eq!(offset.bytes(), 0);
                self.val
            }

            // Extract a scalar component from a pair.
            (OperandValue::Pair(a_llval, b_llval), Abi::ScalarPair(a, b)) => {
                if offset.bytes() == 0 {
                    assert_eq!(field.size, a.size(bx.cx()));
                    OperandValue::Immediate(a_llval)
                } else {
                    assert_eq!(offset, a.size(bx.cx()).align_to(b.align(bx.cx()).abi));
                    assert_eq!(field.size, b.size(bx.cx()));
                    OperandValue::Immediate(b_llval)
                }
            }

            // `#[repr(simd)]` types are also immediate.
            (OperandValue::Immediate(llval), Abi::Vector { .. }) => {
                OperandValue::Immediate(bx.extract_element(llval, bx.cx().const_usize(i as u64)))
            }

            _ => bug!("OperandRef::extract_field({:?}): not applicable", self),
        };

        match (&mut val, field.abi) {
            (OperandValue::ZeroSized, _) => {}
            (
                OperandValue::Immediate(llval),
                Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. },
            ) => {
                // Bools in union fields need to be truncated.
                *llval = bx.to_immediate(*llval, field);
                // HACK(eddyb) have to bitcast pointers until LLVM removes pointee types.
                let ty = bx.cx().immediate_backend_type(field);
                if bx.type_kind(ty) == TypeKind::Pointer {
                    *llval = bx.pointercast(*llval, ty);
                }
            }
            (OperandValue::Pair(a, b), Abi::ScalarPair(a_abi, b_abi)) => {
                // Bools in union fields need to be truncated.
                *a = bx.to_immediate_scalar(*a, a_abi);
                *b = bx.to_immediate_scalar(*b, b_abi);
                // HACK(eddyb) have to bitcast pointers until LLVM removes pointee types.
                let a_ty = bx.cx().scalar_pair_element_backend_type(field, 0, true);
                let b_ty = bx.cx().scalar_pair_element_backend_type(field, 1, true);
                if bx.type_kind(a_ty) == TypeKind::Pointer {
                    *a = bx.pointercast(*a, a_ty);
                }
                if bx.type_kind(b_ty) == TypeKind::Pointer {
                    *b = bx.pointercast(*b, b_ty);
                }
            }
            // Newtype vector of array, e.g. #[repr(simd)] struct S([i32; 4]);
            (OperandValue::Immediate(llval), Abi::Aggregate { sized: true }) => {
                assert!(matches!(self.layout.abi, Abi::Vector { .. }));

                let llty = bx.cx().backend_type(self.layout);
                let llfield_ty = bx.cx().backend_type(field);

                // Can't bitcast an aggregate, so round trip through memory.
                let lltemp = bx.alloca(llfield_ty, field.align.abi);
                let llptr = bx.pointercast(lltemp, bx.cx().type_ptr_to(llty));
                bx.store(*llval, llptr, field.align.abi);
                *llval = bx.load(llfield_ty, lltemp, field.align.abi);
            }
            (OperandValue::Immediate(_), Abi::Uninhabited | Abi::Aggregate { sized: false }) => {
                bug!()
            }
            (OperandValue::Pair(..), _) => bug!(),
            (OperandValue::Ref(..), _) => bug!(),
        }

        OperandRef { val, layout: field }
    }
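
    // Worked example (non-normative): for an operand with `ScalarPair` ABI
    // whose components are a `u64` at offset 0 and a `u32` at offset 8,
    // extracting the second field hits the `Pair(a_llval, b_llval)` arm and
    // returns `Immediate(b_llval)` with no memory traffic; the
    // `assert_eq!(offset, ...)` above checks that offset 8 indeed equals
    // `a.size().align_to(b.align())`.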
}

impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
    /// Returns an `OperandValue` that's generally UB to use in any way.
    ///
    /// Depending on the `layout`, returns `ZeroSized` for ZSTs, an `Immediate` or
    /// `Pair` containing poison value(s), or a `Ref` containing a poison pointer.
    ///
    /// Supports sized types only.
    pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        bx: &mut Bx,
        layout: TyAndLayout<'tcx>,
    ) -> OperandValue<V> {
        assert!(layout.is_sized());
        if layout.is_zst() {
            OperandValue::ZeroSized
        } else if bx.cx().is_backend_immediate(layout) {
            let ibty = bx.cx().immediate_backend_type(layout);
            OperandValue::Immediate(bx.const_poison(ibty))
        } else if bx.cx().is_backend_scalar_pair(layout) {
            let ibty0 = bx.cx().scalar_pair_element_backend_type(layout, 0, true);
            let ibty1 = bx.cx().scalar_pair_element_backend_type(layout, 1, true);
            OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
        } else {
            let bty = bx.cx().backend_type(layout);
            let ptr_bty = bx.cx().type_ptr_to(bty);
            OperandValue::Ref(bx.const_poison(ptr_bty), None, layout.align.abi)
        }
    }
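
    // Illustration (non-normative): `poison` for `u32` yields an `Immediate`
    // poison value, for `&[u8]` a `Pair` of two poison values, and for a
    // large array a `Ref` whose pointer itself is poison.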

    pub fn store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::empty());
    }

    pub fn volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE);
    }

    pub fn unaligned_volatile_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::VOLATILE | MemFlags::UNALIGNED);
    }

    pub fn nontemporal_store<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
    ) {
        self.store_with_flags(bx, dest, MemFlags::NONTEMPORAL);
    }

    fn store_with_flags<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        dest: PlaceRef<'tcx, V>,
        flags: MemFlags,
    ) {
        debug!("OperandRef::store: operand={:?}, dest={:?}", self, dest);
        match self {
            OperandValue::ZeroSized => {
                // Avoid generating stores of zero-sized values, because the only way to have a zero-sized
                // value is through `undef`/`poison`, and the store itself is useless.
            }
            OperandValue::Ref(r, None, source_align) => {
                if flags.contains(MemFlags::NONTEMPORAL) {
                    // HACK(nox): This is inefficient but there is no nontemporal memcpy.
                    let ty = bx.backend_type(dest.layout);
                    let ptr = bx.pointercast(r, bx.type_ptr_to(ty));
                    let val = bx.load(ty, ptr, source_align);
                    bx.store_with_flags(val, dest.llval, dest.align, flags);
                    return;
                }
                base::memcpy_ty(bx, dest.llval, dest.align, r, source_align, dest.layout, flags)
            }
            OperandValue::Ref(_, Some(_), _) => {
                bug!("cannot directly store unsized values");
            }
            OperandValue::Immediate(s) => {
                let val = bx.from_immediate(s);
                bx.store_with_flags(val, dest.llval, dest.align, flags);
            }
            OperandValue::Pair(a, b) => {
                let Abi::ScalarPair(a_scalar, b_scalar) = dest.layout.abi else {
                    bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout);
                };
                let ty = bx.backend_type(dest.layout);
                let b_offset = a_scalar.size(bx).align_to(b_scalar.align(bx).abi);
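                // Worked example (non-normative): if `a_scalar` occupies one
                // byte and `b_scalar` has 4-byte alignment, `b_offset` is
                // `Size(1).align_to(4) == Size(4)`.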

                let llptr = bx.struct_gep(ty, dest.llval, 0);
                let val = bx.from_immediate(a);
                let align = dest.align;
                bx.store_with_flags(val, llptr, align, flags);

                let llptr = bx.struct_gep(ty, dest.llval, 1);
                let val = bx.from_immediate(b);
                let align = dest.align.restrict_for_offset(b_offset);
                bx.store_with_flags(val, llptr, align, flags);
            }
        }
    }

    pub fn store_unsized<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
        self,
        bx: &mut Bx,
        indirect_dest: PlaceRef<'tcx, V>,
    ) {
        debug!("OperandRef::store_unsized: operand={:?}, indirect_dest={:?}", self, indirect_dest);
        // `indirect_dest` must have `*mut T` type. We extract `T` out of it.
        let unsized_ty = indirect_dest
            .layout
            .ty
            .builtin_deref(true)
            .unwrap_or_else(|| bug!("indirect_dest has non-pointer type: {:?}", indirect_dest))
            .ty;

        let OperandValue::Ref(llptr, Some(llextra), _) = self else {
            bug!("store_unsized called with a sized value")
        };

        // Allocate an appropriate region on the stack, and copy the value into it. Since alloca
        // doesn't support dynamic alignment, we allocate an extra align - 1 bytes, and align the
        // pointer manually.
        let (size, align) = glue::size_and_align_of_dst(bx, unsized_ty, Some(llextra));
        let one = bx.const_usize(1);
        let align_minus_1 = bx.sub(align, one);
        let size_extra = bx.add(size, align_minus_1);
        let min_align = Align::ONE;
        let alloca = bx.byte_array_alloca(size_extra, min_align);
        let address = bx.ptrtoint(alloca, bx.type_isize());
        let neg_address = bx.neg(address);
        let offset = bx.and(neg_address, align_minus_1);
        let dst = bx.inbounds_gep(bx.type_i8(), alloca, &[offset]);
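        // Worked example of the rounding above (non-normative): with
        // `align == 8` and `address == 0x1003`, `offset` is
        // `-0x1003 & 7 == 5`, so `dst == 0x1008`: 8-aligned, and still within
        // the `size + 7` bytes we allocated.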
        bx.memcpy(dst, min_align, llptr, min_align, size, MemFlags::empty());

        // Store the allocated region and the extra to the indirect place.
        let indirect_operand = OperandValue::Pair(dst, llextra);
        indirect_operand.store(bx, indirect_dest);
    }
}

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    fn maybe_codegen_consume_direct(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> Option<OperandRef<'tcx, Bx::Value>> {
        debug!("maybe_codegen_consume_direct(place_ref={:?})", place_ref);

        match self.locals[place_ref.local] {
            LocalRef::Operand(mut o) => {
                // Moves out of scalar and scalar pair fields are trivial.
                for elem in place_ref.projection.iter() {
                    match elem {
                        mir::ProjectionElem::Field(ref f, _) => {
                            o = o.extract_field(bx, f.index());
                        }
                        mir::ProjectionElem::Index(_)
                        | mir::ProjectionElem::ConstantIndex { .. } => {
                            // ZSTs don't require any actual memory access.
                            // FIXME(eddyb) deduplicate this with the identical
                            // checks in `codegen_consume` and `extract_field`.
                            let elem = o.layout.field(bx.cx(), 0);
                            if elem.is_zst() {
                                o = OperandRef::zero_sized(elem);
                            } else {
                                return None;
                            }
                        }
                        _ => return None,
                    }
                }

                Some(o)
            }
            LocalRef::PendingOperand => {
                bug!("use of {:?} before def", place_ref);
            }
            LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => {
                // watch out for locals that do not have an
                // alloca; they are handled somewhat differently
                None
            }
        }
    }
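
    // Illustration (non-normative): if local `_1: (u32, bool)` lives as an
    // SSA `Pair` operand, consuming the place `_1.0` takes the `Field` arm
    // and goes through `extract_field` with no load; indexing projections
    // return `None` (forcing a memory path) unless the element is a ZST.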

    pub fn codegen_consume(
        &mut self,
        bx: &mut Bx,
        place_ref: mir::PlaceRef<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_consume(place_ref={:?})", place_ref);

        let ty = self.monomorphized_place_ty(place_ref);
        let layout = bx.cx().layout_of(ty);

        // ZSTs don't require any actual memory access.
        if layout.is_zst() {
            return OperandRef::zero_sized(layout);
        }

        if let Some(o) = self.maybe_codegen_consume_direct(bx, place_ref) {
            return o;
        }

        // for most places, to consume them we just load them
        // out from their home
        let place = self.codegen_place(bx, place_ref);
        bx.load_operand(place)
    }

    pub fn codegen_operand(
        &mut self,
        bx: &mut Bx,
        operand: &mir::Operand<'tcx>,
    ) -> OperandRef<'tcx, Bx::Value> {
        debug!("codegen_operand(operand={:?})", operand);

        match *operand {
            mir::Operand::Copy(ref place) | mir::Operand::Move(ref place) => {
                self.codegen_consume(bx, place.as_ref())
            }

            mir::Operand::Constant(ref constant) => {
                // This cannot fail because we checked all required_consts in advance.
                self.eval_mir_constant_to_operand(bx, constant).unwrap_or_else(|_err| {
                    span_bug!(constant.span, "erroneous constant not captured by required_consts")
                })
            }
        }
    }
}