use rustc_middle::mir;
use rustc_middle::mir::NonDivergingIntrinsic;

use super::FunctionCx;
use super::LocalRef;
use crate::traits::BuilderMethods;
use crate::traits::*;

impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
    /// Lowers a single MIR statement into backend IR through the builder `bx`.
    ///
    /// Dispatches on `statement.kind`; each arm either emits code for the
    /// statement or deliberately does nothing (see the final catch-all arm for
    /// the statement kinds that have no runtime effect).
    #[instrument(level = "debug", skip(self, bx))]
    pub fn codegen_statement(&mut self, bx: &mut Bx, statement: &mir::Statement<'tcx>) {
        // Attach the statement's source location to everything emitted below.
        self.set_debug_loc(bx, statement.source_info);
        match statement.kind {
            mir::StatementKind::Assign(box (ref place, ref rvalue)) => {
                if let Some(index) = place.as_local() {
                    // Destination is a bare local: how we codegen depends on
                    // how that local is represented.
                    match self.locals[index] {
                        // Local has a stack slot (alloca): store into it.
                        LocalRef::Place(cg_dest) => self.codegen_rvalue(bx, cg_dest, rvalue),
                        // Unsized local: the place is reached through an
                        // indirection, so use the unsized-rvalue path.
                        LocalRef::UnsizedPlace(cg_indirect_dest) => {
                            self.codegen_rvalue_unsized(bx, cg_indirect_dest, rvalue)
                        }
                        // SSA-like local not yet assigned: its first (and only)
                        // assignment materializes it as an operand, after which
                        // we can introduce debuginfo for it.
                        LocalRef::PendingOperand => {
                            let operand = self.codegen_rvalue_operand(bx, rvalue);
                            self.overwrite_local(index, LocalRef::Operand(operand));
                            self.debug_introduce_local(bx, index);
                        }
                        LocalRef::Operand(op) => {
                            // A non-ZST operand local must only ever be
                            // assigned once; a second assignment is a bug.
                            if !op.layout.is_zst() {
                                span_bug!(
                                    statement.source_info.span,
                                    "operand {:?} already assigned",
                                    rvalue
                                );
                            }

                            // If the type is zero-sized, it's already been set here,
                            // but we still need to make sure we codegen the operand
                            // (the rvalue may have side effects, e.g. operand
                            // evaluation or asserts folded into it).
                            self.codegen_rvalue_operand(bx, rvalue);
                        }
                    }
                } else {
                    // Destination is a projected place: compute its address,
                    // then store the rvalue into it.
                    let cg_dest = self.codegen_place(bx, place.as_ref());
                    self.codegen_rvalue(bx, cg_dest, rvalue);
                }
            }
            mir::StatementKind::SetDiscriminant { box ref place, variant_index } => {
                // Write only the enum discriminant for `variant_index`,
                // leaving the variant's fields untouched.
                self.codegen_place(bx, place.as_ref()).codegen_set_discr(bx, variant_index);
            }
            mir::StatementKind::Deinit(..) => {
                // For now, don't codegen this to anything. In the future it may be worth
                // experimenting with what kind of information we can emit to LLVM without hurting
                // perf here
            }
            mir::StatementKind::StorageLive(local) => {
                // Emit a storage-live marker, but only for locals that
                // actually have memory backing them (sized or unsized place);
                // operand locals have no stack slot to mark.
                if let LocalRef::Place(cg_place) = self.locals[local] {
                    cg_place.storage_live(bx);
                } else if let LocalRef::UnsizedPlace(cg_indirect_place) = self.locals[local] {
                    cg_indirect_place.storage_live(bx);
                }
            }
            mir::StatementKind::StorageDead(local) => {
                // Mirror of `StorageLive` above: mark the slot dead if one exists.
                if let LocalRef::Place(cg_place) = self.locals[local] {
                    cg_place.storage_dead(bx);
                } else if let LocalRef::UnsizedPlace(cg_indirect_place) = self.locals[local] {
                    cg_indirect_place.storage_dead(bx);
                }
            }
            mir::StatementKind::Coverage(box ref coverage) => {
                // Coverage instrumentation; the scope is passed so the
                // counter can be attributed to the right source region.
                self.codegen_coverage(bx, coverage, statement.source_info.scope);
            }
            mir::StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(ref op)) => {
                // `assume(cond)`: feed the boolean to the backend as an
                // optimization hint.
                let op_val = self.codegen_operand(bx, op);
                bx.assume(op_val.immediate());
            }
            mir::StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
                mir::CopyNonOverlapping { ref count, ref src, ref dst },
            )) => {
                // Lower `copy_nonoverlapping(src, dst, count)` to a memcpy.
                // NOTE: operands are evaluated in dst, src, count order here.
                let dst_val = self.codegen_operand(bx, dst);
                let src_val = self.codegen_operand(bx, src);
                let count = self.codegen_operand(bx, count).immediate();
                // `count` is in elements, so the byte length is
                // count * size_of(pointee). The pointee layout is read off the
                // destination pointer's layout at offset zero.
                let pointee_layout = dst_val
                    .layout
                    .pointee_info_at(bx, rustc_target::abi::Size::ZERO)
                    .expect("Expected pointer");
                let bytes = bx.mul(count, bx.const_usize(pointee_layout.size.bytes()));

                // Both sides use the pointee's alignment; the intrinsic's
                // contract guarantees the ranges don't overlap, so a plain
                // memcpy (rather than memmove) is valid.
                let align = pointee_layout.align;
                let dst = dst_val.immediate();
                let src = src_val.immediate();
                bx.memcpy(dst, align, src, align, bytes, crate::MemFlags::empty());
            }
            // These statement kinds exist only for analysis/borrowck purposes
            // and generate no code.
            mir::StatementKind::FakeRead(..)
            | mir::StatementKind::Retag { .. }
            | mir::StatementKind::AscribeUserType(..)
            | mir::StatementKind::ConstEvalCounter
            | mir::StatementKind::PlaceMention(..)
            | mir::StatementKind::Nop => {}
        }
    }
}