//! See docs in `build/expr/mod.rs`.

use rustc_index::{Idx, IndexVec};
use rustc_middle::ty::util::IntTypeExt;
use rustc_target::abi::{Abi, FieldIdx, Primitive};

use crate::build::expr::as_place::PlaceBase;
use crate::build::expr::category::{Category, RvalueFunc};
use crate::build::{BlockAnd, BlockAndExtension, Builder, NeedsTemporary};
use rustc_hir::lang_items::LangItem;
use rustc_middle::middle::region;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::AssertKind;
use rustc_middle::mir::Place;
use rustc_middle::mir::*;
use rustc_middle::thir::*;
use rustc_middle::ty::cast::{mir_cast_kind, CastTy};
use rustc_middle::ty::layout::IntegerExt;
use rustc_middle::ty::{self, Ty, UpvarSubsts};
use rustc_span::Span;

impl<'a, 'tcx> Builder<'a, 'tcx> {
    /// Returns an rvalue suitable for use until the end of the current
    /// scope expression.
    ///
    /// The operand returned from this function will *not be valid* after
    /// an ExprKind::Scope is passed, so please do *not* return it from
    /// functions to avoid bad miscompiles.
    pub(crate) fn as_local_rvalue(
        &mut self,
        block: BasicBlock,
        expr: &Expr<'tcx>,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let local_scope = self.local_scope();
        self.as_rvalue(block, Some(local_scope), expr)
    }

    /// Compile `expr`, yielding an rvalue.
    ///
    /// `scope` is the temp lifetime for any temporaries created along the
    /// way; `block` is threaded through (via `unpack!`) because lowering a
    /// sub-expression may extend the CFG with new blocks (asserts, calls).
    pub(crate) fn as_rvalue(
        &mut self,
        mut block: BasicBlock,
        scope: Option<region::Scope>,
        expr: &Expr<'tcx>,
    ) -> BlockAnd<Rvalue<'tcx>> {
        debug!("expr_as_rvalue(block={:?}, scope={:?}, expr={:?})", block, scope, expr);

        let this = self;
        let expr_span = expr.span;
        let source_info = this.source_info(expr_span);

        match expr.kind {
            ExprKind::ThreadLocalRef(did) => block.and(Rvalue::ThreadLocalRef(did)),
            ExprKind::Scope { region_scope, lint_level, value } => {
                let region_scope = (region_scope, source_info);
                this.in_scope(region_scope, lint_level, |this| {
                    this.as_rvalue(block, scope, &this.thir[value])
                })
            }
            ExprKind::Repeat { value, count } => {
                // `[e; 0]` never reads `e` as an operand, but `e` must still
                // be evaluated for side effects — handled separately below.
                if Some(0) == count.try_eval_target_usize(this.tcx, this.param_env) {
                    this.build_zero_repeat(block, value, scope, source_info)
                } else {
                    let value_operand = unpack!(
                        block = this.as_operand(
                            block,
                            scope,
                            &this.thir[value],
                            LocalInfo::Boring,
                            NeedsTemporary::No
                        )
                    );
                    block.and(Rvalue::Repeat(value_operand, count))
                }
            }
            ExprKind::Binary { op, lhs, rhs } => {
                let lhs = unpack!(
                    block = this.as_operand(
                        block,
                        scope,
                        &this.thir[lhs],
                        LocalInfo::Boring,
                        NeedsTemporary::Maybe
                    )
                );
                let rhs = unpack!(
                    block = this.as_operand(
                        block,
                        scope,
                        &this.thir[rhs],
                        LocalInfo::Boring,
                        NeedsTemporary::No
                    )
                );
                this.build_binary_op(block, op, expr_span, expr.ty, lhs, rhs)
            }
            ExprKind::Unary { op, arg } => {
                let arg = unpack!(
                    block = this.as_operand(
                        block,
                        scope,
                        &this.thir[arg],
                        LocalInfo::Boring,
                        NeedsTemporary::No
                    )
                );
                // Check for -MIN on signed integers
                if this.check_overflow && op == UnOp::Neg && expr.ty.is_signed() {
                    let bool_ty = this.tcx.types.bool;

                    let minval = this.minval_literal(expr_span, expr.ty);
                    let is_min = this.temp(bool_ty, expr_span);

                    this.cfg.push_assign(
                        block,
                        source_info,
                        is_min,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((arg.to_copy(), minval))),
                    );

                    block = this.assert(
                        block,
                        Operand::Move(is_min),
                        false,
                        AssertKind::OverflowNeg(arg.to_copy()),
                        expr_span,
                    );
                }
                block.and(Rvalue::UnaryOp(op, arg))
            }
            ExprKind::Box { value } => {
                let value = &this.thir[value];
                let tcx = this.tcx;

                // `exchange_malloc` is unsafe but box is safe, so need a new scope.
                let synth_scope = this.new_source_scope(
                    expr_span,
                    LintLevel::Inherited,
                    Some(Safety::BuiltinUnsafe),
                );
                let synth_info = SourceInfo { span: expr_span, scope: synth_scope };

                let size = this.temp(tcx.types.usize, expr_span);
                this.cfg.push_assign(
                    block,
                    synth_info,
                    size,
                    Rvalue::NullaryOp(NullOp::SizeOf, value.ty),
                );

                let align = this.temp(tcx.types.usize, expr_span);
                this.cfg.push_assign(
                    block,
                    synth_info,
                    align,
                    Rvalue::NullaryOp(NullOp::AlignOf, value.ty),
                );

                // malloc some memory of suitable size and align:
                let exchange_malloc = Operand::function_handle(
                    tcx,
                    tcx.require_lang_item(LangItem::ExchangeMalloc, Some(expr_span)),
                    [],
                    expr_span,
                );
                let storage = this.temp(Ty::new_mut_ptr(tcx, tcx.types.u8), expr_span);
                let success = this.cfg.start_new_block();
                this.cfg.terminate(
                    block,
                    synth_info,
                    TerminatorKind::Call {
                        func: exchange_malloc,
                        args: vec![Operand::Move(size), Operand::Move(align)],
                        destination: storage,
                        target: Some(success),
                        unwind: UnwindAction::Continue,
                        call_source: CallSource::Misc,
                        fn_span: expr_span,
                    },
                );
                this.diverge_from(block);
                block = success;

                // The `Box<T>` temporary created here is not a part of the HIR,
                // and therefore is not considered during generator auto-trait
                // determination. See the comment about `box` at `yield_in_scope`.
                let result = this.local_decls.push(LocalDecl::new(expr.ty, expr_span).internal());
                this.cfg.push(
                    block,
                    Statement { source_info, kind: StatementKind::StorageLive(result) },
                );
                if let Some(scope) = scope {
                    // schedule a shallow free of that memory, lest we unwind:
                    this.schedule_drop_storage_and_value(expr_span, scope, result);
                }

                // Transmute `*mut u8` to the box (thus far, uninitialized):
                let box_ = Rvalue::ShallowInitBox(Operand::Move(storage), value.ty);
                this.cfg.push_assign(block, source_info, Place::from(result), box_);

                // initialize the box contents:
                unpack!(
                    block = this.expr_into_dest(
                        this.tcx.mk_place_deref(Place::from(result)),
                        block,
                        value
                    )
                );
                block.and(Rvalue::Use(Operand::Move(Place::from(result))))
            }
            ExprKind::Cast { source } => {
                let source = &this.thir[source];

                // Casting an enum to an integer is equivalent to computing the discriminant and casting the
                // discriminant. Previously every backend had to repeat the logic for this operation. Now we
                // create all the steps directly in MIR with operations all backends need to support anyway.
                let (source, ty) = if let ty::Adt(adt_def, ..) = source.ty.kind() && adt_def.is_enum() {
                    let discr_ty = adt_def.repr().discr_type().to_ty(this.tcx);
                    let temp = unpack!(block = this.as_temp(block, scope, source, Mutability::Not));
                    let layout = this.tcx.layout_of(this.param_env.and(source.ty));
                    let discr = this.temp(discr_ty, source.span);
                    this.cfg.push_assign(
                        block,
                        source_info,
                        discr,
                        Rvalue::Discriminant(temp.into()),
                    );
                    let (op,ty) = (Operand::Move(discr), discr_ty);

                    // If the discriminant's scalar ABI has a restricted valid
                    // range, emit an `assume` of that range so backends can
                    // optimize the subsequent cast.
                    if let Abi::Scalar(scalar) = layout.unwrap().abi
                        && !scalar.is_always_valid(&this.tcx)
                        && let Primitive::Int(int_width, _signed) = scalar.primitive()
                    {
                        // Compare in the unsigned version of the discriminant
                        // type, so wrap-around ranges work uniformly.
                        let unsigned_ty = int_width.to_ty(this.tcx, false);
                        let unsigned_place = this.temp(unsigned_ty, expr_span);
                        this.cfg.push_assign(
                            block,
                            source_info,
                            unsigned_place,
                            Rvalue::Cast(CastKind::IntToInt, Operand::Copy(discr), unsigned_ty));

                        let bool_ty = this.tcx.types.bool;
                        let range = scalar.valid_range(&this.tcx);
                        // start <= end: a contiguous range, valid iff both bounds
                        // hold (AND). start > end: a wrapping range, valid iff
                        // either bound holds (OR).
                        let merge_op =
                            if range.start <= range.end {
                                BinOp::BitAnd
                            } else {
                                BinOp::BitOr
                            };

                        // Emits `unsigned_place <bin_op> range` into a fresh bool
                        // temp and returns that temp's place.
                        let mut comparer = |range: u128, bin_op: BinOp| -> Place<'tcx> {
                            let range_val =
                                ConstantKind::from_bits(this.tcx, range, ty::ParamEnv::empty().and(unsigned_ty));
                            let lit_op = this.literal_operand(expr.span, range_val);
                            let is_bin_op = this.temp(bool_ty, expr_span);
                            this.cfg.push_assign(
                                block,
                                source_info,
                                is_bin_op,
                                Rvalue::BinaryOp(bin_op, Box::new((Operand::Copy(unsigned_place), lit_op))),
                            );
                            is_bin_op
                        };
                        let assert_place = if range.start == 0 {
                            // Lower bound is trivially satisfied; only check the
                            // upper bound.
                            comparer(range.end, BinOp::Le)
                        } else {
                            let start_place = comparer(range.start, BinOp::Ge);
                            let end_place = comparer(range.end, BinOp::Le);
                            let merge_place = this.temp(bool_ty, expr_span);
                            this.cfg.push_assign(
                                block,
                                source_info,
                                merge_place,
                                Rvalue::BinaryOp(merge_op, Box::new((Operand::Move(start_place), Operand::Move(end_place)))),
                            );
                            merge_place
                        };
                        this.cfg.push(
                            block,
                            Statement {
                                source_info,
                                kind: StatementKind::Intrinsic(Box::new(NonDivergingIntrinsic::Assume(
                                    Operand::Move(assert_place),
                                ))),
                            },
                        );
                    }

                    (op,ty)

                } else {
                    let ty = source.ty;
                    let source = unpack!(
                        block = this.as_operand(block, scope, source, LocalInfo::Boring, NeedsTemporary::No)
                    );
                    (source, ty)
                };
                let from_ty = CastTy::from_ty(ty);
                let cast_ty = CastTy::from_ty(expr.ty);
                debug!("ExprKind::Cast from_ty={from_ty:?}, cast_ty={:?}/{cast_ty:?}", expr.ty,);
                let cast_kind = mir_cast_kind(ty, expr.ty);
                block.and(Rvalue::Cast(cast_kind, source, expr.ty))
            }
            ExprKind::PointerCoercion { cast, source } => {
                let source = unpack!(
                    block = this.as_operand(
                        block,
                        scope,
                        &this.thir[source],
                        LocalInfo::Boring,
                        NeedsTemporary::No
                    )
                );
                block.and(Rvalue::Cast(CastKind::PointerCoercion(cast), source, expr.ty))
            }
            ExprKind::Array { ref fields } => {
                // (*) We would (maybe) be closer to codegen if we
                // handled this and other aggregate cases via
                // `into()`, not `as_rvalue` -- in that case, instead
                // of generating
                //
                //     let tmp1 = ...1;
                //     let tmp2 = ...2;
                //     dest = Rvalue::Aggregate(Foo, [tmp1, tmp2])
                //
                // we could just generate
                //
                //     dest.f = ...1;
                //     dest.g = ...2;
                //
                // The problem is that then we would need to:
                //
                // (a) have a more complex mechanism for handling
                //     partial cleanup;
                // (b) distinguish the case where the type `Foo` has a
                //     destructor, in which case creating an instance
                //     as a whole "arms" the destructor, and you can't
                //     write individual fields; and,
                // (c) handle the case where the type Foo has no
                //     fields. We don't want `let x: ();` to compile
                //     to the same MIR as `let x = ();`.

                // first process the set of fields
                let el_ty = expr.ty.sequence_element_type(this.tcx);
                let fields: IndexVec<FieldIdx, _> = fields
                    .into_iter()
                    .copied()
                    .map(|f| {
                        unpack!(
                            block = this.as_operand(
                                block,
                                scope,
                                &this.thir[f],
                                LocalInfo::Boring,
                                NeedsTemporary::Maybe
                            )
                        )
                    })
                    .collect();

                block.and(Rvalue::Aggregate(Box::new(AggregateKind::Array(el_ty)), fields))
            }
            ExprKind::Tuple { ref fields } => {
                // see (*) above
                // first process the set of fields
                let fields: IndexVec<FieldIdx, _> = fields
                    .into_iter()
                    .copied()
                    .map(|f| {
                        unpack!(
                            block = this.as_operand(
                                block,
                                scope,
                                &this.thir[f],
                                LocalInfo::Boring,
                                NeedsTemporary::Maybe
                            )
                        )
                    })
                    .collect();

                block.and(Rvalue::Aggregate(Box::new(AggregateKind::Tuple), fields))
            }
            ExprKind::Closure(box ClosureExpr {
                closure_id,
                substs,
                ref upvars,
                movability,
                ref fake_reads,
            }) => {
                // Convert the closure fake reads, if any, from `ExprRef` to mir `Place`
                // and push the fake reads.
                // This must come before creating the operands. This is required in case
                // there is a fake read and a borrow of the same path, since otherwise the
                // fake read might interfere with the borrow. Consider an example like this
                // one:
                // ```
                // let mut x = 0;
                // let c = || {
                //     &mut x; // mutable borrow of `x`
                //     match x { _ => () } // fake read of `x`
                // };
                // ```
                //
                for (thir_place, cause, hir_id) in fake_reads.into_iter() {
                    let place_builder =
                        unpack!(block = this.as_place_builder(block, &this.thir[*thir_place]));

                    if let Some(mir_place) = place_builder.try_to_place(this) {
                        this.cfg.push_fake_read(
                            block,
                            this.source_info(this.tcx.hir().span(*hir_id)),
                            *cause,
                            mir_place,
                        );
                    }
                }

                // see (*) above
                let operands: IndexVec<FieldIdx, _> = upvars
                    .into_iter()
                    .copied()
                    .map(|upvar| {
                        let upvar = &this.thir[upvar];
                        match Category::of(&upvar.kind) {
                            // Use as_place to avoid creating a temporary when
                            // moving a variable into a closure, so that
                            // borrowck knows which variables to mark as being
                            // used as mut. This is OK here because the upvar
                            // expressions have no side effects and act on
                            // disjoint places.
                            // This occurs when capturing by copy/move, while
                            // by reference captures use as_operand
                            Some(Category::Place) => {
                                let place = unpack!(block = this.as_place(block, upvar));
                                this.consume_by_copy_or_move(place)
                            }
                            _ => {
                                // Turn mutable borrow captures into unique
                                // borrow captures when capturing an immutable
                                // variable. This is sound because the mutation
                                // that caused the capture will cause an error.
                                match upvar.kind {
                                    ExprKind::Borrow {
                                        borrow_kind:
                                            BorrowKind::Mut { kind: MutBorrowKind::Default },
                                        arg,
                                    } => unpack!(
                                        block = this.limit_capture_mutability(
                                            upvar.span,
                                            upvar.ty,
                                            scope,
                                            block,
                                            &this.thir[arg],
                                        )
                                    ),
                                    _ => {
                                        unpack!(
                                            block = this.as_operand(
                                                block,
                                                scope,
                                                upvar,
                                                LocalInfo::Boring,
                                                NeedsTemporary::Maybe
                                            )
                                        )
                                    }
                                }
                            }
                        }
                    })
                    .collect();

                let result = match substs {
                    UpvarSubsts::Generator(substs) => {
                        // We implicitly set the discriminant to 0. See
                        // librustc_mir/transform/deaggregator.rs for details.
                        let movability = movability.unwrap();
                        Box::new(AggregateKind::Generator(
                            closure_id.to_def_id(),
                            substs,
                            movability,
                        ))
                    }
                    UpvarSubsts::Closure(substs) => {
                        Box::new(AggregateKind::Closure(closure_id.to_def_id(), substs))
                    }
                };
                block.and(Rvalue::Aggregate(result, operands))
            }
            ExprKind::Assign { .. } | ExprKind::AssignOp { .. } => {
                // Assignment expressions evaluate to `()`: lower the statement
                // for effect, then yield a zero-sized unit constant.
                block = unpack!(this.stmt_expr(block, expr, None));
                block.and(Rvalue::Use(Operand::Constant(Box::new(Constant {
                    span: expr_span,
                    user_ty: None,
                    literal: ConstantKind::zero_sized(this.tcx.types.unit),
                }))))
            }

            ExprKind::OffsetOf { container, fields } => {
                block.and(Rvalue::NullaryOp(NullOp::OffsetOf(fields), container))
            }

            ExprKind::Literal { .. }
            | ExprKind::NamedConst { .. }
            | ExprKind::NonHirLiteral { .. }
            | ExprKind::ZstLiteral { .. }
            | ExprKind::ConstParam { .. }
            | ExprKind::ConstBlock { .. }
            | ExprKind::StaticRef { .. } => {
                let constant = this.as_constant(expr);
                block.and(Rvalue::Use(Operand::Constant(Box::new(constant))))
            }

            ExprKind::Yield { .. }
            | ExprKind::Block { .. }
            | ExprKind::Match { .. }
            | ExprKind::If { .. }
            | ExprKind::NeverToAny { .. }
            | ExprKind::Use { .. }
            | ExprKind::Borrow { .. }
            | ExprKind::AddressOf { .. }
            | ExprKind::Adt { .. }
            | ExprKind::Loop { .. }
            | ExprKind::LogicalOp { .. }
            | ExprKind::Call { .. }
            | ExprKind::Field { .. }
            | ExprKind::Let { .. }
            | ExprKind::Deref { .. }
            | ExprKind::Index { .. }
            | ExprKind::VarRef { .. }
            | ExprKind::UpvarRef { .. }
            | ExprKind::Break { .. }
            | ExprKind::Continue { .. }
            | ExprKind::Return { .. }
            | ExprKind::Become { .. }
            | ExprKind::InlineAsm { .. }
            | ExprKind::PlaceTypeAscription { .. }
            | ExprKind::ValueTypeAscription { .. } => {
                // these do not have corresponding `Rvalue` variants,
                // so make an operand and then return that
                debug_assert!(!matches!(
                    Category::of(&expr.kind),
                    Some(Category::Rvalue(RvalueFunc::AsRvalue) | Category::Constant)
                ));
                let operand = unpack!(
                    block =
                        this.as_operand(block, scope, expr, LocalInfo::Boring, NeedsTemporary::No)
                );
                block.and(Rvalue::Use(operand))
            }
        }
    }

    /// Builds the MIR for binary operator `op` applied to `lhs`/`rhs` of type
    /// `ty`, inserting the checks the language requires:
    ///
    /// * `+`, `-`, `*` on integers (when `check_overflow` is on) lower to a
    ///   `CheckedBinaryOp` producing a `(result, overflowed)` tuple plus an
    ///   `Assert` on the overflow flag.
    /// * `<<`, `>>` (when `check_overflow` is on) assert that the RHS is less
    ///   than the bit-width of the LHS type.
    /// * `/`, `%` on integers always assert against division by zero, and
    ///   additionally against `MIN / -1` for signed types.
    ///
    /// All other operators lower to a plain `Rvalue::BinaryOp`.
    pub(crate) fn build_binary_op(
        &mut self,
        mut block: BasicBlock,
        op: BinOp,
        span: Span,
        ty: Ty<'tcx>,
        lhs: Operand<'tcx>,
        rhs: Operand<'tcx>,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let source_info = self.source_info(span);
        let bool_ty = self.tcx.types.bool;
        let rvalue = match op {
            BinOp::Add | BinOp::Sub | BinOp::Mul if self.check_overflow && ty.is_integral() => {
                let result_tup = Ty::new_tup(self.tcx, &[ty, bool_ty]);
                let result_value = self.temp(result_tup, span);

                self.cfg.push_assign(
                    block,
                    source_info,
                    result_value,
                    Rvalue::CheckedBinaryOp(op, Box::new((lhs.to_copy(), rhs.to_copy()))),
                );
                // Field 0 is the result, field 1 is the overflow flag.
                let val_fld = FieldIdx::new(0);
                let of_fld = FieldIdx::new(1);

                let tcx = self.tcx;
                let val = tcx.mk_place_field(result_value, val_fld, ty);
                let of = tcx.mk_place_field(result_value, of_fld, bool_ty);

                let err = AssertKind::Overflow(op, lhs, rhs);
                block = self.assert(block, Operand::Move(of), false, err, span);

                Rvalue::Use(Operand::Move(val))
            }
            BinOp::Shl | BinOp::Shr if self.check_overflow && ty.is_integral() => {
                // For an unsigned RHS, the shift is in-range for `rhs < bits`.
                // For a signed RHS, `IntToInt` cast to the equivalent unsigned
                // type and do that same comparison. Because the type is the
                // same size, there's no negative shift amount that ends up
                // overlapping with valid ones, thus it catches negatives too.
                let (lhs_size, _) = ty.int_size_and_signed(self.tcx);
                let rhs_ty = rhs.ty(&self.local_decls, self.tcx);
                let (rhs_size, _) = rhs_ty.int_size_and_signed(self.tcx);

                let (unsigned_rhs, unsigned_ty) = match rhs_ty.kind() {
                    ty::Uint(_) => (rhs.to_copy(), rhs_ty),
                    ty::Int(int_width) => {
                        let uint_ty = Ty::new_uint(self.tcx, int_width.to_unsigned());
                        let rhs_temp = self.temp(uint_ty, span);
                        self.cfg.push_assign(
                            block,
                            source_info,
                            rhs_temp,
                            Rvalue::Cast(CastKind::IntToInt, rhs.to_copy(), uint_ty),
                        );
                        (Operand::Move(rhs_temp), uint_ty)
                    }
                    _ => unreachable!("only integers are shiftable"),
                };

                // This can't overflow because the largest shiftable types are 128-bit,
                // which fits in `u8`, the smallest possible `unsigned_ty`.
                // (And `from_uint` will `bug!` if that's ever no longer true.)
                let lhs_bits = Operand::const_from_scalar(
                    self.tcx,
                    unsigned_ty,
                    Scalar::from_uint(lhs_size.bits(), rhs_size),
                    span,
                );

                let inbounds = self.temp(bool_ty, span);
                self.cfg.push_assign(
                    block,
                    source_info,
                    inbounds,
                    Rvalue::BinaryOp(BinOp::Lt, Box::new((unsigned_rhs, lhs_bits))),
                );

                let overflow_err = AssertKind::Overflow(op, lhs.to_copy(), rhs.to_copy());
                block = self.assert(block, Operand::Move(inbounds), true, overflow_err, span);
                Rvalue::BinaryOp(op, Box::new((lhs, rhs)))
            }
            BinOp::Div | BinOp::Rem if ty.is_integral() => {
                // Checking division and remainder is more complex, since we 1. always check
                // and 2. there are two possible failure cases, divide-by-zero and overflow.

                let zero_err = if op == BinOp::Div {
                    AssertKind::DivisionByZero(lhs.to_copy())
                } else {
                    AssertKind::RemainderByZero(lhs.to_copy())
                };
                let overflow_err = AssertKind::Overflow(op, lhs.to_copy(), rhs.to_copy());

                // Check for / 0
                let is_zero = self.temp(bool_ty, span);
                let zero = self.zero_literal(span, ty);
                self.cfg.push_assign(
                    block,
                    source_info,
                    is_zero,
                    Rvalue::BinaryOp(BinOp::Eq, Box::new((rhs.to_copy(), zero))),
                );

                block = self.assert(block, Operand::Move(is_zero), false, zero_err, span);

                // We only need to check for the overflow in one case:
                // MIN / -1, and only for signed values.
                if ty.is_signed() {
                    let neg_1 = self.neg_1_literal(span, ty);
                    let min = self.minval_literal(span, ty);

                    let is_neg_1 = self.temp(bool_ty, span);
                    let is_min = self.temp(bool_ty, span);
                    let of = self.temp(bool_ty, span);

                    // this does (rhs == -1) & (lhs == MIN). It could short-circuit instead

                    self.cfg.push_assign(
                        block,
                        source_info,
                        is_neg_1,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((rhs.to_copy(), neg_1))),
                    );
                    self.cfg.push_assign(
                        block,
                        source_info,
                        is_min,
                        Rvalue::BinaryOp(BinOp::Eq, Box::new((lhs.to_copy(), min))),
                    );

                    let is_neg_1 = Operand::Move(is_neg_1);
                    let is_min = Operand::Move(is_min);
                    self.cfg.push_assign(
                        block,
                        source_info,
                        of,
                        Rvalue::BinaryOp(BinOp::BitAnd, Box::new((is_neg_1, is_min))),
                    );

                    block = self.assert(block, Operand::Move(of), false, overflow_err, span);
                }

                Rvalue::BinaryOp(op, Box::new((lhs, rhs)))
            }
            _ => Rvalue::BinaryOp(op, Box::new((lhs, rhs))),
        };
        block.and(rvalue)
    }

    /// Lowers a repeat expression `[value; N]` whose count evaluated to zero.
    /// The element expression must still be evaluated for its side effects
    /// (and dropped, if it produced a moved temporary), but the resulting
    /// rvalue is an empty array aggregate.
    fn build_zero_repeat(
        &mut self,
        mut block: BasicBlock,
        value: ExprId,
        scope: Option<region::Scope>,
        outer_source_info: SourceInfo,
    ) -> BlockAnd<Rvalue<'tcx>> {
        let this = self;
        let value = &this.thir[value];
        let elem_ty = value.ty;
        if let Some(Category::Constant) = Category::of(&value.kind) {
            // Repeating a const does nothing
        } else {
            // For a non-const, we may need to generate an appropriate `Drop`
            let value_operand = unpack!(
                block = this.as_operand(block, scope, value, LocalInfo::Boring, NeedsTemporary::No)
            );
            if let Operand::Move(to_drop) = value_operand {
                let success = this.cfg.start_new_block();
                this.cfg.terminate(
                    block,
                    outer_source_info,
                    TerminatorKind::Drop {
                        place: to_drop,
                        target: success,
                        unwind: UnwindAction::Continue,
                        replace: false,
                    },
                );
                this.diverge_from(block);
                block = success;
            }
            this.record_operands_moved(&[value_operand]);
        }
        block.and(Rvalue::Aggregate(Box::new(AggregateKind::Array(elem_ty)), IndexVec::new()))
    }

    /// Lowers a by-mut-ref capture of `arg` into a temporary holding the
    /// borrow. When the captured place is immutable, the borrow is weakened
    /// from a default mutable borrow to a `ClosureCapture` borrow; see the
    /// comment at the call site in `as_rvalue` for why this is sound.
    fn limit_capture_mutability(
        &mut self,
        upvar_span: Span,
        upvar_ty: Ty<'tcx>,
        temp_lifetime: Option<region::Scope>,
        mut block: BasicBlock,
        arg: &Expr<'tcx>,
    ) -> BlockAnd<Operand<'tcx>> {
        let this = self;

        let source_info = this.source_info(upvar_span);
        let temp = this.local_decls.push(LocalDecl::new(upvar_ty, upvar_span));

        this.cfg.push(block, Statement { source_info, kind: StatementKind::StorageLive(temp) });

        let arg_place_builder = unpack!(block = this.as_place_builder(block, arg));

        let mutability = match arg_place_builder.base() {
            // We are capturing a path that starts off a local variable in the parent.
            // The mutability of the current capture is same as the mutability
            // of the local declaration in the parent.
            PlaceBase::Local(local) => this.local_decls[local].mutability,
            // Parent is a closure and we are capturing a path that is captured
            // by the parent itself. The mutability of the current capture
            // is same as that of the capture in the parent closure.
            PlaceBase::Upvar { .. } => {
                let enclosing_upvars_resolved = arg_place_builder.to_place(this);

                match enclosing_upvars_resolved.as_ref() {
                    PlaceRef {
                        local,
                        projection: &[ProjectionElem::Field(upvar_index, _), ..],
                    }
                    | PlaceRef {
                        local,
                        projection:
                            &[ProjectionElem::Deref, ProjectionElem::Field(upvar_index, _), ..],
                    } => {
                        // Not in a closure
                        debug_assert!(
                            local == ty::CAPTURE_STRUCT_LOCAL,
                            "Expected local to be Local(1), found {:?}",
                            local
                        );
                        // Not in a closure
                        debug_assert!(
                            this.upvars.len() > upvar_index.index(),
                            "Unexpected capture place, upvars={:#?}, upvar_index={:?}",
                            this.upvars,
                            upvar_index
                        );
                        this.upvars[upvar_index.index()].mutability
                    }
                    _ => bug!("Unexpected capture place"),
                }
            }
        };

        let borrow_kind = match mutability {
            Mutability::Not => BorrowKind::Mut { kind: MutBorrowKind::ClosureCapture },
            Mutability::Mut => BorrowKind::Mut { kind: MutBorrowKind::Default },
        };

        let arg_place = arg_place_builder.to_place(this);

        this.cfg.push_assign(
            block,
            source_info,
            Place::from(temp),
            Rvalue::Ref(this.tcx.lifetimes.re_erased, borrow_kind, arg_place),
        );

        // See the comment in `expr_as_temp` and on the `rvalue_scopes` field for why
        // this can be `None`.
        if let Some(temp_lifetime) = temp_lifetime {
            this.schedule_drop_storage_and_value(upvar_span, temp_lifetime, temp);
        }

        block.and(Operand::Move(Place::from(temp)))
    }

    // Helper to get a `-1` value of the appropriate type
    // (all-ones bit pattern, i.e. `-1` in two's complement for signed `ty`).
    fn neg_1_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> {
        let param_ty = ty::ParamEnv::empty().and(ty);
        let size = self.tcx.layout_of(param_ty).unwrap().size;
        let literal = ConstantKind::from_bits(self.tcx, size.unsigned_int_max(), param_ty);

        self.literal_operand(span, literal)
    }

    // Helper to get the minimum value of the appropriate type
    // (sign bit set, all other bits clear; only valid for signed `ty`).
    fn minval_literal(&mut self, span: Span, ty: Ty<'tcx>) -> Operand<'tcx> {
        assert!(ty.is_signed());
        let param_ty = ty::ParamEnv::empty().and(ty);
        let bits = self.tcx.layout_of(param_ty).unwrap().size.bits();
        let n = 1 << (bits - 1);
        let literal = ConstantKind::from_bits(self.tcx, n, param_ty);

        self.literal_operand(span, literal)
    }
}