diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index b01a55b48da66..dc0a69980c120 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -1195,6 +1195,10 @@ impl<'a, 'tcx> CrateMetadataRef<'a> { } } + fn get_is_trivial_mir(&self, id: DefIndex) -> bool { + self.root.tables.is_trivial_mir.get(self, id).filter(|_| !self.is_proc_macro(id)).is_some() + } + fn get_optimized_mir(&self, tcx: TyCtxt<'tcx>, id: DefIndex) -> Body<'tcx> { self.root .tables diff --git a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs index 05b8dad3097e4..a09f9d566349b 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs @@ -111,6 +111,7 @@ provide! { <'tcx> tcx, def_id, other, cdata, bug!("coerce_unsized_info: `{:?}` is missing its info", def_id); }) } + is_trivial_mir => { cdata.get_is_trivial_mir(def_id.index) } optimized_mir => { tcx.arena.alloc(cdata.get_optimized_mir(tcx, def_id.index)) } promoted_mir => { tcx.arena.alloc(cdata.get_promoted_mir(tcx, def_id.index)) } mir_abstract_const => { cdata.get_mir_abstract_const(tcx, def_id.index) } diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index f0911928e81c9..9caeafc4216c8 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -1160,6 +1160,12 @@ impl EncodeContext<'a, 'tcx> { fn encode_optimized_mir(&mut self, def_id: LocalDefId) { debug!("EntryBuilder::encode_mir({:?})", def_id); if self.tcx.mir_keys(LOCAL_CRATE).contains(&def_id) { + if self.tcx.is_trivial_mir(def_id) { + // We don't store anything if `is_trivial_mir` is `false` + // so we can use a unit type here. 
+ self.tables.is_trivial_mir.set(def_id.local_def_index, ()); + } + record!(self.tables.mir[def_id.to_def_id()] <- self.tcx.optimized_mir(def_id)); let unused = self.tcx.unused_generic_params(def_id); diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index 2bd2019d3cdb5..03aba4d59fa7b 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -300,6 +300,7 @@ define_tables! { super_predicates: Table<DefIndex, Lazy!(ty::GenericPredicates<'tcx>)>, // As an optimization, a missing entry indicates an empty `&[]`. explicit_item_bounds: Table<DefIndex, Lazy!([(ty::Predicate<'tcx>, Span)])>, + is_trivial_mir: Table<DefIndex, ()>, mir: Table<DefIndex, Lazy!(mir::Body<'tcx>)>, promoted_mir: Table<DefIndex, Lazy!(IndexVec<Promoted, mir::Body<'tcx>>)>, mir_abstract_consts: Table<DefIndex, Lazy!([mir::abstract_const::Node<'tcx>])>, diff --git a/compiler/rustc_metadata/src/rmeta/table.rs b/compiler/rustc_metadata/src/rmeta/table.rs index 03bd4170ea990..1dd663ef2c87f 100644 --- a/compiler/rustc_metadata/src/rmeta/table.rs +++ b/compiler/rustc_metadata/src/rmeta/table.rs @@ -4,7 +4,7 @@ use rustc_index::vec::Idx; use rustc_serialize::opaque::Encoder; use std::convert::TryInto; use std::marker::PhantomData; -use std::num::NonZeroUsize; +use std::num::{NonZeroU8, NonZeroUsize}; use tracing::debug; /// Helper trait, for encoding to, and decoding from, a fixed number of bytes. 
@@ -75,6 +75,30 @@ impl FixedSizeEncoding for u32 { } } +impl FixedSizeEncoding for Option<NonZeroU8> { + fixed_size_encoding_byte_len_and_defaults!(1); + + fn from_bytes(b: &[u8]) -> Self { + NonZeroU8::new(b[0]) + } + + fn write_to_bytes(self, b: &mut [u8]) { + b[0] = self.map_or(0, |x| x.get()); + } +} + +impl FixedSizeEncoding for Option<()> { + fixed_size_encoding_byte_len_and_defaults!(Option::<NonZeroU8>::BYTE_LEN); + + fn from_bytes(b: &[u8]) -> Self { + Option::<NonZeroU8>::from_bytes(b).map(|_| ()) + } + + fn write_to_bytes(self, b: &mut [u8]) { + self.map(|()| NonZeroU8::new(1).unwrap()).write_to_bytes(b) + } +} + // NOTE(eddyb) there could be an impl for `usize`, which would enable a more // generic `Lazy` impl, but in the general case we might not need / want to // fit every `usize` in `u32`. diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index 1acb44f6d22f3..8114764cb7baa 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -325,6 +325,11 @@ rustc_queries! { } } + query is_trivial_mir(key: DefId) -> bool { + desc { |tcx| "checking if MIR for `{}` is trivial", tcx.def_path_str(key) } + cache_on_disk_if { key.is_local() } + } + /// MIR after our optimization passes have run. This is MIR that is ready /// for codegen. This is also the only query that can fetch non-local MIR, at present. 
query optimized_mir(key: DefId) -> &'tcx mir::Body<'tcx> { diff --git a/compiler/rustc_mir/src/transform/inline.rs b/compiler/rustc_mir/src/transform/inline.rs index 796ad6c9c2978..ae4fd1df8186a 100644 --- a/compiler/rustc_mir/src/transform/inline.rs +++ b/compiler/rustc_mir/src/transform/inline.rs @@ -38,7 +38,7 @@ struct CallSite<'tcx> { impl<'tcx> MirPass<'tcx> for Inline { fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { - if tcx.sess.opts.debugging_opts.mir_opt_level >= 2 { + if tcx.sess.opts.debugging_opts.mir_opt_level >= 1 { if tcx.sess.opts.debugging_opts.instrument_coverage { // The current implementation of source code coverage injects code region counters // into the MIR, and assumes a 1-to-1 correspondence between MIR and source-code- @@ -106,7 +106,12 @@ impl Inliner<'tcx> { continue; } - let callee_body = if let Some(callee_def_id) = callsite.callee.as_local() { + let callee_body = if self.tcx.is_trivial_mir(callsite.callee) { + self.tcx.optimized_mir(callsite.callee) + } else if self.tcx.sess.opts.debugging_opts.mir_opt_level < 2 { + // Only inline trivial functions by default. + continue; + } else if let Some(callee_def_id) = callsite.callee.as_local() { let callee_hir_id = self.tcx.hir().local_def_id_to_hir_id(callee_def_id); // Avoid a cycle here by only using `optimized_mir` only if we have // a lower `HirId` than the callee. This ensures that the callee will @@ -843,3 +848,61 @@ impl<'a, 'tcx> MutVisitor<'tcx> for Integrator<'a, 'tcx> { *scope = self.scope_map[*scope]; } } + +struct FunctionCallFinder<'tcx> { + tcx: TyCtxt<'tcx>, + found: bool, +} + +impl<'tcx> FunctionCallFinder<'tcx> { + fn new(tcx: TyCtxt<'tcx>) -> Self { + FunctionCallFinder { tcx, found: false } + } +} + +impl<'tcx> Visitor<'tcx> for FunctionCallFinder<'tcx> { + fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, _location: Location) { + if let TerminatorKind::Call { func: Operand::Constant(ref f), .. 
} = terminator.kind { + if let ty::FnDef(def_id, _) = *f.literal.ty.kind() { + // Don't forbid intrinsics. + let f = self.tcx.fn_sig(def_id); + if f.abi() == Abi::RustIntrinsic || f.abi() == Abi::PlatformIntrinsic { + return; + } + + // FIXME: We may also want to check for `tcx.is_mir_available(def_id)`. + } + + self.found = true; + } + } +} + +pub fn is_trivial_mir(tcx: TyCtxt<'tcx>, did: DefId) -> bool { + debug!("is_trivial_mir({:?})", did); + if tcx.is_constructor(did) { + debug!("is_trivial_mir = true (constructor)"); + return true; + } + + use rustc_hir::def::DefKind; + if !matches!(tcx.def_kind(did), DefKind::Fn | DefKind::AssocFn) { + // Only inline functions, don't look at constants here. + return false; + } + + if let Some(did) = did.as_local() { + let body = tcx + .mir_drops_elaborated_and_const_checked(ty::WithOptConstParam::unknown(did)) + .borrow(); + let mut finder = FunctionCallFinder::new(tcx); + finder.visit_body(&body); + debug!("is_trivial_mir = {}", !finder.found); + !finder.found + } else { + // This branch is only taken if no `optimized_mir` is available for + // an extern crate, as `is_trivial_mir` has otherwise been encoded. + debug!("is_trivial_mir = false (no MIR available)"); + false + } +} diff --git a/compiler/rustc_mir/src/transform/mod.rs b/compiler/rustc_mir/src/transform/mod.rs index 20b8c90a9dcad..4c46e053822dc 100644 --- a/compiler/rustc_mir/src/transform/mod.rs +++ b/compiler/rustc_mir/src/transform/mod.rs @@ -68,6 +68,7 @@ pub(crate) fn provide(providers: &mut Providers) { }, mir_promoted, mir_drops_elaborated_and_const_checked, + is_trivial_mir: inline::is_trivial_mir, optimized_mir, optimized_mir_of_const_arg, is_mir_available, @@ -478,6 +479,8 @@ fn inner_optimized_mir(tcx: TyCtxt<'_>, def: ty::WithOptConstParam) return shim::build_adt_ctor(tcx, def.did.to_def_id()); } + // `is_trivial_mir` uses `mir_drops_elaborated_and_const_checked` so run that first. 
+ tcx.ensure().is_trivial_mir(def.did.to_def_id()); let mut body = tcx.mir_drops_elaborated_and_const_checked(def).steal(); run_optimization_passes(tcx, &mut body); diff --git a/library/core/src/iter/range.rs b/library/core/src/iter/range.rs index 9f34aee1947cd..4365c436fe494 100644 --- a/library/core/src/iter/range.rs +++ b/library/core/src/iter/range.rs @@ -1,7 +1,7 @@ use crate::char; use crate::convert::TryFrom; use crate::mem; -use crate::ops::{self, Add, Sub, Try}; +use crate::ops::{self, Try}; use super::{FusedIterator, TrustedLen}; @@ -201,24 +201,12 @@ macro_rules! step_identical_methods { #[inline] fn forward(start: Self, n: usize) -> Self { - // In debug builds, trigger a panic on overflow. - // This should optimize completely out in release builds. - if Self::forward_checked(start, n).is_none() { - let _ = Add::add(Self::MAX, 1); - } - // Do wrapping math to allow e.g. `Step::forward(-128i8, 255)`. - start.wrapping_add(n as Self) + start + (n as Self) } #[inline] fn backward(start: Self, n: usize) -> Self { - // In debug builds, trigger a panic on overflow. - // This should optimize completely out in release builds. - if Self::backward_checked(start, n).is_none() { - let _ = Sub::sub(Self::MIN, 1); - } - // Do wrapping math to allow e.g. `Step::backward(127i8, 255)`. - start.wrapping_sub(n as Self) + start - (n as Self) } }; }