Prevent stack overflow for deeply recursive code
oli-obk authored and mati865 committed May 2, 2020
1 parent 7184d13 commit 26edcee
Showing 14 changed files with 445 additions and 351 deletions.
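For context, a minimal sketch (not part of the commit) of the failure mode being fixed: deeply nested syntax, usually machine-generated, makes lowering and other recursive passes consume one stack frame per nesting level until the fixed-size stack runs out.

```rust
// Hedged illustration: only a few nesting levels are shown, but real
// reproducers nest thousands of levels deep, one stack frame per level.
fn main() {
    let x = -(-(-(-(-(-(-(-(1))))))));
    println!("{}", x); // prints 1
}
```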
23 changes: 23 additions & 0 deletions Cargo.lock
@@ -2630,6 +2630,15 @@ dependencies = [
  "core",
 ]
 
+[[package]]
+name = "psm"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b14fc68b454f875abc8354c2555e1d56596f74833ddc0f77f87f4871ed6a30e0"
+dependencies = [
+ "cc",
+]
+
 [[package]]
 name = "publicsuffix"
 version = "1.5.3"
@@ -3152,6 +3161,7 @@ checksum = "81dfcfbb0ddfd533abf8c076e3b49d1e5042d1962526a12ce2c66d514b24cca3"
 dependencies = [
  "rustc-ap-rustc_data_structures",
  "smallvec 1.0.0",
+ "stacker",
 ]
 
 [[package]]
@@ -4657,6 +4667,19 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ffbc596e092fe5f598b12ef46cc03754085ac2f4d8c739ad61c4ae266cc3b3fa"
 
+[[package]]
+name = "stacker"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d96fc4f13a0ac088e9a3cd9af1cc8c5cc1ab5deb2145cef661267dfc9c542f8a"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "libc",
+ "psm",
+ "winapi 0.3.8",
+]
+
 [[package]]
 name = "std"
 version = "0.0.0"
367 changes: 191 additions & 176 deletions src/librustc_ast_lowering/expr.rs

Large diffs are not rendered by default.

144 changes: 75 additions & 69 deletions src/librustc_ast_lowering/pat.rs
@@ -4,81 +4,87 @@ use rustc_ast::ast::*;
 use rustc_ast::ptr::P;
 use rustc_hir as hir;
 use rustc_hir::def::Res;
+use rustc_middle::limits::ensure_sufficient_stack;
 use rustc_span::{source_map::Spanned, Span};
 
 impl<'a, 'hir> LoweringContext<'a, 'hir> {
     crate fn lower_pat(&mut self, p: &Pat) -> &'hir hir::Pat<'hir> {
-        let node = match p.kind {
-            PatKind::Wild => hir::PatKind::Wild,
-            PatKind::Ident(ref binding_mode, ident, ref sub) => {
-                let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(&*s));
-                let node = self.lower_pat_ident(p, binding_mode, ident, lower_sub);
-                node
-            }
-            PatKind::Lit(ref e) => hir::PatKind::Lit(self.lower_expr(e)),
-            PatKind::TupleStruct(ref path, ref pats) => {
-                let qpath = self.lower_qpath(
-                    p.id,
-                    &None,
-                    path,
-                    ParamMode::Optional,
-                    ImplTraitContext::disallowed(),
-                );
-                let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct");
-                hir::PatKind::TupleStruct(qpath, pats, ddpos)
-            }
-            PatKind::Or(ref pats) => {
-                hir::PatKind::Or(self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat(x))))
-            }
-            PatKind::Path(ref qself, ref path) => {
-                let qpath = self.lower_qpath(
-                    p.id,
-                    qself,
-                    path,
-                    ParamMode::Optional,
-                    ImplTraitContext::disallowed(),
-                );
-                hir::PatKind::Path(qpath)
-            }
-            PatKind::Struct(ref path, ref fields, etc) => {
-                let qpath = self.lower_qpath(
-                    p.id,
-                    &None,
-                    path,
-                    ParamMode::Optional,
-                    ImplTraitContext::disallowed(),
-                );
-
-                let fs = self.arena.alloc_from_iter(fields.iter().map(|f| hir::FieldPat {
-                    hir_id: self.next_id(),
-                    ident: f.ident,
-                    pat: self.lower_pat(&f.pat),
-                    is_shorthand: f.is_shorthand,
-                    span: f.span,
-                }));
-                hir::PatKind::Struct(qpath, fs, etc)
-            }
-            PatKind::Tuple(ref pats) => {
-                let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple");
-                hir::PatKind::Tuple(pats, ddpos)
-            }
-            PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)),
-            PatKind::Ref(ref inner, mutbl) => hir::PatKind::Ref(self.lower_pat(inner), mutbl),
-            PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => hir::PatKind::Range(
-                e1.as_deref().map(|e| self.lower_expr(e)),
-                e2.as_deref().map(|e| self.lower_expr(e)),
-                self.lower_range_end(end, e2.is_some()),
-            ),
-            PatKind::Slice(ref pats) => self.lower_pat_slice(pats),
-            PatKind::Rest => {
-                // If we reach here the `..` pattern is not semantically allowed.
-                self.ban_illegal_rest_pat(p.span)
-            }
-            PatKind::Paren(ref inner) => return self.lower_pat(inner),
-            PatKind::MacCall(_) => panic!("Shouldn't exist here"),
-        };
-
-        self.pat_with_node_id_of(p, node)
+        ensure_sufficient_stack(|| {
+            let node = match p.kind {
+                PatKind::Wild => hir::PatKind::Wild,
+                PatKind::Ident(ref binding_mode, ident, ref sub) => {
+                    let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(&*s));
+                    let node = self.lower_pat_ident(p, binding_mode, ident, lower_sub);
+                    node
+                }
+                PatKind::Lit(ref e) => hir::PatKind::Lit(self.lower_expr(e)),
+                PatKind::TupleStruct(ref path, ref pats) => {
+                    let qpath = self.lower_qpath(
+                        p.id,
+                        &None,
+                        path,
+                        ParamMode::Optional,
+                        ImplTraitContext::disallowed(),
+                    );
+                    let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct");
+                    hir::PatKind::TupleStruct(qpath, pats, ddpos)
+                }
+                PatKind::Or(ref pats) => hir::PatKind::Or(
+                    self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat(x))),
+                ),
+                PatKind::Path(ref qself, ref path) => {
+                    let qpath = self.lower_qpath(
+                        p.id,
+                        qself,
+                        path,
+                        ParamMode::Optional,
+                        ImplTraitContext::disallowed(),
+                    );
+                    hir::PatKind::Path(qpath)
+                }
+                PatKind::Struct(ref path, ref fields, etc) => {
+                    let qpath = self.lower_qpath(
+                        p.id,
+                        &None,
+                        path,
+                        ParamMode::Optional,
+                        ImplTraitContext::disallowed(),
+                    );
+
+                    let fs = self.arena.alloc_from_iter(fields.iter().map(|f| hir::FieldPat {
+                        hir_id: self.next_id(),
+                        ident: f.ident,
+                        pat: self.lower_pat(&f.pat),
+                        is_shorthand: f.is_shorthand,
+                        span: f.span,
+                    }));
+                    hir::PatKind::Struct(qpath, fs, etc)
+                }
+                PatKind::Tuple(ref pats) => {
+                    let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple");
+                    hir::PatKind::Tuple(pats, ddpos)
+                }
+                PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)),
+                PatKind::Ref(ref inner, mutbl) => hir::PatKind::Ref(self.lower_pat(inner), mutbl),
+                PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => {
+                    hir::PatKind::Range(
+                        e1.as_deref().map(|e| self.lower_expr(e)),
+                        e2.as_deref().map(|e| self.lower_expr(e)),
+                        self.lower_range_end(end, e2.is_some()),
+                    )
+                }
+                PatKind::Slice(ref pats) => self.lower_pat_slice(pats),
+                PatKind::Rest => {
+                    // If we reach here the `..` pattern is not semantically allowed.
+                    self.ban_illegal_rest_pat(p.span)
+                }
+                // FIXME: consider not using recursion to lower this.
+                PatKind::Paren(ref inner) => return self.lower_pat(inner),
+                PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", p.span),
+            };
+
+            self.pat_with_node_id_of(p, node)
+        })
     }
 
     fn lower_pat_tuple(
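For intuition, a hedged example (not from the diff) of why `lower_pat` recurses: every layer of a pattern, such as each `&` below, re-enters `lower_pat` through arms like `PatKind::Ref`.

```rust
// Hypothetical input: six nested reference patterns; generated code can
// nest far deeper, which is what `ensure_sufficient_stack` guards against.
fn depth(p: &&&&&&u32) -> u32 {
    match p {
        &&&&&&n => n, // one recursive `lower_pat` call per `&`
    }
}
```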
9 changes: 1 addition & 8 deletions src/librustc_interface/util.rs
@@ -80,14 +80,7 @@ pub fn create_session(
     (Lrc::new(sess), Lrc::new(codegen_backend), source_map)
 }
 
-// Temporarily have stack size set to 32MB to deal with various crates with long method
-// chains or deep syntax trees, except when on Haiku.
-// FIXME(oli-obk): get https://github.com/rust-lang/rust/pull/55617 the finish line
-#[cfg(not(target_os = "haiku"))]
-const STACK_SIZE: usize = 32 * 1024 * 1024;
-
-#[cfg(target_os = "haiku")]
-const STACK_SIZE: usize = 16 * 1024 * 1024;
+const STACK_SIZE: usize = 2 * 1024 * 1024;
 
 fn get_stack_size() -> Option<usize> {
     // FIXME: Hacks on hacks. If the env is trying to override the stack size
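With `stacker` growing the stack on demand, a 2MB default suffices. A minimal sketch, assuming only std's `thread::Builder` (this is not rustc's actual spawning code), of running work on a thread with that stack size:

```rust
use std::thread;

// 2MB, matching the new STACK_SIZE default above; stacker takes over
// whenever a recursive pass approaches the limit.
const STACK_SIZE: usize = 2 * 1024 * 1024;

fn main() {
    let handle = thread::Builder::new()
        .stack_size(STACK_SIZE)
        .spawn(|| {
            // ... compiler work would run here ...
        })
        .expect("failed to spawn thread");
    handle.join().unwrap();
}
```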
1 change: 1 addition & 0 deletions src/librustc_middle/Cargo.toml
@@ -34,3 +34,4 @@ byteorder = { version = "1.3" }
 smallvec = { version = "1.0", features = ["union", "may_dangle"] }
 measureme = "0.7.1"
 rustc_session = { path = "../librustc_session" }
+stacker = "0.1.6"
18 changes: 18 additions & 0 deletions src/librustc_middle/middle/limits.rs
@@ -13,6 +13,24 @@ use rustc_span::symbol::{sym, Symbol};
 
 use std::num::IntErrorKind;
 
+// This is the number of bytes that need to be left on the stack before increasing the size.
+// It must be at least as large as the stack required by any code that does not call
+// `ensure_sufficient_stack`.
+const RED_ZONE: usize = 100 * 1024; // 100k
+
+// Only the first stack that is pushed grows exponentially (2^n * STACK_PER_RECURSION) from then
+// on. This value has performance-relevant characteristics; don't set it too high.
+const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB
+
+/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations
+/// to "break up" recursive calls. E.g. almost any call to `visit_expr` or equivalent can benefit
+/// from this.
+///
+/// Should not be sprinkled around carelessly, as it causes a little bit of overhead.
+pub fn ensure_sufficient_stack<R, F: FnOnce() -> R>(f: F) -> R {
+    stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f)
+}
+
 pub fn update_limits(sess: &Session, krate: &ast::Crate) {
     update_limit(sess, krate, &sess.recursion_limit, sym::recursion_limit, 128);
     update_limit(sess, krate, &sess.type_length_limit, sym::type_length_limit, 1048576);
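A usage sketch of the mechanism above: `stacker::maybe_grow` is the real API, while the `Expr` type and the inlined constants (mirroring `RED_ZONE` and `STACK_PER_RECURSION`) are stand-ins for illustration.

```rust
// A recursive walk that cannot overflow: maybe_grow checks the remaining
// stack and, when less than the red zone is left, runs the closure on a
// freshly allocated segment instead.
enum Expr {
    Lit(i64),
    Neg(Box<Expr>),
}

fn eval(e: &Expr) -> i64 {
    // 100KB red zone, 1MB segments, mirroring the constants above.
    stacker::maybe_grow(100 * 1024, 1024 * 1024, || match e {
        Expr::Lit(n) => *n,
        Expr::Neg(inner) => -eval(inner),
    })
}

fn main() {
    // Build a very deep expression iteratively, then evaluate it
    // recursively; without maybe_grow this would overflow the stack.
    let mut e = Expr::Lit(1);
    for _ in 0..1_000_000 {
        e = Expr::Neg(Box::new(e));
    }
    println!("{}", eval(&e));
}
```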
5 changes: 4 additions & 1 deletion src/librustc_middle/ty/inhabitedness/mod.rs
@@ -1,5 +1,6 @@
 pub use self::def_id_forest::DefIdForest;
 
+use crate::middle::limits::ensure_sufficient_stack;
 use crate::ty;
 use crate::ty::context::TyCtxt;
 use crate::ty::TyKind::*;
@@ -196,7 +197,9 @@ impl<'tcx> TyS<'tcx> {
     /// Calculates the forest of `DefId`s from which this type is visibly uninhabited.
     fn uninhabited_from(&self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> DefIdForest {
         match self.kind {
-            Adt(def, substs) => def.uninhabited_from(tcx, substs, param_env),
+            Adt(def, substs) => {
+                ensure_sufficient_stack(|| def.uninhabited_from(tcx, substs, param_env))
+            }
 
             Never => DefIdForest::full(tcx),
 
4 changes: 3 additions & 1 deletion src/librustc_middle/ty/query/plumbing.rs
@@ -68,7 +68,9 @@ impl QueryContext for TyCtxt<'tcx> {
         };
 
         // Use the `ImplicitCtxt` while we execute the query.
-        tls::enter_context(&new_icx, |_| compute(*self))
+        tls::enter_context(&new_icx, |_| {
+            crate::middle::limits::ensure_sufficient_stack(|| compute(*self))
+        })
     })
 }
8 changes: 6 additions & 2 deletions src/librustc_mir/monomorphize/collector.rs
@@ -369,7 +369,9 @@ fn collect_items_rec<'tcx>(
             recursion_depth_reset = Some(check_recursion_limit(tcx, instance, recursion_depths));
             check_type_length_limit(tcx, instance);
 
-            collect_neighbours(tcx, instance, &mut neighbors);
+            rustc::middle::limits::ensure_sufficient_stack(|| {
+                collect_neighbours(tcx, instance, &mut neighbors);
+            });
         }
         MonoItem::GlobalAsm(..) => {
             recursion_depth_reset = None;
@@ -1146,7 +1148,9 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut Vec<Mon
         Some(GlobalAlloc::Memory(alloc)) => {
             trace!("collecting {:?} with {:#?}", alloc_id, alloc);
             for &((), inner) in alloc.relocations().values() {
-                collect_miri(tcx, inner, output);
+                rustc_middle::limits::ensure_sufficient_stack(|| {
+                    collect_miri(tcx, inner, output);
+                });
             }
         }
         Some(GlobalAlloc::Function(fn_instance)) => {
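A hypothetical input (not from the patch) that drives this recursion: each static below stores a pointer to the previous one, so `collect_miri` follows one relocation per link of the chain.

```rust
// Generated code can produce chains like this thousands of links long.
static A: u32 = 0;
static B: &u32 = &A;
static C: &&u32 = &B;
static D: &&&u32 = &C;
```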
7 changes: 6 additions & 1 deletion src/librustc_mir_build/build/expr/as_temp.rs
@@ -4,6 +4,7 @@ use crate::build::scope::DropKind;
 use crate::build::{BlockAnd, BlockAndExtension, Builder};
 use crate::hair::*;
 use rustc_hir as hir;
+use rustc_middle::limits::ensure_sufficient_stack;
 use rustc_middle::middle::region;
 use rustc_middle::mir::*;
 
@@ -21,7 +22,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         M: Mirror<'tcx, Output = Expr<'tcx>>,
     {
         let expr = self.hir.mirror(expr);
-        self.expr_as_temp(block, temp_lifetime, expr, mutability)
+        //
+        // This is the only place in MIR building where we truly need to worry about
+        // infinite recursion. Everything else recurses, too, but it always gets broken up
+        // at some point by inserting an intermediate temporary.
+        ensure_sufficient_stack(|| self.expr_as_temp(block, temp_lifetime, expr, mutability))
     }
 
     fn expr_as_temp(
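A hedged illustration of the comment above: nested borrows force each operand into a fresh temporary, so MIR building re-enters `as_temp` once per level with no other break point.

```rust
fn main() {
    let x = 1u32;
    let r = &&&&&&x; // six levels of temporaries; imagine thousands
    println!("{}", ******r);
}
```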
3 changes: 2 additions & 1 deletion src/librustc_trait_selection/traits/project.rs
@@ -20,6 +20,7 @@ use crate::traits::error_reporting::InferCtxtExt;
 use rustc_ast::ast::Ident;
 use rustc_errors::ErrorReported;
 use rustc_hir::def_id::DefId;
+use rustc_middle::limits::ensure_sufficient_stack;
 use rustc_middle::ty::fold::{TypeFoldable, TypeFolder};
 use rustc_middle::ty::subst::{InternalSubsts, Subst};
 use rustc_middle::ty::{self, ToPolyTraitRef, ToPredicate, Ty, TyCtxt, WithConstness};
@@ -261,7 +262,7 @@
 {
     debug!("normalize_with_depth(depth={}, value={:?})", depth, value);
     let mut normalizer = AssocTypeNormalizer::new(selcx, param_env, cause, depth, obligations);
-    let result = normalizer.fold(value);
+    let result = ensure_sufficient_stack(|| normalizer.fold(value));
     debug!(
         "normalize_with_depth: depth={} result={:?} with {} obligations",
         depth,
3 changes: 2 additions & 1 deletion src/librustc_trait_selection/traits/query/normalize.rs
@@ -8,6 +8,7 @@ use crate::infer::{InferCtxt, InferOk};
 use crate::traits::error_reporting::InferCtxtExt;
 use crate::traits::{Obligation, ObligationCause, PredicateObligation, Reveal};
 use rustc_infer::traits::Normalized;
+use rustc_middle::limits::ensure_sufficient_stack;
 use rustc_middle::ty::fold::{TypeFoldable, TypeFolder};
 use rustc_middle::ty::subst::Subst;
 use rustc_middle::ty::{self, Ty, TyCtxt};
@@ -131,7 +132,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> {
                     ty
                 );
             }
-            let folded_ty = self.fold_ty(concrete_ty);
+            let folded_ty = ensure_sufficient_stack(|| self.fold_ty(concrete_ty));
             self.anon_depth -= 1;
             folded_ty
         }
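For intuition, a hypothetical sketch (not from the diff) of input that makes the normalizer recurse: each associated-type projection layer costs one `fold_ty` call, so nesting depth maps directly to stack depth.

```rust
trait Tr {
    type A;
}

impl Tr for () {
    type A = ();
}

// Three projection levels; macro-generated code can stack many more.
type Deep = <<<() as Tr>::A as Tr>::A as Tr>::A;

fn main() {
    let _: Deep = ();
}
```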
