Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Cargo.lock
Original file line number Diff line number Diff line change
Expand Up @@ -3800,6 +3800,7 @@ name = "rustc_data_structures"
version = "0.0.0"
dependencies = [
"arrayvec",
"backtrace",
"bitflags",
"either",
"elsa",
Expand Down
1 change: 1 addition & 0 deletions compiler/rustc_data_structures/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
arrayvec = { version = "0.7", default-features = false }
backtrace = "0.3.75"
bitflags = "2.4.1"
either = "1.0"
elsa = "1.11.0"
Expand Down
1 change: 1 addition & 0 deletions compiler/rustc_data_structures/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
#![feature(dropck_eyepatch)]
#![feature(extend_one)]
#![feature(file_buffered)]
#![feature(likely_unlikely)]
#![feature(map_try_insert)]
#![feature(min_specialization)]
#![feature(negative_impls)]
Expand Down
90 changes: 88 additions & 2 deletions compiler/rustc_data_structures/src/stack.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
use std::hint::{likely, unlikely};
use std::io::{self, Write};
use std::sync::atomic::{AtomicU16, Ordering};

use backtrace::{Backtrace, BacktraceFrame};

// This is the amount of bytes that need to be left on the stack before increasing the size.
// It must be at least as large as the stack required by any code that does not call
// `ensure_sufficient_stack`.
Expand All @@ -11,12 +17,92 @@ const STACK_PER_RECURSION: usize = 1024 * 1024; // 1MB
#[cfg(target_os = "aix")]
const STACK_PER_RECURSION: usize = 16 * 1024 * 1024; // 16MB

thread_local! {
    // Number of times the *current thread* has grown its stack via `stacker::grow`
    // without yet returning from the grown scope. Thread-local, so the atomic is used
    // purely for interior mutability; `Relaxed` ordering suffices (no cross-thread
    // synchronization is intended).
    static TIMES_GROWN: AtomicU16 = const { AtomicU16::new(0) };
}

// Give up if we expand the stack this many times and are still trying to recurse deeper.
const MAX_STACK_GROWTH: u16 = 1000;

/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations
/// to "break up" recursive calls. E.g. almost any call to `visit_expr` or equivalent can benefit
/// from this.
///
/// Should not be sprinkled around carelessly, as it causes a little bit of overhead.
#[inline]
pub fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f)
// if we can't guess the remaining stack (unsupported on some platforms) we immediately grow
// the stack and then cache the new stack size (which we do know now because we allocated it.
let enough_space = match stacker::remaining_stack() {
Some(remaining) => remaining >= RED_ZONE,
None => false,
};
if likely(enough_space) {
f()
} else {
let times = TIMES_GROWN.with(|times| times.fetch_add(1, Ordering::Relaxed));
if unlikely(times > MAX_STACK_GROWTH) {
too_much_stack();
}
let out = stacker::grow(STACK_PER_RECURSION, f);
TIMES_GROWN.with(|times| times.fetch_sub(1, Ordering::Relaxed));
out
}
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm pretty sure this new implementation breaks on targets where stacker uses the default impl of not growing the stack. There remaining_stack would return None and thus on ever recursion TIMES_GROWN would be increased, only allowing a 1000x recursion rather than 1000 * however many recursions a single stack segment fits.

}

/// Last-ditch handler for exceeding [`MAX_STACK_GROWTH`]: runs the reporting code under
/// `catch_unwind` so that a panic while reporting still ends in a clean `abort` with a
/// best-effort message instead of a double failure.
#[cold]
fn too_much_stack() -> ! {
    // `report_too_much_stack` diverges (`-> !`), so `Result<!, _>` can only ever be
    // `Err`, making this `let` pattern irrefutable.
    let Err(e) = std::panic::catch_unwind(report_too_much_stack);
    let mut stderr = io::stderr();
    let _ = writeln!(stderr, "ensure_sufficient_stack: panicked while handling stack overflow!");
    // Panic payloads are usually `String` (from `panic!("{..}")`) or `&'static str`
    // (from `panic!("literal")`); the original only handled `String`, silently dropping
    // literal panic messages. Print either form.
    match e.downcast::<String>() {
        Ok(s) => {
            let _ = writeln!(stderr, "{s}");
        }
        Err(e) => {
            if let Some(s) = e.downcast_ref::<&'static str>() {
                let _ = writeln!(stderr, "{s}");
            }
        }
    }
    std::process::abort();
}

/// Prints a diagnostic and a truncated backtrace (first and last hundred frames) before
/// aborting. Called via `catch_unwind` from [`too_much_stack`], so a panic here is still
/// handled gracefully.
#[cold]
fn report_too_much_stack() -> ! {
    // Something is *definitely* wrong: the stack was grown `MAX_STACK_GROWTH` times and
    // the red zone is still exhausted.
    eprintln!(
        "still not enough stack after {MAX_STACK_GROWTH} expansions of dynamic stack; infinite recursion?"
    );

    // Capture unresolved (cheap); frames are resolved lazily in `print_frames`.
    let backtrace = Backtrace::new_unresolved();
    let frames = backtrace.frames();

    if frames.len() <= 200 {
        // Short trace: print everything. The slicing below would panic (`[..100]`),
        // underflow (`len - 100`), or print overlapping ranges for traces shorter
        // than 200 frames — and panicking inside the overflow handler is the worst
        // possible place to do so.
        eprintln!("all {} frames:", frames.len());
        print_frames(0, frames);
    } else {
        eprintln!("first hundred frames:");
        print_frames(0, &frames[..100]);

        eprintln!("...\nlast hundred frames:");
        let start = frames.len() - 100;
        print_frames(start, &frames[start..]);
    }
    std::process::abort();
}

/// Resolves and prints `frames` to stderr, numbering them consecutively starting at
/// `start`. Each symbol is printed as `N: name` followed by an indented
/// `at file:line:col` location when that information is available.
#[cold]
fn print_frames(start: usize, frames: &[BacktraceFrame]) {
    let mut index = start;
    for frame in frames {
        // `resolve` requires `&mut`, but we only hold a shared slice, so resolve a clone.
        let mut resolved = frame.clone();
        resolved.resolve();
        for symbol in resolved.symbols() {
            match symbol.name() {
                Some(sym) => eprint!("{index}: {sym}"),
                None => eprint!("{index}: <unknown>"),
            }
            eprint!("\n\t\tat ");
            match symbol.filename() {
                Some(file) => {
                    eprint!("{}", file.display());
                    // Line/column are nested: a column is only meaningful with a line.
                    if let Some(line) = symbol.lineno() {
                        eprint!(":{line}");
                        if let Some(col) = symbol.colno() {
                            eprint!(":{col}");
                        }
                    }
                }
                None => eprint!("<unknown>"),
            }
            eprintln!();
            index += 1;
        }
    }
}
3 changes: 3 additions & 0 deletions compiler/rustc_middle/src/hir/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -485,6 +485,9 @@ pub fn provide(providers: &mut Providers) {
};
providers.opt_ast_lowering_delayed_lints =
|tcx, id| tcx.hir_crate(()).owner(tcx, id.def_id).as_owner().map(|o| &o.delayed_lints);
// WARNING: this provider is used in `create_query_cycle` to detect the span where a cycle
// occurred. You *must* update `rustc_middle::query::plumbing::TaggedQueryKey::default_span` if
// you update this implementation to use additional queries.
providers.def_span = |tcx, def_id| tcx.hir_span(tcx.local_def_id_to_hir_id(def_id));
providers.def_ident_span = |tcx, def_id| {
let hir_id = tcx.local_def_id_to_hir_id(def_id);
Expand Down
3 changes: 2 additions & 1 deletion compiler/rustc_middle/src/query/plumbing.rs
Original file line number Diff line number Diff line change
Expand Up @@ -432,11 +432,12 @@ macro_rules! define_callbacks {
}

/// Returns the default span for this query if `span` is a dummy span.
#[tracing::instrument(level = "debug", skip(tcx, span))]
pub fn default_span(&self, tcx: TyCtxt<'tcx>, span: Span) -> Span {
if !span.is_dummy() {
return span
}
if let TaggedQueryKey::def_span(..) = self {
if let TaggedQueryKey::def_span(..) | TaggedQueryKey::local_def_id_to_hir_id(..) = self {
// The `def_span` query is used to calculate `default_span`,
// so exit to avoid infinite recursion.
return DUMMY_SP
Expand Down
3 changes: 3 additions & 0 deletions compiler/rustc_query_impl/src/execution.rs
Original file line number Diff line number Diff line change
Expand Up @@ -201,6 +201,8 @@ fn find_and_handle_cycle<'tcx, C: QueryCache>(
try_execute: QueryJobId,
span: Span,
) -> (C::Value, Option<DepNodeIndex>) {
tracing::info!("hit a query cycle evaluating {}!", query.name);

// Ensure there were no errors collecting all active jobs.
// We need the complete map to ensure we find a cycle to break.
let job_map = collect_active_query_jobs(tcx, CollectActiveJobsKind::FullNoContention);
Expand Down Expand Up @@ -256,6 +258,7 @@ fn wait_for_query<'tcx, C: QueryCache>(

/// Shared main part of both [`execute_query_incr_inner`] and [`execute_query_non_incr_inner`].
#[inline(never)]
#[tracing::instrument(level = "debug", skip_all, fields(query = query.name, key))]
fn try_execute_query<'tcx, C: QueryCache, const INCR: bool>(
query: &'tcx QueryVTable<'tcx, C>,
tcx: TyCtxt<'tcx>,
Expand Down
2 changes: 2 additions & 0 deletions src/tools/tidy/src/deps.rs
Original file line number Diff line number Diff line change
Expand Up @@ -275,6 +275,7 @@ const PERMITTED_RUSTC_DEPS_LOCATION: ListLocation = location!(+6);
/// rustc. Please check with the compiler team before adding an entry.
const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
// tidy-alphabetical-start
"addr2line",
"adler2",
"aho-corasick",
"allocator-api2", // FIXME: only appears in Cargo.lock due to https://github.com/rust-lang/cargo/issues/10801
Expand All @@ -287,6 +288,7 @@ const PERMITTED_RUSTC_DEPENDENCIES: &[&str] = &[
"ar_archive_writer",
"arrayref",
"arrayvec",
"backtrace",
"bitflags",
"blake3",
"block-buffer",
Expand Down
Loading