Subpart10 for async drop (major4) - StateTransform pass improvements #129746

Closed
Subpart8 for async drop (major2) - dropline in scopes for potentially async drops
azhogin committed Apr 6, 2025
commit 59b99453854d16326438a32cdd2bc39c87a1267b
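For context, here is a minimal sketch (not taken from this PR or its test suite; all names are illustrative) of the situation the new "dropline" edges cover: a local with non-trivial, potentially async cleanup is live across a suspension point, so dropping the suspended coroutine has to reach that cleanup on the coroutine-drop path, not only on unwind.

```rust
// Illustrative only: `Gate`, `step`, and `holds_gate_across_await` are hypothetical.
struct Gate(Vec<u8>);

impl Drop for Gate {
    fn drop(&mut self) {
        // Stand-in for cleanup that the async-drop machinery may need to poll.
        self.0.clear();
    }
}

async fn step() {}

async fn holds_gate_across_await() {
    let gate = Gate(vec![1, 2, 3]); // live across the await below
    step().await; // if the future is dropped while suspended here, the
                  // coroutine-drop path must still run `gate`'s cleanup
    drop(gate); // normal (return-path) drop
}
```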
151 changes: 131 additions & 20 deletions compiler/rustc_mir_build/src/builder/scope.rs
@@ -89,6 +89,7 @@ use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::middle::region;
use rustc_middle::mir::*;
use rustc_middle::thir::{ExprId, LintLevel};
use rustc_middle::ty::{self, TyCtxt};
use rustc_middle::{bug, span_bug};
use rustc_session::lint::Level;
use rustc_span::source_map::Spanned;
@@ -883,6 +884,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
block.unit()
}

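/// Whether dropping `local` may have to run on the coroutine-drop (async drop) path
/// rather than as a purely synchronous drop.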
fn is_async_drop_impl(
tcx: TyCtxt<'tcx>,
local_decls: &IndexVec<Local, LocalDecl<'tcx>>,
typing_env: ty::TypingEnv<'tcx>,
local: Local,
) -> bool {
let ty = local_decls[local].ty;
if ty.is_async_drop(tcx, typing_env) || ty.is_coroutine() {
return true;
}
ty.needs_async_drop(tcx, typing_env)
}
fn is_async_drop(&self, local: Local) -> bool {
Self::is_async_drop_impl(self.tcx, &self.local_decls, self.typing_env(), local)
}

fn leave_top_scope(&mut self, block: BasicBlock) -> BasicBlock {
// If we are emitting a `drop` statement, we need to have the cached
// diverge cleanup pads ready in case that drop panics.
@@ -891,14 +908,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let unwind_to = if needs_cleanup { self.diverge_cleanup() } else { DropIdx::MAX };

let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes");
let has_async_drops = is_coroutine
&& scope.drops.iter().any(|v| v.kind == DropKind::Value && self.is_async_drop(v.local));
let dropline_to = if has_async_drops { Some(self.diverge_dropline()) } else { None };
let scope = self.scopes.scopes.last().expect("leave_top_scope called with no scopes");
let typing_env = self.typing_env();
build_scope_drops(
&mut self.cfg,
&mut self.scopes.unwind_drops,
&mut self.scopes.coroutine_drops,
scope,
block,
unwind_to,
dropline_to,
is_coroutine && needs_cleanup,
self.arg_count,
|v: Local| Self::is_async_drop_impl(self.tcx, &self.local_decls, typing_env, v),
)
.into_block()
}
@@ -1314,22 +1339,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
self.scopes.unwind_drops.add_entry_point(start, next_drop);
}

/// Sets up a path that performs all required cleanup for dropping a
/// coroutine, starting from the given block that ends in
/// [TerminatorKind::Yield].
///
/// This path terminates in CoroutineDrop.
pub(crate) fn coroutine_drop_cleanup(&mut self, yield_block: BasicBlock) {
/// Returns the [DropIdx] for the innermost drop on the dropline (coroutine drop path).
/// The `DropIdx` will be created if it doesn't already exist.
fn diverge_dropline(&mut self) -> DropIdx {
// It is okay to use a dummy span because getting the scope index on the topmost scope
// must always succeed.
self.diverge_dropline_target(self.scopes.topmost(), DUMMY_SP)
}

/// Similar to diverge_cleanup_target, but for dropline (coroutine drop path)
fn diverge_dropline_target(&mut self, target_scope: region::Scope, span: Span) -> DropIdx {
debug_assert!(
matches!(
self.cfg.block_data(yield_block).terminator().kind,
TerminatorKind::Yield { .. }
),
"coroutine_drop_cleanup called on block with non-yield terminator."
self.coroutine.is_some(),
"diverge_dropline_target is valid only for coroutine"
);
let (uncached_scope, mut cached_drop) = self
.scopes
.scopes
let target = self.scopes.scope_index(target_scope, span);
let (uncached_scope, mut cached_drop) = self.scopes.scopes[..=target]
.iter()
.enumerate()
.rev()
@@ -1338,13 +1363,34 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
})
.unwrap_or((0, ROOT_NODE));

for scope in &mut self.scopes.scopes[uncached_scope..] {
if uncached_scope > target {
return cached_drop;
}

for scope in &mut self.scopes.scopes[uncached_scope..=target] {
for drop in &scope.drops {
cached_drop = self.scopes.coroutine_drops.add_drop(*drop, cached_drop);
}
scope.cached_coroutine_drop_block = Some(cached_drop);
}

cached_drop
}

/// Sets up a path that performs all required cleanup for dropping a
/// coroutine, starting from the given block that ends in
/// [TerminatorKind::Yield].
///
/// This path terminates in CoroutineDrop.
pub(crate) fn coroutine_drop_cleanup(&mut self, yield_block: BasicBlock) {
debug_assert!(
matches!(
self.cfg.block_data(yield_block).terminator().kind,
TerminatorKind::Yield { .. }
),
"coroutine_drop_cleanup called on block with non-yield terminator."
);
let cached_drop = self.diverge_dropline();
self.scopes.coroutine_drops.add_entry_point(yield_block, cached_drop);
}

@@ -1438,18 +1484,26 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// * `unwind_to`, describes the drops that would occur at this point in the code if a
/// panic occurred (a subset of the drops in `scope`, since we sometimes elide StorageDead and other
/// instructions on unwinding)
/// * `dropline_to`, describes the drops that would occur at this point in the code if a
/// coroutine drop occurred.
/// * `storage_dead_on_unwind`, if true, then we should emit `StorageDead` even when unwinding
/// * `arg_count`, number of MIR local variables corresponding to fn arguments (used to assert that we don't drop those)
fn build_scope_drops<'tcx>(
fn build_scope_drops<'tcx, F>(
cfg: &mut CFG<'tcx>,
unwind_drops: &mut DropTree,
coroutine_drops: &mut DropTree,
scope: &Scope,
block: BasicBlock,
unwind_to: DropIdx,
dropline_to: Option<DropIdx>,
storage_dead_on_unwind: bool,
arg_count: usize,
) -> BlockAnd<()> {
debug!("build_scope_drops({:?} -> {:?})", block, scope);
is_async_drop: F,
) -> BlockAnd<()>
where
F: Fn(Local) -> bool,
{
debug!("build_scope_drops({:?} -> {:?}), dropline_to={:?}", block, scope, dropline_to);

// Build up the drops in evaluation order. The end result will
// look like:
@@ -1482,6 +1536,9 @@ fn build_scope_drops<'tcx>(
// will branch to `drops[n]`.
let mut block = block;

// `dropline_to` indicates what needs to be dropped should a coroutine drop occur.
let mut dropline_to = dropline_to;

for drop_data in scope.drops.iter().rev() {
let source_info = drop_data.source_info;
let local = drop_data.local;
@@ -1498,6 +1555,12 @@
debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
unwind_to = unwind_drops.drops[unwind_to].next;

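// Likewise advance the coroutine-drop (dropline) cursor past this drop.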
if let Some(idx) = dropline_to {
debug_assert_eq!(coroutine_drops.drops[idx].data.local, drop_data.local);
debug_assert_eq!(coroutine_drops.drops[idx].data.kind, drop_data.kind);
dropline_to = Some(coroutine_drops.drops[idx].next);
}

// If the operand has been moved, and we are not on an unwind
// path, then don't generate the drop. (We only take this into
// account for non-unwind paths so as not to disturb the
@@ -1507,6 +1570,12 @@
}

unwind_drops.add_entry_point(block, unwind_to);
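// Only drops that may be async need an entry point on the coroutine-drop path.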
if let Some(to) = dropline_to
&& is_async_drop(local)
{
coroutine_drops.add_entry_point(block, to);
}

let next = cfg.start_new_block();
cfg.terminate(
block,
@@ -1564,6 +1633,11 @@
debug_assert_eq!(unwind_drops.drops[unwind_to].data.kind, drop_data.kind);
unwind_to = unwind_drops.drops[unwind_to].next;
}
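// Keep the coroutine-drop (dropline) cursor in sync for storage-only drops as well.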
if let Some(idx) = dropline_to {
debug_assert_eq!(coroutine_drops.drops[idx].data.local, drop_data.local);
debug_assert_eq!(coroutine_drops.drops[idx].data.kind, drop_data.kind);
dropline_to = Some(coroutine_drops.drops[idx].next);
}
// Only temps and vars need their storage dead.
assert!(local.index() > arg_count);
cfg.push(block, Statement { source_info, kind: StatementKind::StorageDead(local) });
@@ -1619,6 +1693,39 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
}
}
}
// Link the exit drop tree to dropline drop tree (coroutine drop path) for async drops
if is_coroutine
&& drops.drops.iter().any(|DropNode { data, next: _ }| {
data.kind == DropKind::Value && self.is_async_drop(data.local)
})
{
let dropline_target = self.diverge_dropline_target(else_scope, span);
let mut dropline_indices = IndexVec::from_elem_n(dropline_target, 1);
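// Mirror each drop from the exit tree into the coroutine-drop tree, recording the
// mapping in `dropline_indices`.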
for (drop_idx, drop_data) in drops.drops.iter_enumerated().skip(1) {
match drop_data.data.kind {
DropKind::Storage | DropKind::ForLint => {
let coroutine_drop = self
.scopes
.coroutine_drops
.add_drop(drop_data.data, dropline_indices[drop_data.next]);
dropline_indices.push(coroutine_drop);
}
DropKind::Value => {
let coroutine_drop = self
.scopes
.coroutine_drops
.add_drop(drop_data.data, dropline_indices[drop_data.next]);
if self.is_async_drop(drop_data.data.local) {
self.scopes.coroutine_drops.add_entry_point(
blocks[drop_idx].unwrap(),
dropline_indices[drop_data.next],
);
}
dropline_indices.push(coroutine_drop);
}
}
}
}
blocks[ROOT_NODE].map(BasicBlock::unit)
}

@@ -1663,9 +1770,11 @@ impl<'a, 'tcx: 'a> Builder<'a, 'tcx> {
// to be captured by the coroutine. I'm not sure how important this
// optimization is, but it is here.
for (drop_idx, drop_node) in drops.drops.iter_enumerated() {
if let DropKind::Value = drop_node.data.kind {
if let DropKind::Value = drop_node.data.kind
&& let Some(bb) = blocks[drop_idx]
{
debug_assert!(drop_node.next < drops.drops.next_index());
drops.entry_points.push((drop_node.next, blocks[drop_idx].unwrap()));
drops.entry_points.push((drop_node.next, bb));
}
}
Self::build_unwind_tree(cfg, drops, fn_span, resume_block);
@@ -1717,6 +1826,8 @@ impl<'tcx> DropTreeBuilder<'tcx> for CoroutineDrop {
let term = cfg.block_data_mut(from).terminator_mut();
if let TerminatorKind::Yield { ref mut drop, .. } = term.kind {
*drop = Some(to);
} else if let TerminatorKind::Drop { ref mut drop, .. } = term.kind {
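// With async drops, `Drop` terminators (not only `Yield`) can branch into the coroutine-drop path.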
*drop = Some(to);
} else {
span_bug!(
term.source_info.span,
@@ -34,14 +34,18 @@ yields ()
StorageDead(_5);
StorageDead(_4);
StorageDead(_3);
drop(_1) -> [return: bb1, unwind: bb2];
drop(_1) -> [return: bb1, unwind: bb3, drop: bb2];
}

bb1: {
return;
}

bb2 (cleanup): {
bb2: {
coroutine_drop;
}

bb3 (cleanup): {
resume;
}
}
@@ -34,14 +34,18 @@ yields ()
StorageDead(_5);
StorageDead(_4);
StorageDead(_3);
drop(_1) -> [return: bb1, unwind: bb2];
drop(_1) -> [return: bb1, unwind: bb3, drop: bb2];
}

bb1: {
return;
}

bb2 (cleanup): {
bb2: {
coroutine_drop;
}

bb3 (cleanup): {
resume;
}
}
@@ -34,14 +34,18 @@ yields ()
StorageDead(_5);
StorageDead(_4);
StorageDead(_3);
drop(_1) -> [return: bb1, unwind: bb2];
drop(_1) -> [return: bb1, unwind: bb3, drop: bb2];
}

bb1: {
return;
}

bb2 (cleanup): {
bb2: {
coroutine_drop;
}

bb3 (cleanup): {
resume;
}
}
@@ -34,14 +34,18 @@ yields ()
StorageDead(_5);
StorageDead(_4);
StorageDead(_3);
drop(_1) -> [return: bb1, unwind: bb2];
drop(_1) -> [return: bb1, unwind: bb3, drop: bb2];
}

bb1: {
return;
}

bb2 (cleanup): {
bb2: {
coroutine_drop;
}

bb3 (cleanup): {
resume;
}
}