diff --git a/compiler/rustc_mir_transform/src/inline/cycle.rs b/compiler/rustc_mir_transform/src/inline/cycle.rs
index 7a1340f3a5527..99c7b616f1b21 100644
--- a/compiler/rustc_mir_transform/src/inline/cycle.rs
+++ b/compiler/rustc_mir_transform/src/inline/cycle.rs
@@ -5,6 +5,7 @@ use rustc_middle::mir::TerminatorKind;
 use rustc_middle::ty::TypeVisitableExt;
 use rustc_middle::ty::{self, GenericArgsRef, InstanceDef, TyCtxt};
 use rustc_session::Limit;
+use rustc_span::sym;
 
 // FIXME: check whether it is cheaper to precompute the entire call graph instead of invoking
 // this query ridiculously often.
@@ -164,11 +165,20 @@ pub(crate) fn mir_inliner_callees<'tcx>(
     let mut calls = FxIndexSet::default();
     for bb_data in body.basic_blocks.iter() {
         let terminator = bb_data.terminator();
-        if let TerminatorKind::Call { func, .. } = &terminator.kind {
+        if let TerminatorKind::Call { func, args: call_args, .. } = &terminator.kind {
             let ty = func.ty(&body.local_decls, tcx);
-            let call = match ty.kind() {
-                ty::FnDef(def_id, args) => (*def_id, *args),
-                _ => continue,
+            let ty::FnDef(def_id, generic_args) = ty.kind() else {
+                continue;
+            };
+            let call = if tcx.is_intrinsic(*def_id, sym::const_eval_select) {
+                let func = &call_args[2].node;
+                let ty = func.ty(&body.local_decls, tcx);
+                let ty::FnDef(def_id, generic_args) = ty.kind() else {
+                    continue;
+                };
+                (*def_id, *generic_args)
+            } else {
+                (*def_id, *generic_args)
             };
             calls.insert(call);
         }
diff --git a/tests/ui/mir/const_eval_select_cycle.rs b/tests/ui/mir/const_eval_select_cycle.rs
new file mode 100644
index 0000000000000..0b84455b2f7fb
--- /dev/null
+++ b/tests/ui/mir/const_eval_select_cycle.rs
@@ -0,0 +1,18 @@
+// Regression test for #122659
+//@ build-pass
+//@ compile-flags: -O --crate-type=lib
+
+#![feature(core_intrinsics)]
+#![feature(const_eval_select)]
+
+use std::intrinsics::const_eval_select;
+
+#[inline]
+pub const fn f() {
+    const_eval_select((), g, g)
+}
+
+#[inline]
+pub const fn g() {
+    const_eval_select((), f, f)
+}